gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2012 - 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.frank.search.solr.core.convert;
import com.frank.search.solr.core.mapping.SolrPersistentEntity;
import com.frank.search.solr.core.mapping.SolrPersistentProperty;
import com.frank.search.solr.core.query.Criteria;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.CollectionFactory;
import org.springframework.data.convert.EntityInstantiator;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
import org.springframework.data.mapping.model.ParameterValueProvider;
import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider;
import org.springframework.data.mapping.model.PropertyValueProvider;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
* Implementation of {@link SolrConverter} to read/write
* {@link org.apache.solr.common.SolrDocument}/
* {@link org.apache.solr.common.SolrInputDocument}. <br/>
*
* @author Christoph Strobl
* @author Francisco Spaeth
*/
/**
 * {@link SolrConverter} that maps between domain objects and Solr documents using
 * Spring Data mapping metadata ({@link SolrPersistentEntity}/{@link SolrPersistentProperty}).
 * Reading instantiates entities via {@link EntityInstantiators}; writing populates a
 * target {@link Map} (typically a {@link SolrInputDocument}) with {@link SolrInputField}s,
 * honoring dynamic (wildcard) fields and property/document boosts.
 * <p>
 * NOTE(review): this class implements {@link InitializingBean} but declares no
 * afterPropertiesSet() here — presumably inherited from {@link SolrConverterBase}; confirm.
 */
public class MappingSolrConverter extends SolrConverterBase
        implements SolrConverter, ApplicationContextAware, InitializingBean {

    /**
     * Position of the wildcard ({@link Criteria#WILDCARD}) within a dynamic field name
     * pattern. Each constant knows how to match a concrete field name against the
     * pattern, extract the dynamic part, and compose a concrete name from it.
     */
    private enum WildcardPosition {

        /** Wildcard at the start (e.g. {@code *_s}): candidates must end with the static suffix. */
        LEADING {
            @Override
            public boolean match(String fieldName, String candidate) {
                return StringUtils.endsWith(candidate, removeWildcard(fieldName));
            }

            @Override
            public String extractName(String fieldName, String dynamicFieldName) {
                Assert.isTrue(match(fieldName, dynamicFieldName), "dynamicFieldName must be derivated from fieldName");
                // Strip the static suffix, leaving only the dynamic part of the name.
                return StringUtils.removeEnd(dynamicFieldName, removeWildcard(fieldName));
            }

            @Override
            public String createName(String fieldName, String name) {
                return removeWildcard(fieldName) + name;
            }
        },

        /** Wildcard at the end (e.g. {@code s_*}): candidates must start with the static prefix. */
        TRAILING {
            @Override
            public boolean match(String fieldName, String candidate) {
                return StringUtils.startsWith(candidate, removeWildcard(fieldName));
            }

            @Override
            public String extractName(String fieldName, String dynamicFieldName) {
                Assert.isTrue(match(fieldName, dynamicFieldName), "dynamicFieldName must be derivated from fieldName");
                // Strip the static prefix, leaving only the dynamic part of the name.
                return StringUtils.removeStart(dynamicFieldName, removeWildcard(fieldName));
            }

            @Override
            public String createName(String fieldName, String name) {
                return name + removeWildcard(fieldName);
            }
        };

        /**
         * @return {@link #LEADING} when the pattern starts with the wildcard character,
         *         {@link #TRAILING} otherwise (the default for all other patterns).
         */
        public static WildcardPosition getAppropriate(String fieldName) {
            if (StringUtils.startsWith(fieldName, Criteria.WILDCARD)) {
                return WildcardPosition.LEADING;
            } else {
                return WildcardPosition.TRAILING;
            }
        }

        /** Removes every wildcard character from the given field name pattern. */
        String removeWildcard(String fieldName) {
            return StringUtils.remove(fieldName, Criteria.WILDCARD);
        }

        /** @return whether {@code candidate} is a concrete name matching the pattern {@code fieldName}. */
        public abstract boolean match(String fieldName, String candidate);

        /** Extracts the dynamic portion of {@code dynamicFieldName}; asserts it matches the pattern first. */
        public abstract String extractName(String fieldName, String dynamicFieldName);

        /** Builds a concrete field name by combining the pattern's static part with {@code name}. */
        public abstract String createName(String fieldName, String name);
    }

    private final MappingContext<? extends SolrPersistentEntity<?>, SolrPersistentProperty> mappingContext;
    private final EntityInstantiators instantiators = new EntityInstantiators();

    // Captured for completeness; not referenced by this class itself.
    @SuppressWarnings("unused") //
    private ApplicationContext applicationContext;

    /**
     * @param mappingContext must not be {@literal null}; source of persistent-entity metadata.
     */
    public MappingSolrConverter(
            MappingContext<? extends SolrPersistentEntity<?>, SolrPersistentProperty> mappingContext) {
        Assert.notNull(mappingContext);
        this.mappingContext = mappingContext;
    }

    @Override
    public MappingContext<? extends SolrPersistentEntity<?>, SolrPersistentProperty> getMappingContext() {
        return mappingContext;
    }

    /**
     * Reads every document in {@code source} into an instance of {@code type}.
     *
     * @return an empty list when {@code source} is {@literal null}, otherwise one entity per document.
     */
    @Override
    public <S, R> List<R> read(SolrDocumentList source, Class<R> type) {
        if (source == null) {
            return Collections.emptyList();
        }
        List<R> resultList = new ArrayList<R>(source.size());
        TypeInformation<R> typeInformation = ClassTypeInformation.from(type);
        for (Map<String, ?> item : source) {
            resultList.add(read(typeInformation, item));
        }
        return resultList;
    }

    @Override
    public <R> R read(Class<R> type, Map<String, ?> source) {
        return read(ClassTypeInformation.from(type), source);
    }

    /**
     * Reads a single document map into the target type, preferring a registered
     * custom converter for the whole document when one exists.
     */
    @SuppressWarnings("unchecked")
    protected <S extends Object> S read(TypeInformation<S> targetTypeInformation, Map<String, ?> source) {
        if (source == null) {
            return null;
        }
        Assert.notNull(targetTypeInformation);
        Class<S> rawType = targetTypeInformation.getType();
        // in case there's a custom conversion for the document
        if (hasCustomReadTarget(source.getClass(), rawType)) {
            return convert(source, rawType);
        }
        SolrPersistentEntity<S> entity = (SolrPersistentEntity<S>) mappingContext.getPersistentEntity(rawType);
        return read(entity, source, null);
    }

    /**
     * Instantiates the entity (resolving constructor arguments from {@code source})
     * and then populates all remaining, non-constructor properties.
     */
    private <S extends Object> S read(final SolrPersistentEntity<S> entity, final Map<String, ?> source,
            Object parent) {
        ParameterValueProvider<SolrPersistentProperty> parameterValueProvider = getParameterValueProvider(entity,
                source, parent);
        EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity);
        final S instance = instantiator.createInstance(entity, parameterValueProvider);
        final PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor(entity.getPropertyAccessor(instance),
                getConversionService());
        entity.doWithProperties(new PropertyHandler<SolrPersistentProperty>() {
            @Override
            public void doWithPersistentProperty(SolrPersistentProperty persistentProperty) {
                // Constructor arguments were already supplied at instantiation time.
                if (entity.isConstructorArgument(persistentProperty)) {
                    return;
                }
                Object o = getValue(persistentProperty, source, instance);
                if (o != null) {
                    accessor.setProperty(persistentProperty, o);
                }
            }
        });
        return instance;
    }

    /** Resolves a single property value from {@code source} via {@link SolrPropertyValueProvider}. */
    protected Object getValue(SolrPersistentProperty property, Object source, Object parent) {
        SolrPropertyValueProvider provider = new SolrPropertyValueProvider(source, parent);
        return provider.getPropertyValue(property);
    }

    /** Creates the provider that feeds constructor arguments during entity instantiation. */
    private ParameterValueProvider<SolrPersistentProperty> getParameterValueProvider(SolrPersistentEntity<?> entity,
            Map<String, ?> source, Object parent) {
        SolrPropertyValueProvider provider = new SolrPropertyValueProvider(source, parent);
        PersistentEntityParameterValueProvider<SolrPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<SolrPersistentProperty>(
                entity, provider, parent);
        return parameterProvider;
    }

    /**
     * Writes {@code source} into {@code target}. A custom whole-object converter to
     * {@link SolrInputDocument} takes precedence over metadata-driven mapping.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void write(Object source, @SuppressWarnings("rawtypes") Map target) {
        if (source == null) {
            return;
        }
        Class<? extends Object> sourceClass = source.getClass();
        if (hasCustomWriteTarget(sourceClass, SolrInputDocument.class)
                && canConvert(sourceClass, SolrInputDocument.class)) {
            SolrInputDocument convertedDocument = convert(source, SolrInputDocument.class);
            target.putAll(convertedDocument);
        } else {
            SolrPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
            write(source, target, entity);
        }
    }

    /**
     * Metadata-driven write: iterates the entity's properties, skipping nulls and
     * read-only fields, writing wildcard-map properties specially, and applying
     * property- and document-level boosts where configured.
     */
    @SuppressWarnings("rawtypes")
    protected void write(Object source, final Map target, SolrPersistentEntity<?> entity) {
        final PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor(entity.getPropertyAccessor(source),
                getConversionService());
        entity.doWithProperties(new PropertyHandler<SolrPersistentProperty>() {
            @SuppressWarnings("unchecked")
            @Override
            public void doWithPersistentProperty(SolrPersistentProperty persistentProperty) {
                Object value = accessor.getProperty(persistentProperty);
                if (value == null || persistentProperty.isReadonly()) {
                    return;
                }
                // Wildcard field names are only writable when backed by a Map property.
                if (persistentProperty.containsWildcard() && !persistentProperty.isMap()) {
                    throw new IllegalArgumentException("Field '" + persistentProperty.getFieldName()
                            + "' must not contain wildcards. Consider excluding Field from beeing indexed.");
                }
                Collection<SolrInputField> fields;
                if (persistentProperty.isMap() && persistentProperty.containsWildcard()) {
                    fields = writeWildcardMapPropertyToTarget(target, persistentProperty, (Map<?, ?>) value);
                } else {
                    fields = writeRegularPropertyToTarget(target, persistentProperty, value);
                }
                if (persistentProperty.isBoosted()) {
                    for (SolrInputField field : fields) {
                        field.setBoost(persistentProperty.getBoost());
                    }
                }
            }
        });
        if (entity.isBoosted() && target instanceof SolrInputDocument) {
            ((SolrInputDocument) target).setDocumentBoost(entity.getBoost());
        }
    }

    /**
     * Writes a wildcard Map property: each map entry becomes its own Solr field.
     * For dynamic properties the concrete field name is derived from the entry key
     * and the wildcard pattern; iterable/array values become multi-valued fields.
     *
     * @return the {@link SolrInputField}s created, one per map entry.
     */
    private Collection<SolrInputField> writeWildcardMapPropertyToTarget(Map<? super Object, ? super Object> target,
            SolrPersistentProperty persistentProperty, Map<?, ?> fieldValue) {
        TypeInformation<?> mapTypeInformation = persistentProperty.getTypeInformation().getMapValueType();
        Class<?> rawMapType = mapTypeInformation.getType();
        String fieldName = persistentProperty.getFieldName();
        Collection<SolrInputField> fields = new ArrayList<SolrInputField>();
        for (Map.Entry<?, ?> entry : fieldValue.entrySet()) {
            Object value = entry.getValue();
            String key = entry.getKey().toString();
            if (persistentProperty.isDynamicProperty()) {
                key = WildcardPosition.getAppropriate(key).createName(fieldName, key);
            }
            SolrInputField field = new SolrInputField(key);
            if (value instanceof Iterable) {
                for (Object o : (Iterable<?>) value) {
                    field.addValue(convertToSolrType(rawMapType, o), 1f);
                }
            } else {
                if (rawMapType.isArray()) {
                    for (Object o : (Object[]) value) {
                        field.addValue(convertToSolrType(rawMapType, o), 1f);
                    }
                } else {
                    field.addValue(convertToSolrType(rawMapType, value), 1f);
                }
            }
            target.put(key, field);
            fields.add(field);
        }
        return fields;
    }

    /**
     * Writes a non-wildcard property into a single {@link SolrInputField};
     * collection-like values are added as multiple field values (nulls skipped).
     */
    private Collection<SolrInputField> writeRegularPropertyToTarget(final Map<? super Object, ? super Object> target,
            SolrPersistentProperty persistentProperty, Object fieldValue) {
        SolrInputField field = new SolrInputField(persistentProperty.getFieldName());
        if (persistentProperty.isCollectionLike()) {
            Collection<?> collection = asCollection(fieldValue);
            for (Object o : collection) {
                if (o != null) {
                    field.addValue(convertToSolrType(persistentProperty.getType(), o), 1f);
                }
            }
        } else {
            field.setValue(convertToSolrType(persistentProperty.getType(), fieldValue), 1f);
        }
        target.put(persistentProperty.getFieldName(), field);
        return Collections.singleton(field);
    }

    /**
     * Converts {@code value} for Solr: simple types pass through unchanged; otherwise a
     * registered custom write converter is applied when available. Falls back to the raw value.
     */
    private Object convertToSolrType(Class<?> type, Object value) {
        if (type == null || value == null) {
            return value;
        }
        if (isSimpleType(type)) {
            return value;
        } else if (hasCustomWriteTarget(value.getClass())) {
            Class<?> targetType = getCustomWriteTargetType(value.getClass());
            if (canConvert(value.getClass(), targetType)) {
                return convert(value, targetType);
            }
        }
        return value;
    }

    /** Normalizes any value into a Collection: pass-through, array view, or singleton. */
    private static Collection<?> asCollection(Object source) {
        if (source instanceof Collection) {
            return (Collection<?>) source;
        }
        return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source);
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    /**
     * {@link PropertyValueProvider} reading property values out of a Solr document map
     * (or a raw value), handling wildcard fields, score pseudo-fields, collections,
     * and custom read conversions.
     */
    private class SolrPropertyValueProvider implements PropertyValueProvider<SolrPersistentProperty> {

        private final Object source;
        private final Object parent;

        public SolrPropertyValueProvider(Object source, Object parent) {
            this.source = source;
            this.parent = parent;
        }

        @SuppressWarnings("unchecked")
        @Override
        public <T> T getPropertyValue(SolrPersistentProperty property) {
            if (source instanceof Map<?, ?>) {
                return (T) readValue((Map<String, ?>) source, property, parent);
            }
            return readValue(source, property.getTypeInformation(), this.parent);
        }

        /**
         * Reads a property from a document map, dispatching to wildcard and score
         * handling before falling back to a plain field-name lookup.
         */
        @SuppressWarnings("unchecked")
        private <T> T readValue(Map<String, ?> value, SolrPersistentProperty property, Object parent) {
            if (value == null) {
                return null;
            }
            if (property.containsWildcard()) {
                return (T) readWildcard(value, property, parent);
            }
            if (property.isScoreProperty()) {
                return (T) readScore(value, property, parent);
            }
            return readValue(value.get(property.getFieldName()), property.getTypeInformation(), parent);
        }

        /** Reads the Solr relevance score from the reserved "score" document field. */
        @SuppressWarnings("unchecked")
        private <T> T readScore(Map<String, ?> value, SolrPersistentProperty property, Object parent) {
            return (T) value.get("score");
        }

        /**
         * Reads a raw value into the requested type: custom converters first, then
         * unwrapping {@link SolrInputField}, then collection handling, then a
         * conversion-service fallback. Returns the value as-is when nothing applies.
         */
        @SuppressWarnings("unchecked")
        private <T> T readValue(Object value, TypeInformation<?> type, Object parent) {
            if (value == null) {
                return null;
            }
            Assert.notNull(type);
            Class<?> rawType = type.getType();
            if (hasCustomReadTarget(value.getClass(), rawType)) {
                return (T) convert(value, rawType);
            }
            Object documentValue = null;
            if (value instanceof SolrInputField) {
                documentValue = ((SolrInputField) value).getValue();
            } else {
                documentValue = value;
            }
            if (documentValue instanceof Collection) {
                return (T) readCollection((Collection<?>) documentValue, type, parent);
            } else if (canConvert(documentValue.getClass(), rawType)) {
                return (T) convert(documentValue, rawType);
            }
            return (T) documentValue;
        }

        /**
         * Reads a wildcard property. Map and collection-like targets gather all
         * matching document fields; otherwise the first matching field wins.
         *
         * @return the resolved value, or {@literal null} when nothing matches.
         */
        private Object readWildcard(Map<String, ?> source, SolrPersistentProperty property, Object parent) {
            WildcardPosition wildcardPosition = WildcardPosition.getAppropriate(property.getFieldName());
            if (property.isMap()) {
                return readWildcardMap(source, property, parent, wildcardPosition);
            } else if (property.isCollectionLike()) {
                return readWildcardCollectionLike(source, property, parent, wildcardPosition);
            } else {
                for (Map.Entry<String, ?> potentialMatch : source.entrySet()) {
                    if (wildcardPosition.match(property.getFieldName(), potentialMatch.getKey())) {
                        return getValue(property, potentialMatch.getValue(), parent);
                    }
                }
            }
            return null;
        }

        /**
         * Collects all values of document fields matching the wildcard into a flat list
         * (flattening iterable/collection field values), converted to the property's
         * component type.
         *
         * @return {@literal null} when no field matched; an array when the property is an array.
         */
        private Object readWildcardCollectionLike(Map<String, ?> source, SolrPersistentProperty property, Object parent,
                WildcardPosition wildcardPosition) {
            Class<?> genericTargetType = property.getComponentType() != null ? property.getComponentType()
                    : Object.class;
            List<Object> values = new ArrayList<Object>();
            for (Map.Entry<String, ?> potentialMatch : source.entrySet()) {
                if (!wildcardPosition.match(property.getFieldName(), potentialMatch.getKey())) {
                    continue;
                }
                Object value = potentialMatch.getValue();
                if (value instanceof Iterable) {
                    for (Object o : (Iterable<?>) value) {
                        values.add(readValue(property, o, parent, genericTargetType));
                    }
                } else {
                    Object o = readValue(property, potentialMatch.getValue(), parent, genericTargetType);
                    if (o instanceof Collection) {
                        values.addAll((Collection<?>) o);
                    } else {
                        values.add(o);
                    }
                }
            }
            return values.isEmpty() ? null : (property.isArray() ? values.toArray() : values);
        }

        /**
         * Reads matching document fields into a Map keyed by field name (or by the
         * extracted dynamic part for dynamic properties). Values are adapted to the
         * map's declared value type, wrapping in lists/arrays where the value type
         * requires it and rejecting incompatible iterables.
         *
         * @return {@literal null} when no field matched.
         */
        private Object readWildcardMap(Map<String, ?> source, SolrPersistentProperty property, Object parent,
                WildcardPosition wildcardPosition) {
            TypeInformation<?> mapTypeInformation = property.getTypeInformation().getMapValueType();
            Class<?> rawMapType = mapTypeInformation.getType();
            Class<?> genericTargetType;
            if (mapTypeInformation.getTypeArguments() != null && !mapTypeInformation.getTypeArguments().isEmpty()) {
                genericTargetType = mapTypeInformation.getTypeArguments().get(0).getType();
            } else {
                genericTargetType = Object.class;
            }
            Map<String, Object> values;
            // Preserve insertion order only when the declared property type demands it.
            if (LinkedHashMap.class.isAssignableFrom(property.getActualType())) {
                values = new LinkedHashMap<String, Object>();
            } else {
                values = new HashMap<String, Object>();
            }
            for (Map.Entry<String, ?> potentialMatch : source.entrySet()) {
                String key = potentialMatch.getKey();
                if (!wildcardPosition.match(property.getFieldName(), key)) {
                    continue;
                }
                if (property.isDynamicProperty()) {
                    key = wildcardPosition.extractName(property.getFieldName(), key);
                }
                Object value = potentialMatch.getValue();
                if (value instanceof Iterable) {
                    if (rawMapType.isArray() || ClassUtils.isAssignable(rawMapType, value.getClass())) {
                        List<Object> nestedValues = new ArrayList<Object>();
                        for (Object o : (Iterable<?>) value) {
                            nestedValues.add(readValue(property, o, parent, genericTargetType));
                        }
                        values.put(key, (rawMapType.isArray() ? nestedValues.toArray() : nestedValues));
                    } else {
                        throw new IllegalArgumentException(
                                "Incompartible types found. Expected " + rawMapType + " for " + property.getName()
                                        + " with name " + property.getFieldName() + ", but found " + value.getClass());
                    }
                } else {
                    if (rawMapType.isArray() || ClassUtils.isAssignable(rawMapType, List.class)) {
                        // Single value but list/array expected: wrap it.
                        ArrayList<Object> singletonArrayList = new ArrayList<Object>(1);
                        Object read = readValue(property, value, parent, genericTargetType);
                        singletonArrayList.add(read);
                        values.put(key, (rawMapType.isArray() ? singletonArrayList.toArray() : singletonArrayList));
                    } else {
                        values.put(key, getValue(property, value, parent));
                    }
                }
            }
            return values.isEmpty() ? null : values;
        }

        /**
         * Reads a single element and, when a concrete target type is given, applies a
         * conversion-service conversion if one is available.
         */
        private Object readValue(SolrPersistentProperty property, Object o, Object parent, Class<?> target) {
            Object value = getValue(property, o, parent);
            if (value == null || target == null || target.equals(Object.class)) {
                return value;
            }
            if (canConvert(value.getClass(), target)) {
                return convert(value, target);
            }
            return value;
        }

        /**
         * Reads a collection of document values into the declared collection (or array)
         * type, converting each element to the component type.
         */
        private Object readCollection(Collection<?> source, TypeInformation<?> type, Object parent) {
            Assert.notNull(type);
            Class<?> collectionType = type.getType();
            if (CollectionUtils.isEmpty(source)) {
                return source;
            }
            // Non-collection targets (e.g. arrays) are built via a List intermediate.
            collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
            Collection<Object> items;
            if (type.getType().isArray()) {
                items = new ArrayList<Object>();
            } else {
                items = CollectionFactory.createCollection(collectionType, source.size());
            }
            TypeInformation<?> componentType = type.getComponentType();
            Iterator<?> it = source.iterator();
            while (it.hasNext()) {
                items.add(readValue(it.next(), componentType, parent));
            }
            return type.getType().isArray() ? convertItemsToArrayOfType(type, items) : items;
        }

        /** Copies the items into a newly allocated array of the type's actual component type. */
        private Object convertItemsToArrayOfType(TypeInformation<?> type, Collection<Object> items) {
            Object[] newArray = (Object[]) java.lang.reflect.Array.newInstance(type.getActualType().getType(),
                    items.size());
            Object[] itemsArray = items.toArray();
            for (int i = 0; i < itemsArray.length; i++) {
                newArray[i] = itemsArray[i];
            }
            return newArray;
        }
    }
}
| |
package fairies.old.client;
import org.lwjgl.opengl.GL11;
import net.minecraft.client.Minecraft;
/**
 * Renderer for fairy entities (legacy Minecraft/LWJGL modding code).
 * Draws the base body model plus layered render passes: a withered-skin overlay,
 * translucent eyes whose alpha tracks health, and job/rogue clothing props.
 * Also renders the fairy's held item and a floating name label.
 * <p>
 * NOTE(review): relies on obfuscation-era Minecraft classes (RenderLiving,
 * EntityLiving, Tessellator, etc.) not visible here — semantics assumed from usage.
 */
public class FRY_RenderFairy extends RenderLiving
{
    /**
     * @param modelfairy the base body model (also used for held-item arm transforms)
     * @param f shadow size passed through to {@code RenderLiving}
     */
    public FRY_RenderFairy(FRY_ModelFairy modelfairy, float f)
    {
        super(modelfairy, f);
        fairyModel = modelfairy;
        fairyModel2 = new FRY_ModelFairyProps();
        fairyModel3 = new FRY_ModelFairyEyes();
        // Slightly inflated copy of the body model used as the withered-skin overlay.
        fairyModel4 = new FRY_ModelFairy(0.015625F);
        fairyModel5 = new FRY_ModelFairyProps2();
    }

    /**
     * Syncs per-entity state (animation, flight, crown, sneaking, wing/part variants,
     * hair) onto the base model, then scales the fairy down and shifts it when sneaking.
     */
    protected void preRenderCallback(EntityLiving entityliving, float f)
    {
        FRY_EntityFairy fairy = (FRY_EntityFairy)entityliving;
        float f1 = 0.875F; // fairies render at 7/8 scale
        fairyModel.sinage = fairy.sinage;
        fairyModel.flymode = fairy.flymode();
        fairyModel.showCrown = fairy.tamed() || fairy.queen();
        fairyModel.isSneak = fairy.getFlag(1); // flag 1 presumably = sneaking — confirm
        fairyModel.scoutWings = fairy.scout();
        fairyModel.rogueParts = fairy.rogue();
        fairyModel.hairType = fairy.hairType();
        GL11.glScalef(f1, f1, f1);
        if (entityliving.getFlag(1))
        {
            GL11.glTranslatef(0F, (5F / 16F), 0F);
        }
    }

    /**
     * Renders the held item attached to the right arm, with distinct transforms for
     * 3D-rendered blocks, "full 3D" items (tools), and flat sprite items; potions get
     * a two-pass tinted render (colored liquid, then untinted bottle).
     */
    protected void renderEquippedItems(EntityLiving entityliving, float f)
    {
        ItemStack itemstack = entityliving.getHeldItem();
        if (itemstack != null)
        {
            GL11.glPushMatrix();
            // Move into the right-arm coordinate space before item transforms.
            fairyModel.bipedRightArm.postRender(0.0625F);
            GL11.glTranslatef(0.0F, 0.1F, 0.0F);
            if (itemstack.itemID < 256 && RenderBlocks.renderItemIn3d(Block.blocksList[itemstack.itemID].getRenderType()))
            {
                // Block item rendered as a small 3D cube in hand.
                float f1 = 0.5F;
                GL11.glTranslatef(0.0F, 0.1875F, -0.3125F);
                f1 *= 0.75F;
                GL11.glRotatef(20F, 1.0F, 0.0F, 0.0F);
                GL11.glRotatef(45F, 0.0F, 1.0F, 0.0F);
                GL11.glScalef(f1, -f1, f1);
            }
            else if (Item.itemsList[itemstack.itemID].isFull3D())
            {
                // Tool-like item held at an angle.
                float f2 = 0.625F;
                GL11.glTranslatef(0.0F, 0.1875F, 0.0F);
                GL11.glScalef(f2, -f2, f2);
                GL11.glRotatef(-100F, 1.0F, 0.0F, 0.0F);
                GL11.glRotatef(45F, 0.0F, 1.0F, 0.0F);
            }
            else
            {
                // Flat sprite item.
                float f3 = 0.375F;
                GL11.glTranslatef(0.25F, 0.1875F, -0.1875F);
                GL11.glScalef(f3, f3, f3);
                GL11.glRotatef(60F, 0.0F, 0.0F, 1.0F);
                GL11.glRotatef(-90F, 1.0F, 0.0F, 0.0F);
                GL11.glRotatef(20F, 0.0F, 0.0F, 1.0F);
            }
            if (itemstack.itemID == Item.potion.shiftedIndex)
            {
                // Pass 0 tinted with the potion's damage-derived RGB, pass 1 untinted (bottle).
                int j = itemstack.getItem().getColorFromDamage(itemstack.getItemDamage(), 0);
                float f9 = (float)(j >> 16 & 0xff) / 255F;
                float f10 = (float)(j >> 8 & 0xff) / 255F;
                float f11 = (float)(j & 0xff) / 255F;
                GL11.glColor4f(f9, f10, f11, 1.0F);
                renderManager.itemRenderer.renderItem(entityliving, itemstack, 0);
                GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
                renderManager.itemRenderer.renderItem(entityliving, itemstack, 1);
            }
            else
            {
                renderManager.itemRenderer.renderItem(entityliving, itemstack, 0);
            }
            GL11.glPopMatrix();
        }
    }

    /**
     * Configures render pass {@code i} for the given fairy.
     * Pass 0: translucent withered-skin overlay (withered/rogue fairies; queens get
     * skin-dependent textures). Pass 1: eye overlay, fading out as health drops.
     * Pass 2: clothing props (rogue vs. job outfit) for non-queen, non-normal fairies.
     *
     * @return 1 to render the configured pass, -1 to skip it.
     */
    protected int setFairyBrightness(FRY_EntityFairy fairy, int i, float f)
    {
        if (i == 0 && (fairy.withered() || fairy.rogue())) //Render Withered Skin.
        {
            float transp = 0.7F;
            if (fairy.queen())
            {
                if (fairy.getSkin() > 1)
                {
                    loadTexture("/fairy/fairyWithered3.png");
                }
                else
                {
                    loadTexture("/fairy/fairyWithered2.png");
                }
            }
            else
            {
                loadTexture("/fairy/fairyWithered1.png");
            }
            setRenderPassModel(fairyModel4);
            // Mirror the base model's animation state onto the overlay model.
            fairyModel4.sinage = fairy.sinage;
            fairyModel4.flymode = fairy.flymode();
            fairyModel4.showCrown = fairy.tamed() || fairy.queen();
            fairyModel4.isSneak = fairy.getFlag(1);
            fairyModel4.scoutWings = fairy.scout();
            fairyModel4.onGround = fairyModel.onGround;
            fairyModel4.rogueParts = fairy.rogue();
            fairyModel4.hairType = fairy.hairType();
            GL11.glColor4f(0.7F, 0.7F, 0.7F, transp);
            GL11.glEnable(GL11.GL_BLEND);
            GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
            return 1;
        }
        else if (i == 1) //Render Fairy Eyes.
        {
            loadTexture(fairy.getTexture(fairy.getSkin()));
            // Eyes fade with lost health; skip entirely when nearly invisible.
            float transp = 1.0F - ((float)fairy.fairyHealth() / (float)(fairy.getMaxHealth()));
            if (transp < 0.1F)
            {
                return -1;
            }
            setRenderPassModel(fairyModel3);
            fairyModel3.flymode = fairy.flymode();
            GL11.glColor4f(1.0F, 1.0F, 1.0F, transp);
            GL11.glEnable(GL11.GL_BLEND);
            GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
            return 1;
        }
        else if (i == 2 && !fairy.queen() && !fairy.normal()) //Render Armor Overlay.
        {
            GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
            GL11.glDisable(GL11.GL_BLEND);
            if (fairy.rogue())
            {
                setRenderPassModel(fairyModel5);
                loadTexture("/fairy/fairyProps2.png");
                fairyModel5.flymode = fairy.flymode();
                fairyModel5.retract = 0F;
                fairyModel5.isSneak = fairy.getFlag(1);
                fairyModel5.sinage = fairy.sinage;
                fairyModel5.onGround = fairyModel.onGround;
                fairyModel5.venom = fairy.canHeal();
            }
            else
            {
                setRenderPassModel(fairyModel2);
                loadTexture("/fairy/fairyProps.png");
                fairyModel2.flymode = fairy.flymode();
                fairyModel2.jobType = fairy.getJob() - 1;
                fairyModel2.isSneak = fairy.getFlag(1);
                fairyModel2.sinage = fairy.sinage;
                fairyModel2.onGround = fairyModel.onGround;
            }
            return 1;
        }
        else
        {
            // No overlay for this pass; make sure blending is reset.
            GL11.glDisable(GL11.GL_BLEND);
            return -1;
        }
    }

    /** Hook for extra per-entity rendering: draws the fairy's name tag. */
    protected void passSpecialRender(EntityLiving entityliving, double d, double d1, double d2)
    {
        renderFairyName((FRY_EntityFairy)entityliving, d, d1, d2);
    }

    /**
     * Renders the fairy's name label when the GUI is visible, the fairy is not the
     * viewing player, it is within 12 blocks, and it has a display name. The label
     * hovers higher when the fairy is flying.
     */
    protected void renderFairyName(FRY_EntityFairy fairy, double d, double d1, double d2)
    {
        if (Minecraft.isGuiEnabled() && fairy != renderManager.livingPlayer)
        {
            float f = 1.6F;
            float f1 = 0.01666667F * f;
            float f2 = fairy.getDistanceToEntity(renderManager.livingPlayer);
            float f3 = 12F; // max label distance in blocks
            if (f2 < f3)
            {
                String s = fairy.getDisplayName();
                if (s != null)
                {
                    renderLivingLabel(fairy, s, d, d1 - (fairy.flymode() ? 1.125D : 0.825D), d2, 64);
                }
            }
        }
    }

    /** Delegates pass selection to {@link #setFairyBrightness}. */
    protected int shouldRenderPass(EntityLiving entityliving, int i, float f)
    {
        return setFairyBrightness((FRY_EntityFairy)entityliving, i, f);
    }

    /**
     * Draws a billboarded text label above the entity: a translucent black quad
     * background, a dim depth-ignoring text pass (visible through walls), then a
     * full-brightness depth-tested pass.
     *
     * @param par9 maximum render distance; beyond it the label is skipped
     */
    protected void renderLivingLabel(EntityLiving par1EntityLiving, String par2Str, double par3, double par5, double par7, int par9)
    {
        float f = par1EntityLiving.getDistanceToEntity(renderManager.livingPlayer);
        if (f > (float)par9)
        {
            return;
        }
        FontRenderer fontrenderer = getFontRendererFromRenderManager();
        float f1 = 1.6F;
        float f2 = 0.01666667F * f1;
        GL11.glPushMatrix();
        GL11.glTranslatef((float)par3 + 0.0F, (float)par5 + 2.3F, (float)par7);
        GL11.glNormal3f(0.0F, 1.0F, 0.0F);
        // Face the camera (billboard) by undoing the player's view rotation.
        GL11.glRotatef(-renderManager.playerViewY, 0.0F, 1.0F, 0.0F);
        GL11.glRotatef(renderManager.playerViewX, 1.0F, 0.0F, 0.0F);
        GL11.glScalef(-f2, -f2, f2);
        GL11.glDisable(GL11.GL_LIGHTING);
        GL11.glDepthMask(false);
        GL11.glDisable(GL11.GL_DEPTH_TEST);
        GL11.glEnable(GL11.GL_BLEND);
        GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
        Tessellator tessellator = Tessellator.instance;
        byte byte0 = 0; // vertical offset of the label text
        GL11.glDisable(GL11.GL_TEXTURE_2D);
        tessellator.startDrawingQuads();
        int i = fontrenderer.getStringWidth(fontrenderer.stripColorCodes(par2Str)) / 2;
        // Translucent black background quad behind the text.
        tessellator.setColorRGBA_F(0.0F, 0.0F, 0.0F, 0.25F);
        tessellator.addVertex(-i - 1, -1 + byte0, 0.0D);
        tessellator.addVertex(-i - 1, 8 + byte0, 0.0D);
        tessellator.addVertex(i + 1, 8 + byte0, 0.0D);
        tessellator.addVertex(i + 1, -1 + byte0, 0.0D);
        tessellator.draw();
        GL11.glEnable(GL11.GL_TEXTURE_2D);
        // Dim pass drawn without depth testing so the name shows through geometry.
        fontrenderer.drawString(par2Str, -fontrenderer.getStringWidth(fontrenderer.stripColorCodes(par2Str)) / 2, byte0, 0x20ffffff);
        GL11.glEnable(GL11.GL_DEPTH_TEST);
        GL11.glDepthMask(true);
        // Bright pass drawn normally on top.
        fontrenderer.drawString(par2Str, -fontrenderer.getStringWidth(fontrenderer.stripColorCodes(par2Str)) / 2, byte0, -1);
        GL11.glEnable(GL11.GL_LIGHTING);
        GL11.glDisable(GL11.GL_BLEND);
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        GL11.glPopMatrix();
    }

    protected FRY_ModelFairy fairyModel, fairyModel4; //Body and withered overlay
    protected FRY_ModelFairyProps fairyModel2; //Clothes and stuff
    protected FRY_ModelFairyEyes fairyModel3; //Eyes
    protected FRY_ModelFairyProps2 fairyModel5; //Rogue Clothes
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cxf;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import javax.xml.ws.WebFault;
import org.w3c.dom.Element;
import org.apache.camel.AsyncCallback;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.component.cxf.common.message.CxfConstants;
import org.apache.camel.impl.DefaultConsumer;
import org.apache.camel.util.ObjectHelper;
import org.apache.cxf.continuations.Continuation;
import org.apache.cxf.continuations.ContinuationProvider;
import org.apache.cxf.endpoint.Server;
import org.apache.cxf.frontend.ServerFactoryBean;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.message.Exchange;
import org.apache.cxf.message.FaultMode;
import org.apache.cxf.message.Message;
import org.apache.cxf.service.invoker.Invoker;
import org.apache.cxf.service.model.BindingOperationInfo;
import org.apache.cxf.ws.addressing.ContextUtils;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Consumer of exchanges for a service in CXF. CxfConsumer acts a CXF
* service to receive requests, convert them, and forward them to Camel
* route for processing. It is also responsible for converting and sending
* back responses to CXF client.
*
* @version
*/
public class CxfConsumer extends DefaultConsumer {
private static final Logger LOG = LoggerFactory.getLogger(CxfConsumer.class);
private Server server;
private CxfEndpoint cxfEndpoint;
public CxfConsumer(final CxfEndpoint endpoint, Processor processor) throws Exception {
super(endpoint, processor);
cxfEndpoint = endpoint;
// create server
ServerFactoryBean svrBean = endpoint.createServerFactoryBean();
svrBean.setInvoker(new Invoker() {
// we receive a CXF request when this method is called
public Object invoke(Exchange cxfExchange, Object o) {
LOG.trace("Received CXF Request: {}", cxfExchange);
Continuation continuation;
if (!endpoint.isSynchronous() && isAsyncInvocationSupported(cxfExchange)
&& (continuation = getContinuation(cxfExchange)) != null) {
LOG.trace("Calling the Camel async processors.");
return asyncInvoke(cxfExchange, continuation);
} else {
LOG.trace("Calling the Camel sync processors.");
return syncInvoke(cxfExchange);
}
}
// NOTE this code cannot work with CXF 2.2.x and JMSContinuation
// as it doesn't break out the interceptor chain when we call it
private Object asyncInvoke(Exchange cxfExchange, final Continuation continuation) {
synchronized (continuation) {
if (continuation.isNew()) {
final org.apache.camel.Exchange camelExchange = prepareCamelExchange(cxfExchange);
// Now we don't set up the timeout value
LOG.trace("Suspending continuation of exchangeId: {}", camelExchange.getExchangeId());
// The continuation could be called before the suspend is called
continuation.suspend(cxfEndpoint.getContinuationTimeout());
// use the asynchronous API to process the exchange
getAsyncProcessor().process(camelExchange, new AsyncCallback() {
public void done(boolean doneSync) {
// make sure the continuation resume will not be called before the suspend method in other thread
synchronized (continuation) {
LOG.trace("Resuming continuation of exchangeId: {}", camelExchange.getExchangeId());
// resume processing after both, sync and async callbacks
continuation.setObject(camelExchange);
continuation.resume();
}
}
});
} else if (continuation.isResumed()) {
org.apache.camel.Exchange camelExchange = (org.apache.camel.Exchange)continuation.getObject();
try {
setResponseBack(cxfExchange, camelExchange);
} finally {
CxfConsumer.this.doneUoW(camelExchange);
}
}
}
return null;
}
private Continuation getContinuation(Exchange cxfExchange) {
ContinuationProvider provider =
(ContinuationProvider)cxfExchange.getInMessage().get(ContinuationProvider.class.getName());
Continuation continuation = provider == null ? null : provider.getContinuation();
// Make sure we don't return the JMSContinuation, as it doesn't support the Continuation we wants
// Don't want to introduce the dependency of cxf-rt-transprot-jms here
if (continuation != null && continuation.getClass().getName().equals("org.apache.cxf.transport.jms.continuations.JMSContinuation")) {
return null;
} else {
return continuation;
}
}
private Object syncInvoke(Exchange cxfExchange) {
org.apache.camel.Exchange camelExchange = prepareCamelExchange(cxfExchange);
try {
try {
LOG.trace("Processing +++ START +++");
// send Camel exchange to the target processor
getProcessor().process(camelExchange);
} catch (Exception e) {
throw new Fault(e);
}
LOG.trace("Processing +++ END +++");
setResponseBack(cxfExchange, camelExchange);
} finally {
doneUoW(camelExchange);
}
// response should have been set in outMessage's content
return null;
}
/**
 * Creates a Camel exchange from the inbound CXF exchange: carries over the
 * binding operation info, derives the MEP (InOnly for one-way operations),
 * records data format / MTOM / protocol-header-merge settings, binds the CXF
 * request message and JAX-WS context, and opens a unit of work.
 *
 * @param cxfExchange the inbound CXF exchange
 * @return the prepared Camel exchange
 * @throws Fault if the unit of work cannot be created
 */
private org.apache.camel.Exchange prepareCamelExchange(Exchange cxfExchange) {
    // get CXF binding
    CxfEndpoint endpoint = (CxfEndpoint)getEndpoint();
    CxfBinding binding = endpoint.getCxfBinding();
    // create a Camel exchange, the default MEP is InOut
    org.apache.camel.Exchange camelExchange = endpoint.createExchange();
    DataFormat dataFormat = endpoint.getDataFormat();
    BindingOperationInfo boi = cxfExchange.getBindingOperationInfo();
    // make sure the "boi" stays wrapped in PAYLOAD mode
    if (boi != null && dataFormat == DataFormat.PAYLOAD && boi.isUnwrapped()) {
        boi = boi.getWrappedOperation();
        cxfExchange.put(BindingOperationInfo.class, boi);
    }
    if (boi != null) {
        camelExchange.setProperty(BindingOperationInfo.class.getName(), boi);
        LOG.trace("Set exchange property: BindingOperationInfo: {}", boi);
        // set the message exchange pattern with the boi
        if (boi.getOperationInfo().isOneWay()) {
            camelExchange.setPattern(ExchangePattern.InOnly);
        }
    } else {
        // no operation info: fall back to the endpoint's configured pattern
        if (cxfEndpoint.getExchangePattern().equals(ExchangePattern.InOnly)) {
            camelExchange.setPattern(ExchangePattern.InOnly);
        }
    }
    // set data format mode in Camel exchange
    camelExchange.setProperty(CxfConstants.DATA_FORMAT_PROPERTY, dataFormat);
    LOG.trace("Set Exchange property: {}={}", DataFormat.class.getName(), dataFormat);
    camelExchange.setProperty(Message.MTOM_ENABLED, String.valueOf(endpoint.isMtomEnabled()));
    if (endpoint.getMergeProtocolHeaders()) {
        camelExchange.setProperty(CxfConstants.CAMEL_CXF_PROTOCOL_HEADERS_MERGED, Boolean.TRUE);
    }
    // bind the CXF request into the Camel exchange
    binding.populateExchangeFromCxfRequest(cxfExchange, camelExchange);
    // extract the javax.xml.ws context
    Map<String, Object> context = new HashMap<String, Object>();
    binding.extractJaxWsContext(cxfExchange, context);
    // put the context into camelExchange
    camelExchange.setProperty(CxfConstants.JAXWS_CONTEXT, context);
    // we want to handle the UoW
    try {
        CxfConsumer.this.createUoW(camelExchange);
    } catch (Exception e) {
        // use the class-level LOG for consistency with the rest of this consumer
        LOG.error("Error processing request", e);
        throw new Fault(e);
    }
    return camelExchange;
}
/**
 * Copies the result of the processed Camel exchange back onto the CXF
 * exchange. Raises a {@link Fault} (via checkFailure) if the Camel exchange
 * failed, both before and after the response binding, since a fault may only
 * surface during type conversion.
 *
 * @param cxfExchange   the CXF exchange to populate with the response
 * @param camelExchange the Camel exchange that was processed by the route
 */
@SuppressWarnings("unchecked")
private void setResponseBack(Exchange cxfExchange, org.apache.camel.Exchange camelExchange) {
    CxfEndpoint endpoint = (CxfEndpoint)getEndpoint();
    CxfBinding binding = endpoint.getCxfBinding();
    checkFailure(camelExchange, cxfExchange);
    binding.populateCxfResponseFromExchange(camelExchange, cxfExchange);
    // check failure again as fault could be discovered by converter
    checkFailure(camelExchange, cxfExchange);
    // copy the javax.xml.ws context headers back
    binding.copyJaxWsContext(cxfExchange, (Map<String, Object>)camelExchange.getProperty(CxfConstants.JAXWS_CONTEXT));
}
/**
 * Translates a failed Camel exchange into a CXF {@link Fault} and throws it.
 * A fault body on the message takes precedence over the exchange exception.
 * Existing {@link Fault} instances are rethrown as checked application
 * faults; any other throwable is wrapped in a new Fault, optionally with a
 * detail element derived from a JAX-WS {@code @WebFault} annotation.
 * Does nothing when the exchange did not fail.
 *
 * @param camelExchange the processed Camel exchange to inspect for failure
 * @param cxfExchange   the CXF exchange whose in-message records the fault mode
 * @throws Fault when the Camel exchange carries a fault or exception
 */
private void checkFailure(org.apache.camel.Exchange camelExchange, Exchange cxfExchange) throws Fault {
    final Throwable t;
    if (camelExchange.isFailed()) {
        // prefer the fault body over the exchange exception
        org.apache.camel.Message camelMsg = camelExchange.hasOut() ? camelExchange.getOut() : camelExchange.getIn();
        if (camelMsg.isFault()) {
            t = camelMsg.getBody(Throwable.class);
        } else {
            t = camelExchange.getException();
        }
        // default to unchecked; upgraded below when t is already a CXF Fault
        cxfExchange.getInMessage().put(FaultMode.class, FaultMode.UNCHECKED_APPLICATION_FAULT);
        if (t instanceof Fault) {
            cxfExchange.getInMessage().put(FaultMode.class, FaultMode.CHECKED_APPLICATION_FAULT);
            throw (Fault)t;
        } else if (t != null) {
            // This is not a CXF Fault. Build the CXF Fault manually.
            Fault fault = new Fault(t);
            if (fault.getMessage() == null) {
                // The Fault has no Message. This is the case if it has
                // no message, for example was a NullPointerException.
                fault.setMessage(t.getClass().getSimpleName());
            }
            WebFault faultAnnotation = t.getClass().getAnnotation(WebFault.class);
            Object faultInfo = null;
            try {
                Method method = t.getClass().getMethod("getFaultInfo");
                faultInfo = method.invoke(t, new Object[0]);
            } catch (Exception e) {
                // do nothing here: no getFaultInfo() simply means there is no fault bean
            }
            if (faultAnnotation != null && faultInfo == null) {
                // t has a JAX-WS WebFault annotation, which describes
                // in detail the Web Service Fault that should be thrown. Add the
                // detail.
                Element detail = fault.getOrCreateDetail();
                Element faultDetails = detail.getOwnerDocument()
                    .createElementNS(faultAnnotation.targetNamespace(), faultAnnotation.name());
                detail.appendChild(faultDetails);
            }
            throw fault;
        }
    }
}
});
server = svrBean.create();
// Apply the server configurer if it is possible
if (cxfEndpoint.getCxfEndpointConfigurer() != null) {
cxfEndpoint.getCxfEndpointConfigurer().configureServer(server);
}
if (ObjectHelper.isNotEmpty(endpoint.getPublishedEndpointUrl())) {
server.getEndpoint().getEndpointInfo().setProperty("publishedEndpointUrl", endpoint.getPublishedEndpointUrl());
}
}
/**
 * Starts this consumer, then starts the underlying CXF server endpoint so it
 * begins accepting requests.
 */
@Override
protected void doStart() throws Exception {
    super.doStart();
    server.start();
}
/**
 * Stops the underlying CXF server endpoint first so no new requests arrive,
 * then stops this consumer (reverse order of {@link #doStart()}).
 */
@Override
protected void doStop() throws Exception {
    server.stop();
    super.doStop();
}
/**
 * Reflectively reads the WS-Addressing ReplyTo endpoint reference from the
 * addressing-properties object (accessed via reflection to avoid a
 * compile-time dependency on its concrete type).
 *
 * @param o the inbound addressing properties
 * @return the ReplyTo endpoint reference
 * @throws Fault wrapping any reflection failure
 */
private EndpointReferenceType getReplyTo(Object o) {
    try {
        Method replyTo = o.getClass().getMethod("getReplyTo");
        return (EndpointReferenceType) replyTo.invoke(o);
    } catch (Throwable t) {
        throw new Fault(t);
    }
}
/**
 * Decides whether this request may be invoked asynchronously via a CXF
 * continuation.
 *
 * @param cxfExchange the inbound CXF exchange
 * @return {@code false} for decoupled WS-Addressing endpoints (non-generic
 *         ReplyTo), {@code true} otherwise
 */
protected boolean isAsyncInvocationSupported(Exchange cxfExchange) {
    Message cxfMessage = cxfExchange.getInMessage();
    Object addressingProperties = cxfMessage.get(CxfConstants.WSA_HEADERS_INBOUND);
    if (addressingProperties == null) {
        // we assume it should support AsyncInvocation out of the box
        return true;
    }
    // A non-generic ReplyTo means a decoupled endpoint: the transport already
    // switches threads and uses executors, so it won't block and the
    // SuspendedInvocationException can't be caught by the underlying
    // transport. Use the sync invocation in that case.
    return ContextUtils.isGenericAddress(getReplyTo(addressingProperties));
}
/**
 * @return the underlying CXF {@link Server} created for this consumer
 */
public Server getServer() {
    return server;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.commons.json;
import junit.framework.TestCase;
import org.apache.jackrabbit.oak.commons.StopWatch;
/**
* Test the Jsop tokenizer and builder.
*/
public class JsopTest extends TestCase {

    // run the micro-benchmark
    public static void main(String... args) {
        for (int k = 0; k < 5; k++) {
            // String s = "Hello World Hello World Hello World Hello World Hello World Hello World ";
            String s = "Hello \"World\" Hello \"World\" Hello \"World\" Hello \"World\" Hello \"World\" Hello \"World\" ";
            StopWatch timer = new StopWatch();
            int t2 = 0;
            for (int i = 0; i < 1000000; i++) {
                t2 += JsopBuilder.encode(s).length();
            }
            // print the accumulator so the JIT can't eliminate the loop
            System.out.println(timer.seconds() + " dummy: " + t2);
        }
        // old: not escaped: 5691 ms; escaped: 10609 ms
        // new: not escaped: 3931 ms; escaped: 11001 ms
    }

    // "\/Date(...)\/" values must round-trip with their escaping preserved
    public void testDataType() {
        String dateString = new JsopBuilder().
                key("string").value("/Date(0)/").
                key("date").encodedValue("\"\\/Date(0)\\/\"").
                toString();
        assertEquals(
                "\"string\":\"/Date(0)/\"," +
                "\"date\":\"\\/Date(0)\\/\"",
                dateString);
        JsopTokenizer t = new JsopTokenizer(dateString);
        assertEquals("string", t.readString());
        t.read(':');
        assertEquals("/Date(0)/", t.readString());
        assertEquals("/Date(0)/", t.getEscapedToken());
        t.read(',');
        assertEquals("date", t.readString());
        t.read(':');
        assertEquals("/Date(0)/", t.readString());
        // the escaped token keeps the original backslash escapes
        assertEquals("\\/Date(0)\\/", t.getEscapedToken());
    }

    // literals null/true/false are recognized as their own token types
    public void testNullTrueFalse() {
        JsopTokenizer t;
        t = new JsopTokenizer("null, 1, null, true, false");
        assertEquals(null, t.read(JsopReader.NULL));
        assertEquals(",", t.read(','));
        assertEquals("1", t.read(JsopReader.NUMBER));
        assertEquals(",", t.read(','));
        assertEquals(null, t.read(JsopReader.NULL));
        assertEquals(",", t.read(','));
        assertEquals("true", t.read(JsopReader.TRUE));
        assertEquals(",", t.read(','));
        assertEquals("false", t.read(JsopReader.FALSE));
        t = new JsopTokenizer("true, false");
        assertEquals("true", t.read(JsopReader.TRUE));
        assertEquals(",", t.read(','));
        assertEquals("false", t.read(JsopReader.FALSE));
        t = new JsopTokenizer("false, true");
        assertEquals("false", t.read(JsopReader.FALSE));
        assertEquals(",", t.read(','));
        assertEquals("true", t.read(JsopReader.TRUE));
    }

    // setLineLength forces a newline once the builder output exceeds the limit
    public void testLineLength() {
        JsopBuilder buff = new JsopBuilder();
        buff.key("hello").value("world");
        assertEquals("\"hello\":\"world\"", buff.toString());
        assertEquals(15, buff.length());
        buff = new JsopBuilder();
        buff.setLineLength(10);
        buff.key("hello").value("world");
        assertEquals("\"hello\":\n\"world\"", buff.toString());
        assertEquals(16, buff.length());
    }

    // number tokens including negative numbers mixed with operator characters
    public void testNumber() {
        JsopTokenizer t = new JsopTokenizer("9/3:-3-:-/- 3");
        assertEquals("9", t.read(JsopReader.NUMBER));
        t.read('/');
        assertEquals("3", t.read(JsopReader.NUMBER));
        t.read(':');
        assertEquals("-3", t.read(JsopReader.NUMBER));
        t.read('-');
        t.read(':');
        t.read('-');
        t.read('/');
        t.read('-');
        t.read(JsopReader.NUMBER);
    }

    // readRawValue returns unparsed nested values and rejects malformed input
    public void testRawValue() {
        JsopTokenizer t;
        t = new JsopTokenizer("");
        try {
            t.readRawValue();
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        t = new JsopTokenizer("[unclosed");
        try {
            t.readRawValue();
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        t = new JsopTokenizer("{\"x\": [1], null, true, {\"y\": 1}, [1, 2], [], [[1]], +error+}");
        t.read('{');
        assertEquals("x", t.readString());
        t.read(':');
        assertEquals("[1]", t.readRawValue());
        t.read(',');
        assertEquals("null", t.readRawValue());
        t.read(',');
        assertEquals("true", t.readRawValue());
        t.read(',');
        assertEquals("{\"y\": 1}", t.readRawValue());
        t.read(',');
        assertEquals("[1, 2]", t.readRawValue());
        t.read(',');
        assertEquals("[]", t.readRawValue());
        t.read(',');
        assertEquals("[[1]]", t.readRawValue());
        t.read(',');
        try {
            t.readRawValue();
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    // decode/decodeQuoted error handling, escape sequences and control chars;
    // see prettyPrint for the meaning of /error/, /id:.../ markers used below
    public void testTokenizer() {
        assertEquals("test", JsopTokenizer.decode("test"));
        assertEquals("test", JsopTokenizer.decodeQuoted("\"test\""));
        assertEquals("hello\n" + "world", JsopTokenizer.decodeQuoted("\"hello\\n" + "world\""));
        try {
            JsopTokenizer.decodeQuoted("test");
            fail();
        } catch (IllegalArgumentException e) {
            // ok
        }
        try {
            JsopTokenizer.decode("test\\");
            fail();
        } catch (IllegalArgumentException e) {
            // ok
        }
        try {
            JsopTokenizer.decode("wrong\\uxxxx");
            fail();
        } catch (IllegalArgumentException e) {
            // ok
        }
        try {
            JsopTokenizer.decode("wrong\\m");
            fail();
        } catch (IllegalArgumentException e) {
            // ok
        }
        test("/error/", "\"\\");
        test("/error/1", ".1");
        assertEquals("x", new JsopTokenizer("x").toString());
        test("/id:truetrue/", "true" + "true");
        test("/id:truer/", "truer");
        test("/id:falsehood/", "falsehood");
        test("/id:nil/", "nil");
        test("/id:nil/1", "nil 1");
        test("/error/", "\"invalid");
        test("- \"test/test\"", "-\"test\\/test\"");
        test(" {\n\"x\": 1,\n\"y\": 2\n}\n", "{\"x\":1, \"y\":2}");
        test("[true, false, null]", "[true, false, null]");
        test("\"\"", "\"\"");
        test("\"\\u0000\"", "\"\\u0000\"");
        test("\"\\u0001\"", "\"\\u0001\"");
        test("\"\\u0002\"", "\"\\u0002\"");
        test("\"\\u0003\"", "\"\\u0003\"");
        test("\"\\u0004\"", "\"\\u0004\"");
        test("\"\\u0005\"", "\"\\u0005\"");
        test("\"\\u0006\"", "\"\\u0006\"");
        test("\"\\u0007\"", "\"\\u0007\"");
        test("\"\\b\"", "\"\\u0008\"");
        test("\"\\t\"", "\"\\u0009\"");
        test("\"\\n\"", "\"\\u000a\"");
        test("\"\\u000b\"", "\"\\u000b\"");
        test("\"\\f\"", "\"\\u000c\"");
        test("\"\\r\"", "\"\\u000d\"");
        test("\"\\u000e\"", "\"\\u000e\"");
        test("\"\\u000f\"", "\"\\u000f\"");
        test("\"\\u0010\"", "\"\\u0010\"");
        test("\"\\u0011\"", "\"\\u0011\"");
        test("\"\\u0012\"", "\"\\u0012\"");
        test("\"\\u0013\"", "\"\\u0013\"");
        test("\"\\u0014\"", "\"\\u0014\"");
        test("\"\\u0015\"", "\"\\u0015\"");
        test("\"\\u0016\"", "\"\\u0016\"");
        test("\"\\u0017\"", "\"\\u0017\"");
        test("\"\\u0018\"", "\"\\u0018\"");
        test("\"\\u0019\"", "\"\\u0019\"");
        test("\"\\u001a\"", "\"\\u001a\"");
        test("\"\\u001b\"", "\"\\u001b\"");
        test("\"\\u001c\"", "\"\\u001c\"");
        test("\"\\u001d\"", "\"\\u001d\"");
        test("\"\\u001e\"", "\"\\u001e\"");
        test("\"\\u001f\"", "\"\\u001f\"");
        test("\"\u0123\"", "\"\\u0123\"");
        test("\"\u1234\"", "\"\\u1234\"");
        test("\"-\\\\-\\\"-\\b-\\f-\\n-\\r-\\t\"", "\"-\\\\-\\\"-\\b-\\f-\\n-\\r-\\t\"");
        test("\"-\\b-\\f-\\n-\\r-\\t\"", "\"-\b-\f-\n-\r-\t\"");
        test("[0, 12, -1, 0.1, -0.1, -2.3e1, 1e+1, 1.e-20]", "[0,12,-1,0.1,-0.1,-2.3e1,1e+1,1.e-20]");
        test("\"Hello\"", "\"Hello\"");
        test("[]", "[]");
        test(" {\n\n}\n", "{}");
        test(" {\n\"a\": /* test */ 10\n}\n", "{ \"a\": /* test */ 10}");
        test("+ - / ^ ", "+ - / ^");
        test("/*/ comment /*/ ", "/*/ comment /*/");
        test("/**/ /id:comment//**/ ", "/**/ comment /**/");
        JsopTokenizer t = new JsopTokenizer("{}123");
        assertFalse(t.matches('+'));
        assertTrue(t.matches('{'));
        t.read('}');
        try {
            t.read('+');
            fail();
        } catch (IllegalArgumentException e) {
            assertEquals("{}123[*] expected: '+'", e.getMessage());
        }
        try {
            t.read(JsopReader.STRING);
            fail();
        } catch (IllegalArgumentException e) {
            assertEquals("{}123[*] expected: string", e.getMessage());
        }
    }

    // valid surrogate pairs pass through; broken/truncated ones get escaped
    public void testSurrogates() {
        String[][] tests = { { "surrogate-ok: \uD834\uDD1E", "surrogate-ok: \uD834\uDD1E" },
                { "surrogate-broken: \ud800 ", "surrogate-broken: \\ud800 " },
                { "surrogate-truncated: \ud800", "surrogate-truncated: \\ud800" } };
        for (String[] test : tests) {
            StringBuilder buff = new StringBuilder();
            JsopBuilder.escape(test[0], buff);
            assertEquals(test[1], buff.toString());
            String s2 = JsopBuilder.encode(test[0]);
            assertEquals("\"" + test[1] + "\"", s2);
            String s3 = JsopTokenizer.decodeQuoted(s2);
            assertEquals(test[0], s3);
        }
    }

    // asserts that pretty-printing `json` yields `expected`
    static void test(String expected, String json) {
        String j2 = prettyPrintWithErrors(json);
        assertEquals(expected, j2);
    }

    // pretty-prints the whole input, rendering error/identifier tokens inline
    static String prettyPrintWithErrors(String jsop) {
        StringBuilder buff = new StringBuilder();
        JsopTokenizer t = new JsopTokenizer(jsop);
        while (true) {
            prettyPrint(buff, t, "");
            if (t.getTokenType() == JsopReader.END) {
                return buff.toString();
            }
        }
    }

    // renders the token stream; errors appear as /error/, identifiers as
    // /id:<name>/, comments as /*...*/
    static String prettyPrint(StringBuilder buff, JsopTokenizer t, String ident) {
        String space = "";
        boolean inArray = false;
        while (true) {
            switch (t.read()) {
            case JsopReader.END:
                return buff.toString();
            case JsopReader.STRING:
                buff.append(JsopBuilder.encode(t.getToken()));
                break;
            case JsopReader.NUMBER:
                buff.append(t.getToken());
                break;
            case JsopReader.TRUE:
                buff.append("true");
                break;
            case JsopReader.FALSE:
                buff.append("false");
                break;
            case JsopReader.NULL:
                buff.append("null");
                break;
            case JsopReader.ERROR:
                buff.append("/error/");
                break;
            case JsopReader.IDENTIFIER:
                buff.append("/id:").append(t.getToken()).append('/');
                break;
            case JsopReader.COMMENT:
                buff.append("/*").append(t.getToken()).append("*/ ");
                break;
            case '{':
                buff.append(" {\n").append(space += ident);
                break;
            case '}':
                space = space.substring(0, space.length() - ident.length());
                buff.append('\n').append(space).append("}\n").append(space);
                break;
            case '[':
                inArray = true;
                buff.append("[");
                break;
            case ']':
                inArray = false;
                buff.append("]");
                break;
            case ',':
                if (!inArray) {
                    buff.append(",\n").append(space);
                } else {
                    buff.append(", ");
                }
                break;
            case ':':
                buff.append(": ");
                break;
            case '+':
                buff.append("+ ");
                break;
            case '-':
                buff.append("- ");
                break;
            case '^':
                buff.append("^ ");
                break;
            case '/':
                buff.append("/ ");
                break;
            default:
                throw new AssertionError("token type: " + t.getTokenType());
            }
        }
    }

    // nested objects/arrays, tags, newline() and resetWriter()
    public void testBuilder() {
        JsopBuilder buff = new JsopBuilder();
        buff.tag('+').object().
                key("foo").value("bar").
                key("int").value(3).
                key("decimal").encodedValue("3.0").
                key("obj").object().
                key("boolean").value(true).
                key("null").value(null).
                key("arr").array().
                array().
                value(1).
                value("\u001f ~ \u007f \u0080").
                value("42").
                endArray().
                array().
                endArray().
                endArray().
                endObject().
                key("some").value("more").
                endObject();
        String json = buff.toString();
        assertEquals("+{\"foo\":\"bar\",\"int\":3,\"decimal\":3.0," +
                "\"obj\":{\"boolean\":true,\"null\":null," +
                "\"arr\":[[1,\"\\u001f ~ \u007f \u0080\",\"42\"],[]]},\"some\":\"more\"}", json);
        buff.resetWriter();
        buff.array().
                object().key("x").value("1").endObject().newline().
                object().key("y").value("2").endObject().newline().
                endArray();
        json = buff.toString();
        assertEquals("[{\"x\":\"1\"}\n,{\"y\":\"2\"}\n]", json);
        buff = new JsopBuilder();
        buff.tag('+').key("x").value("1").newline();
        buff.tag('+').key("y").value("2").newline();
        json = buff.toString();
        assertEquals("+\"x\":\"1\"\n+\"y\":\"2\"\n", json);
    }

    // backslashes in keys and values are escaped on output
    public void testEscape() {
        assertEquals("null", JsopBuilder.encode(null));
        JsopBuilder buff = new JsopBuilder().
                key("back\\slash").value("\\").
                key("back\\\\slash").value("\\\\");
        assertEquals("\"back\\\\slash\":\"\\\\\",\"back\\\\\\\\slash\":\"\\\\\\\\\"", buff.toString());
    }

    public void testPrettyPrint() {
        assertEquals("{}", JsopBuilder.prettyPrint("{}"));
        assertEquals("{\n  \"a\": 1,\n  \"b\": \"Hello\"\n}",
                JsopBuilder.prettyPrint("{\"a\":1,\"b\":\"Hello\"}"));
        assertEquals("{\n  \"a\": [1, 2]\n}",
                JsopBuilder.prettyPrint("{\"a\":[1, 2]}"));
    }

    // formats JSON with two-space indentation using the test pretty-printer
    public static String format(String json) {
        return prettyPrint(new StringBuilder(),
                new JsopTokenizer(json), "  ");
    }

}
| |
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014 The Billing Project, LLC
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.meter.timeline;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.killbill.billing.plugin.meter.timeline.chunks.TimelineChunk;
import org.killbill.billing.plugin.meter.timeline.codec.SampleCoder;
import org.killbill.billing.plugin.meter.timeline.codec.TimelineChunkAccumulator;
import org.killbill.billing.plugin.meter.timeline.persistent.TimelineDao;
import org.killbill.billing.plugin.meter.timeline.samples.NullSample;
import org.killbill.billing.plugin.meter.timeline.samples.RepeatSample;
import org.killbill.billing.plugin.meter.timeline.samples.ScalarSample;
import org.killbill.billing.plugin.meter.timeline.sources.SourceSamplesForTimestamp;
import org.killbill.billing.plugin.meter.timeline.times.TimelineCoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class represents a collection of timeline chunks, one for each
* metric belonging to one event category, each over a specific time period,
* from a single source. This class is used to accumulate samples
* to be written to the database; a separate streaming class with
* much less overhead is used to "play back" the samples read from
* the db in response to queries.
* <p/>
* All subordinate timelines contain the same number of samples.
* <p/>
* When enough samples have accumulated, typically one hour's worth,
* in-memory samples are made into TimelineChunks, one chunk for each metricId
* maintained by the accumulator.
* <p/>
* These new chunks are organized as PendingChunkMaps, kept in a local list and also
 * handed off to a PendingChunkMapConsumer to be written to the db by a background process. At some
 * point in the future, that background process will call markPendingChunkMapConsumed(),
* passing the id of a PendingChunkMap. This causes the PendingChunkMap
* to be removed from the local list maintained by the TimelineSourceEventAccumulator.
* <p/>
* Queries that cause the TimelineSourceEventAccumulator instance to return memory
* chunks also return any chunks in PendingChunkMaps in the local list of pending chunks.
*/
public class TimelineSourceEventAccumulator {

    private static final Logger log = LoggerFactory.getLogger(TimelineSourceEventAccumulator.class);
    private static final DateTimeFormatter dateFormatter = ISODateTimeFormat.dateTime();
    private static final NullSample nullSample = new NullSample();
    // when true, checkSampleCounts() runs after every addSourceSamples() call
    private static final boolean checkEveryAccess = Boolean.parseBoolean(System.getProperty("org.killbill.billing.plugin.meter.checkEveryAccess"));
    // fixed seed: chunk-end jitter is deterministic across restarts
    private static final Random rand = new Random(0);

    // last sequence number seen per metric id; used to detect metrics that
    // missed a sample in the current pass so a null placeholder can be added
    private final Map<Integer, SampleSequenceNumber> metricIdCounters = new HashMap<Integer, SampleSequenceNumber>();
    // chunk maps queued for the background writer but not yet consumed
    private final List<PendingChunkMap> pendingChunkMaps = new ArrayList<PendingChunkMap>();
    private long pendingChunkMapIdCounter = 1;

    private final BackgroundDBChunkWriter backgroundWriter;
    private final TimelineCoder timelineCoder;
    private final SampleCoder sampleCoder;
    private final Integer timelineLengthMillis;
    private final int sourceId;
    private final int eventCategoryId;
    // This is the time when we want to end the chunk. Setting the value randomly
    // when the TimelineSourceEventAccumulator is created provides a mechanism to
    // distribute the db writes
    private DateTime chunkEndTime = null;
    private DateTime startTime = null;
    private DateTime endTime = null;
    private DateTime latestSampleAddTime;
    private long sampleSequenceNumber = 0;
    private int sampleCount = 0;

    /**
     * Maps the sample kind id to the accumulator for that sample kind
     */
    private final Map<Integer, TimelineChunkAccumulator> timelines = new ConcurrentHashMap<Integer, TimelineChunkAccumulator>();

    /**
     * Holds the sampling times of the samples
     */
    private final List<DateTime> times = new ArrayList<DateTime>();

    public TimelineSourceEventAccumulator(final TimelineDao dao, final TimelineCoder timelineCoder, final SampleCoder sampleCoder,
                                          final BackgroundDBChunkWriter backgroundWriter, final int sourceId, final int eventCategoryId,
                                          final DateTime firstSampleTime, final Integer timelineLengthMillis) {
        this.timelineLengthMillis = timelineLengthMillis;
        this.backgroundWriter = backgroundWriter;
        this.timelineCoder = timelineCoder;
        this.sampleCoder = sampleCoder;
        this.sourceId = sourceId;
        this.eventCategoryId = eventCategoryId;
        // Set the end-of-chunk time by tossing a random number, to evenly distribute the db writeback load.
        this.chunkEndTime = timelineLengthMillis != null ? firstSampleTime.plusMillis(rand.nextInt(timelineLengthMillis)) : null;
    }

    /*
     * This constructor is used for testing; it writes chunks as soon as they are
     * created, but because the chunkEndTime is way in the future, doesn't initiate
     * chunk writes.
     */
    public TimelineSourceEventAccumulator(final TimelineDao timelineDAO, final TimelineCoder timelineCoder, final SampleCoder sampleCoder,
                                          final Integer sourceId, final int eventTypeId, final DateTime firstSampleTime) {
        this(timelineDAO, timelineCoder, sampleCoder, new BackgroundDBChunkWriter(timelineDAO, null, true), sourceId, eventTypeId, firstSampleTime, Integer.MAX_VALUE);
    }

    /**
     * Adds one timestamped batch of samples (one sample per metric) to the
     * in-memory timelines. Rolls the current chunk over to the background
     * writer when the chunk end time has passed, ignores batches whose
     * timestamp is before the current end time, and appends a null sample to
     * every metric that did not receive a sample in this batch so all
     * timelines stay the same length.
     *
     * @param samples the per-metric samples for a single timestamp
     */
    @SuppressWarnings("unchecked")
    // TODO - we can probably do better than synchronize the whole method
    public synchronized void addSourceSamples(final SourceSamplesForTimestamp samples) {
        final DateTime timestamp = samples.getTimestamp();
        if (chunkEndTime != null && chunkEndTime.isBefore(timestamp)) {
            // current chunk is full: queue it for the db and start a new one
            extractAndQueueTimelineChunks();
            startTime = timestamp;
            chunkEndTime = timestamp.plusMillis(timelineLengthMillis);
        }
        if (startTime == null) {
            startTime = timestamp;
        }
        if (endTime == null) {
            endTime = timestamp;
        } else if (timestamp.isBefore(endTime)) {
            // Note: we allow multiple events at the same time
            // TODO Do we really want that?
            log.warn("Adding samples for source {}, timestamp {} is before the end time {}; ignored",
                    new Object[]{sourceId, dateFormatter.print(timestamp), dateFormatter.print(endTime)});
            return;
        }
        sampleSequenceNumber++;
        latestSampleAddTime = new DateTime();
        for (final Map.Entry<Integer, ScalarSample> entry : samples.getSamples().entrySet()) {
            final Integer metricId = entry.getKey();
            // mark this metric as having received a sample in this pass
            final SampleSequenceNumber counter = metricIdCounters.get(metricId);
            if (counter != null) {
                counter.setSequenceNumber(sampleSequenceNumber);
            } else {
                metricIdCounters.put(metricId, new SampleSequenceNumber(sampleSequenceNumber));
            }
            final ScalarSample sample = entry.getValue();
            TimelineChunkAccumulator timeline = timelines.get(metricId);
            if (timeline == null) {
                timeline = new TimelineChunkAccumulator(sourceId, metricId, sampleCoder);
                if (sampleCount > 0) {
                    // backfill placeholders so the new timeline lines up with the others
                    addPlaceholders(timeline, sampleCount);
                }
                timelines.put(metricId, timeline);
            }
            final ScalarSample compressedSample = sampleCoder.compressSample(sample);
            timeline.addSample(compressedSample);
        }
        // metrics that got no sample in this batch receive a null sample
        for (final Map.Entry<Integer, SampleSequenceNumber> entry : metricIdCounters.entrySet()) {
            final SampleSequenceNumber counter = entry.getValue();
            if (counter.getSequenceNumber() < sampleSequenceNumber) {
                counter.setSequenceNumber(sampleSequenceNumber);
                final int metricId = entry.getKey();
                final TimelineChunkAccumulator timeline = timelines.get(metricId);
                timeline.addSample(nullSample);
            }
        }
        // Now we can update the state
        endTime = timestamp;
        sampleCount++;
        times.add(timestamp);
        if (checkEveryAccess) {
            checkSampleCounts(sampleCount);
        }
    }

    // Pads a freshly created timeline with countToAdd repeat-placeholder
    // samples, split into runs no longer than MAX_SHORT_REPEAT_COUNT.
    private void addPlaceholders(final TimelineChunkAccumulator timeline, int countToAdd) {
        final int maxRepeatSamples = RepeatSample.MAX_SHORT_REPEAT_COUNT;
        while (countToAdd >= maxRepeatSamples) {
            timeline.addPlaceholder((byte) maxRepeatSamples);
            countToAdd -= maxRepeatSamples;
        }
        if (countToAdd > 0) {
            timeline.addPlaceholder((byte) countToAdd);
        }
    }

    /**
     * This method queues a map of TimelineChunks extracted from the TimelineChunkAccumulators
     * to be written to the db. When memory chunks are requested, any queued chunk will be included
     * in the list.
     */
    public synchronized void extractAndQueueTimelineChunks() {
        if (times.size() > 0) {
            final Map<Integer, TimelineChunk> chunkMap = new HashMap<Integer, TimelineChunk>();
            final byte[] timeBytes = timelineCoder.compressDateTimes(times);
            for (final Map.Entry<Integer, TimelineChunkAccumulator> entry : timelines.entrySet()) {
                final int metricId = entry.getKey();
                final TimelineChunkAccumulator accumulator = entry.getValue();
                final TimelineChunk chunk = accumulator.extractTimelineChunkAndReset(startTime, endTime, timeBytes);
                chunkMap.put(metricId, chunk);
            }
            times.clear();
            sampleCount = 0;
            final long counter = pendingChunkMapIdCounter++;
            final PendingChunkMap newChunkMap = new PendingChunkMap(this, counter, chunkMap);
            pendingChunkMaps.add(newChunkMap);
            backgroundWriter.addPendingChunkMap(newChunkMap);
        }
    }

    /**
     * Removes the pending chunk map with the given id from the local list
     * once the background writer has persisted it. Pending maps are expected
     * to be consumed in FIFO order: only the head of the list is checked, and
     * an out-of-order or unknown id is logged and left in place.
     *
     * @param pendingChunkMapId the id assigned when the map was queued
     */
    public synchronized void markPendingChunkMapConsumed(final long pendingChunkMapId) {
        final PendingChunkMap pendingChunkMap = pendingChunkMaps.size() > 0 ? pendingChunkMaps.get(0) : null;
        if (pendingChunkMap == null) {
            log.error("In TimelineSourceEventAccumulator.markPendingChunkMapConsumed(), could not find the map for {}", pendingChunkMapId);
        } else if (pendingChunkMapId != pendingChunkMap.getPendingChunkMapId()) {
            log.error("In TimelineSourceEventAccumulator.markPendingChunkMapConsumed(), the next map has id {}, but we're consuming id {}",
                    pendingChunkMap.getPendingChunkMapId(), pendingChunkMapId);
        } else {
            pendingChunkMaps.remove(0);
        }
    }

    /**
     * Returns all in-memory chunks for the given metric ids: the chunks
     * queued for the background writer plus a snapshot of the data still
     * accumulating here (extracted from deep copies so the live accumulators
     * are not reset).
     *
     * @param metricIds the metric ids to collect chunks for
     * @return the matching timeline chunks
     * @throws IOException if chunk extraction fails
     */
    public synchronized Collection<TimelineChunk> getInMemoryTimelineChunks(final List<Integer> metricIds) throws IOException {
        final List<TimelineChunk> timelineChunks = new ArrayList<TimelineChunk>();
        // Get all the older chunks from the staging area of the BackgroundDBChunkWriter
        for (final PendingChunkMap pendingChunkMap : pendingChunkMaps) {
            for (final Integer metricId : metricIds) {
                final TimelineChunk timelineChunkForMetricId = pendingChunkMap.getChunkMap().get(metricId);
                if (timelineChunkForMetricId != null) {
                    timelineChunks.add(timelineChunkForMetricId);
                }
            }
        }
        // Get the data in this accumulator, not yet in the staging area
        // This is very similar to extractAndQueueTimelineChunks() above, but without changing the global state
        final byte[] timeBytes = timelineCoder.compressDateTimes(times);
        for (final Integer metricId : metricIds) {
            final TimelineChunkAccumulator chunkAccumulator = timelines.get(metricId);
            if (chunkAccumulator != null) {
                // Extract the timeline for this chunk by copying it and reading encoded bytes
                final TimelineChunkAccumulator chunkAccumulatorCopy = chunkAccumulator.deepCopy();
                final TimelineChunk timelineChunk = chunkAccumulatorCopy.extractTimelineChunkAndReset(startTime, endTime, timeBytes);
                timelineChunks.add(timelineChunk);
            }
        }
        return timelineChunks;
    }

    /**
     * Make sure all timelines have the sample count passed in; otherwise log
     * discrepancies and return false
     *
     * @param assertedCount The sample count that all timelines are supposed to have
     * @return true if all timelines have the right count; false otherwise
     */
    public boolean checkSampleCounts(final int assertedCount) {
        boolean success = true;
        if (assertedCount != sampleCount) {
            log.error("For host {}, start time {}, the SourceTimeLines sampleCount {} is not equal to the assertedCount {}",
                    new Object[]{sourceId, dateFormatter.print(startTime), sampleCount, assertedCount});
            success = false;
        }
        for (final Map.Entry<Integer, TimelineChunkAccumulator> entry : timelines.entrySet()) {
            final int metricId = entry.getKey();
            final TimelineChunkAccumulator timeline = entry.getValue();
            final int lineSampleCount = timeline.getSampleCount();
            if (lineSampleCount != assertedCount) {
                log.error("For host {}, start time {}, sample kind id {}, the sampleCount {} is not equal to the assertedCount {}",
                        new Object[]{sourceId, dateFormatter.print(startTime), metricId, lineSampleCount, assertedCount});
                success = false;
            }
        }
        return success;
    }

    public int getSourceId() {
        return sourceId;
    }

    public int getEventCategoryId() {
        return eventCategoryId;
    }

    public DateTime getStartTime() {
        return startTime;
    }

    public DateTime getEndTime() {
        return endTime;
    }

    public Map<Integer, TimelineChunkAccumulator> getTimelines() {
        return timelines;
    }

    public List<DateTime> getTimes() {
        return times;
    }

    public DateTime getLatestSampleAddTime() {
        return latestSampleAddTime;
    }

    // Mutable holder for the last sequence number at which a metric received
    // a sample; see metricIdCounters.
    private static class SampleSequenceNumber {
        private long sequenceNumber;

        public SampleSequenceNumber(final long sequenceNumber) {
            this.sequenceNumber = sequenceNumber;
        }

        public long getSequenceNumber() {
            return sequenceNumber;
        }

        public void setSequenceNumber(final long sequenceNumber) {
            this.sequenceNumber = sequenceNumber;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.aggregate.tarfile;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import org.apache.camel.Exchange;
import org.apache.camel.WrappedFile;
import org.apache.camel.component.file.FileConsumer;
import org.apache.camel.component.file.GenericFile;
import org.apache.camel.component.file.GenericFileMessage;
import org.apache.camel.component.file.GenericFileOperationFailedException;
import org.apache.camel.processor.aggregate.AggregationStrategy;
import org.apache.camel.spi.Synchronization;
import org.apache.camel.util.FileUtil;
import org.apache.camel.util.IOHelper;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This aggregation strategy will aggregate all incoming messages into a TAR file.
* <p>If the incoming exchanges contain {@link GenericFileMessage} file name will
* be taken from the body otherwise the body content will be treated as a byte
* array and the TAR entry will be named using the message id (unless the flag
 * useFilenameHeader is set to true).</p>
* <p><b>NOTE 1:</b> Please note that this aggregation strategy requires eager
* completion check to work properly.</p>
*
* <p><b>NOTE 2:</b> This implementation is very inefficient especially on big files since the tar
* file is completely rewritten for each file that is added to it. Investigate if the
* files can be collected and at completion stored to tar file.</p>
*/
public class TarAggregationStrategy implements AggregationStrategy {

    private static final Logger LOG = LoggerFactory.getLogger(TarAggregationStrategy.class);

    private String filePrefix;
    private String fileSuffix = ".tar";
    private boolean preserveFolderStructure;
    private boolean useFilenameHeader;
    // Where the temporary TAR files are written; defaults to the JVM temp dir.
    private File parentDir = new File(System.getProperty("java.io.tmpdir"));

    public TarAggregationStrategy() {
        this(false, false);
    }

    /**
     * @param preserveFolderStructure if true, the folder structure is preserved when the source is
     * a type of {@link GenericFileMessage}. If used with a file, use recursive=true.
     */
    public TarAggregationStrategy(boolean preserveFolderStructure) {
        this(preserveFolderStructure, false);
    }

    /**
     * @param preserveFolderStructure if true, the folder structure is preserved when the source is
     * a type of {@link GenericFileMessage}. If used with a file, use recursive=true.
     * @param useFilenameHeader if true, the filename header will be used to name aggregated byte arrays
     * within the TAR file.
     */
    public TarAggregationStrategy(boolean preserveFolderStructure, boolean useFilenameHeader) {
        this.preserveFolderStructure = preserveFolderStructure;
        this.useFilenameHeader = useFilenameHeader;
    }

    public String getFilePrefix() {
        return filePrefix;
    }

    /**
     * Sets the prefix that will be used when creating the TAR filename.
     */
    public void setFilePrefix(String filePrefix) {
        this.filePrefix = filePrefix;
    }

    public String getFileSuffix() {
        return fileSuffix;
    }

    /**
     * Sets the suffix that will be used when creating the TAR filename.
     */
    public void setFileSuffix(String fileSuffix) {
        this.fileSuffix = fileSuffix;
    }

    public File getParentDir() {
        return parentDir;
    }

    /**
     * Sets the parent directory to use for writing temporary files.
     */
    public void setParentDir(File parentDir) {
        this.parentDir = parentDir;
    }

    /**
     * Sets the parent directory to use for writing temporary files.
     */
    public void setParentDir(String parentDir) {
        this.parentDir = new File(parentDir);
    }

    /**
     * Appends the new exchange's payload to the aggregated TAR file.
     *
     * <p>On the first invocation (oldExchange == null) a fresh temporary TAR file is
     * created and registered for deletion once the exchange completes. File payloads
     * are appended under their file name (or message id), any other payload is
     * converted to a byte array and appended under the filename header or message id.</p>
     */
    @Override
    public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
        File tarFile;
        Exchange answer = oldExchange;
        // Guard against empty new exchanges
        if (newExchange == null) {
            return oldExchange;
        }
        // First time for this aggregation
        if (oldExchange == null) {
            try {
                tarFile = FileUtil.createTempFile(this.filePrefix, this.fileSuffix, parentDir);
                LOG.trace("Created temporary file: {}", tarFile);
            } catch (IOException e) {
                throw new GenericFileOperationFailedException(e.getMessage(), e);
            }
            answer = newExchange;
            // Clean up the temporary TAR file when the exchange is done.
            answer.addOnCompletion(new DeleteTarFileOnCompletion(tarFile));
        } else {
            tarFile = oldExchange.getIn().getBody(File.class);
        }
        Object body = newExchange.getIn().getBody();
        if (body instanceof WrappedFile) {
            body = ((WrappedFile) body).getFile();
        }
        if (body instanceof File) {
            try {
                File appendFile = (File) body;
                // do not try to append empty files
                if (appendFile.length() > 0) {
                    String entryName = preserveFolderStructure ? newExchange.getIn().getHeader(Exchange.FILE_NAME, String.class) : newExchange.getIn().getMessageId();
                    addFileToTar(tarFile, appendFile, this.preserveFolderStructure ? entryName : null);
                    GenericFile<File> genericFile =
                            FileConsumer.asGenericFile(
                                    tarFile.getParent(), tarFile, Charset.defaultCharset().toString(), false);
                    genericFile.bindToExchange(answer);
                }
            } catch (Exception e) {
                throw new GenericFileOperationFailedException(e.getMessage(), e);
            }
        } else {
            // Handle all other messages
            try {
                byte[] buffer = newExchange.getIn().getMandatoryBody(byte[].class);
                // do not try to append empty data
                if (buffer.length > 0) {
                    String entryName = useFilenameHeader ? newExchange.getIn().getHeader(Exchange.FILE_NAME, String.class) : newExchange.getIn().getMessageId();
                    addEntryToTar(tarFile, entryName, buffer, buffer.length);
                    GenericFile<File> genericFile = FileConsumer.asGenericFile(
                            tarFile.getParent(), tarFile, Charset.defaultCharset().toString(), false);
                    genericFile.bindToExchange(answer);
                }
            } catch (Exception e) {
                throw new GenericFileOperationFailedException(e.getMessage(), e);
            }
        }
        return answer;
    }

    /**
     * Rewrites {@code source} as a new TAR containing all of its existing entries
     * plus {@code file} appended as the last entry.
     *
     * <p>NOTE: the whole archive is copied on every call; see the class-level note
     * about inefficiency for big files.</p>
     *
     * @param source   the current aggregated TAR file (replaced in place)
     * @param file     the file to append
     * @param fileName entry name to use, or null to use {@code file.getName()}
     */
    private void addFileToTar(File source, File file, String fileName) throws IOException, ArchiveException {
        File tmpTar = File.createTempFile(source.getName(), null, parentDir);
        tmpTar.delete();
        if (!source.renameTo(tmpTar)) {
            throw new IOException("Could not make temp file (" + source.getName() + ")");
        }
        FileInputStream fis = null;
        TarArchiveInputStream tin = null;
        TarArchiveOutputStream tos = null;
        InputStream in = null;
        // try/finally so the streams are closed even when reading/copying fails,
        // otherwise the file handles would leak on error.
        try {
            fis = new FileInputStream(tmpTar);
            tin = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.TAR, fis);
            tos = new TarArchiveOutputStream(new FileOutputStream(source));
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
            // copy the existing entries
            ArchiveEntry nextEntry;
            while ((nextEntry = tin.getNextEntry()) != null) {
                tos.putArchiveEntry(nextEntry);
                IOUtils.copy(tin, tos);
                tos.closeArchiveEntry();
            }
            // Add the new entry
            in = new FileInputStream(file);
            TarArchiveEntry entry = new TarArchiveEntry(fileName == null ? file.getName() : fileName);
            entry.setSize(file.length());
            tos.putArchiveEntry(entry);
            IOUtils.copy(in, tos);
            tos.closeArchiveEntry();
        } finally {
            IOHelper.close(fis, in, tin, tos);
        }
        LOG.trace("Deleting temporary file: {}", tmpTar);
        FileUtil.deleteFile(tmpTar);
    }

    /**
     * Rewrites {@code source} as a new TAR containing all of its existing entries
     * plus a new entry named {@code entryName} holding the given bytes.
     *
     * @param source    the current aggregated TAR file (replaced in place)
     * @param entryName name of the new entry
     * @param buffer    entry content
     * @param length    number of bytes of {@code buffer} to write
     */
    private void addEntryToTar(File source, String entryName, byte[] buffer, int length) throws IOException, ArchiveException {
        File tmpTar = File.createTempFile(source.getName(), null, parentDir);
        tmpTar.delete();
        if (!source.renameTo(tmpTar)) {
            throw new IOException("Cannot create temp file: " + source.getName());
        }
        FileInputStream fis = null;
        TarArchiveInputStream tin = null;
        TarArchiveOutputStream tos = null;
        // try/finally so the streams are closed even when reading/copying fails.
        try {
            fis = new FileInputStream(tmpTar);
            tin = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.TAR, fis);
            tos = new TarArchiveOutputStream(new FileOutputStream(source));
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
            // copy the existing entries
            ArchiveEntry nextEntry;
            while ((nextEntry = tin.getNextEntry()) != null) {
                tos.putArchiveEntry(nextEntry);
                IOUtils.copy(tin, tos);
                tos.closeArchiveEntry();
            }
            // Create new entry
            TarArchiveEntry entry = new TarArchiveEntry(entryName);
            entry.setSize(length);
            tos.putArchiveEntry(entry);
            tos.write(buffer, 0, length);
            tos.closeArchiveEntry();
        } finally {
            IOHelper.close(fis, tin, tos);
        }
        LOG.trace("Deleting temporary file: {}", tmpTar);
        FileUtil.deleteFile(tmpTar);
    }

    /**
     * This callback class is used to clean up the temporary TAR file once the exchange has completed.
     */
    private static class DeleteTarFileOnCompletion implements Synchronization {

        private final File fileToDelete;

        DeleteTarFileOnCompletion(File fileToDelete) {
            this.fileToDelete = fileToDelete;
        }

        @Override
        public void onFailure(Exchange exchange) {
            // Keep the file if something gone a miss.
        }

        @Override
        public void onComplete(Exchange exchange) {
            LOG.debug("Deleting tar file on completion: {}", this.fileToDelete);
            FileUtil.deleteFile(this.fileToDelete);
        }
    }
}
| |
package net.minecraft.src;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.network.packet.Packet;
import net.minecraft.network.packet.Packet250CustomPayload;
/**
 * Per-player research state: elemental resource counters, the list of
 * researches and their completion flags, plus (de)serialization to a
 * custom-payload packet and to a text save file.
 */
public class YC_ResearchesData {

    // Total number of researches created in Init(). Must be kept in sync with
    // the entries added there when a new research is introduced.
    public static int C_RESEARCHES = 12; // TODO !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    // Raw List of YC_ResearchData kept for compatibility with existing callers.
    public List researches = new ArrayList();
    // Elemental resource counters.
    public int fire = 0, nature = 0, water = 0, ender = 0;
    public String PlayerName = "";
    public int researchLevel = 0;//number of researches commited
    public int techLevel = 0;//tech level, gained from astral

    public YC_ResearchesData(String name)
    {
        PlayerName = name;
        Init();
    }

    /**
     * Populates {@link #researches} with every research definition.
     * The constructor argument order is defined by YC_ResearchData;
     * the literal values below are game data and must not be altered.
     */
    public void Init()
    {
        //lava generator
        YC_ResearchData r = new YC_ResearchData(28, 5, 22, 0, 0, new int[]{0,0,1,0,1}, 85, 36,
            new String[]{"A simple device","that generates","lava"},
            "/YC/Researches/lavaGenerator.png", 181, 102, new int[]{}, new int[]{0});
        researches.add(r);
        //rain stopper
        r = new YC_ResearchData(4, 35, 38, 0, 1, new int[]{1,0,2,0,1}, 105, 18,
            new String[]{"Device to stop rain"},
            "/YC/Researches/rainStopper.png", 178, 100, new int[]{0}, new int[]{0});
        researches.add(r);
        //rain starter
        r = new YC_ResearchData(52, 35, 54, 0, 2, new int[]{1,1,1,0,1}, 108, 18,
            new String[]{"Device to start rain"},
            "/YC/Researches/rainStarter.png", 179, 102, new int[]{0}, new int[]{0});
        researches.add(r);
        //digger
        r = new YC_ResearchData(95, 5, 70, 0, 3, new int[]{2,0,0,1,1}, 142, 36,
            new String[]{"Advanced machine that","digs out useful resources","leaving terrain unharmed"},
            "/YC/Researches/digger.png", 184, 104, new int[]{}, new int[]{4});
        researches.add(r);
        //crystalizer
        r = new YC_ResearchData(95, 35, 86, 0, 4, new int[]{2,1,2,0,1}, 116, 36,
            new String[]{"Machine to compress","8 coal and a diamond","into crystals"},
            "/YC/Researches/crystalizer.png", 180, 104, new int[]{3}, new int[]{3});
        researches.add(r);
        //astral teleporter
        r = new YC_ResearchData(156, 5, 102, 0, 5, new int[]{1,1,1,3,1}, 150, 36,
            new String[]{"Device that can rip","through space to teleport","it's owner to a distant place"},
            "/YC/Researches/astralTeleporter.png", 182, 102, new int[]{}, new int[]{});
        researches.add(r);
        //advanced astral teleporter
        r = new YC_ResearchData(136, 65, 118, 0, 6, new int[]{2,2,2,4,2}, 155, 36,
            new String[]{"Addition to Astral Teleporter.","Allows to create more stable","passages in space."},
            "/YC/Researches/advAstTel.png", 179, 103, new int[]{5}, new int[]{3});
        researches.add(r);
        //astral retransmitter
        r = new YC_ResearchData(177, 65, 134, 0, 7, new int[]{2,2,2,5,2}, 145, 36,
            new String[]{"Allows you to link yourself","with a different island in","astral."},
            "/YC/Researches/astralRetransmitter.png", 179, 103, new int[]{5}, new int[]{2});
        researches.add(r);
        //chest
        r = new YC_ResearchData(156, 95, 150, 0, 8, new int[]{3,2,5,8,3}, 155, 36,
            new String[]{"A chest that has no storrage","limit! Can hold as many items,","as you put."},
            "/YC/Researches/chest.png", 179, 103, new int[]{5}, new int[]{1});
        researches.add(r);
        //gravity explosion grenade
        r = new YC_ResearchData(4, 65, 166, 0, 9, new int[]{4,3,4,5,2}, 155, 27,
            new String[]{"Pushes everything away from","it when explodes."},
            "/YC/Researches/GravExpGren.png", 179, 103, new int[]{}, new int[]{});
        researches.add(r);
        //gravity implosion grenade
        r = new YC_ResearchData(52, 65, 182, 0, 10, new int[]{4,3,4,5,2}, 150, 27,
            new String[]{"Pushes everything towards","it when explodes."},
            "/YC/Researches/GravImpGren.png", 179, 103, new int[]{}, new int[]{});
        researches.add(r);
        //sub emitter (scares away Ghasts)
        r = new YC_ResearchData(4, 95, 198, 0, 11, new int[]{6,2,6,3,3}, 123, 36,
            new String[]{"Device that, when","activated, scares away","Ghasts"},
            "/YC/Researches/subEmitter.png", 179, 103, new int[]{9}, new int[]{});
        researches.add(r);
        // When adding a research here, remember to bump C_RESEARCHES accordingly.
    }

    /**
     * @return the index of the research whose icon area contains (x, y),
     *         or -1 when none matches.
     */
    public int GetIndexByCoord(int x, int y)
    {
        for (int i = 0; i < researches.size(); i++)
        {
            if (((YC_ResearchData) researches.get(i)).IsPointIn(x, y)) return i;
        }
        return -1;
    }

    /**
     * Builds a custom-payload packet containing the int-encoded state from
     * {@link #buildIntDataList()}, to be decoded by {@link #HandleIntData(int[])}.
     */
    public Packet getDescriptionPacket() {
        ByteArrayOutputStream bos = new ByteArrayOutputStream(8);
        DataOutputStream outputStream = new DataOutputStream(bos);
        try {
            int[] t = buildIntDataList();
            for (int i = 0; i < t.length; i++)
            {
                outputStream.writeInt(t[i]);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        Packet250CustomPayload packet = new Packet250CustomPayload();
        // NOTE(review): channel name is "YC_Researched" (no trailing 's') —
        // the receiver must use the exact same string.
        packet.channel = "YC_Researched";
        packet.data = bos.toByteArray();
        packet.length = bos.size();
        return packet;
    }

    /**
     * Encodes the state as a flat int array:
     * [0..3] fire/water/nature/ender, [4] player-name length,
     * [5..260] player-name chars (max 256), [261..261+C_RESEARCHES-1]
     * researched flags, then techLevel and researchLevel.
     */
    public int[] buildIntDataList() {
        // 4 elemental counters + (1 length + 256 name chars) + flags + 2 levels.
        int[] sortList = new int[4+257 + C_RESEARCHES + 2];
        sortList[0] = fire;
        sortList[1] = water;
        sortList[2] = nature;
        sortList[3] = ender;
        char[] t = PlayerName.toCharArray();
        sortList[4] = t.length;
        for (int i = 0; i < t.length; i++)
        {
            sortList[5 + i] = t[i];
        }
        for (int i = 0; i < C_RESEARCHES; i++)
        {
            // 261 = 4 counters + 1 length slot + 256 name slots.
            sortList[261 + i] = ((YC_ResearchData) researches.get(i)).researched ? 1 : 0;
        }
        sortList[sortList.length - 2] = techLevel;
        sortList[sortList.length - 1] = researchLevel;
        return sortList;
    }

    /**
     * Decodes an array produced by {@link #buildIntDataList()} back into this
     * object's fields. The player name portion is ignored here.
     */
    public void HandleIntData(int[] intData)
    {
        fire = intData[0];
        water = intData[1];
        nature = intData[2];
        ender = intData[3];
        for (int i = 0; i < C_RESEARCHES; i++)
        {
            // Same fixed offset (261) as in buildIntDataList().
            ((YC_ResearchData) researches.get(i)).researched = (intData[261 + i] == 1);
        }
        techLevel = intData[intData.length - 2];
        researchLevel = intData[intData.length - 1];
    }

    /** Adds the given deltas to the elemental counters (values may be negative). */
    public void IncVars(int w, int f, int n, int e)
    {
        water += w;
        fire += f;
        nature += n;
        ender += e;
    }

    /** @return a '0'/'1' string, one char per research, '1' meaning researched. */
    public String GetResearchDescription()
    {
        String s = "";
        for (int i = 0; i < C_RESEARCHES; i++)
        {
            s = s + (((YC_ResearchData) researches.get(i)).researched ? "1" : "0");
        }
        return s;
    }

    /**
     * Parses a string produced by {@link #GetResearchDescription()} and applies
     * the researched flags. Input longer than the research list is truncated
     * (previously this threw IndexOutOfBoundsException on oversized input).
     */
    public void DecypherResearches(String s)
    {
        char[] a = s.toCharArray();
        int n = Math.min(a.length, researches.size());
        for (int i = 0; i < n; i++)
        {
            ((YC_ResearchData) researches.get(i)).researched = (a[i] == '1');
        }
    }

    /**
     * Writes one line to the save file: fields separated by the control
     * character (char)4, terminated by a newline.
     */
    public void Save(PrintWriter writer)
    {
        writer.write(PlayerName + (char)4);
        writer.write(String.valueOf(fire));
        writer.write((char)4);
        writer.write(String.valueOf(nature));
        writer.write((char)4);
        writer.write(String.valueOf(water));
        writer.write((char)4);
        writer.write(String.valueOf(ender));
        writer.write((char)4);
        writer.write(String.valueOf(researchLevel));
        writer.write((char)4);
        writer.write(String.valueOf(techLevel));
        writer.write((char)4);
        writer.write(GetResearchDescription());
        writer.println();
    }

    /** Debug hook; output intentionally disabled. */
    public void Print()
    {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.nfa.sharedbuffer;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.typeutils.CompositeTypeSerializerSnapshot;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.api.common.typeutils.base.ListSerializer;
import org.apache.flink.api.common.typeutils.base.TypeSerializerSingleton;
import org.apache.flink.cep.nfa.sharedbuffer.SharedBufferEdge.SharedBufferEdgeSerializer;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.state.KeyedStateBackend;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** An entry in {@link SharedBuffer} that allows to store relations between different entries. */
public class SharedBufferNode {

    // Outgoing edges of this node. Each edge is wrapped in a Lockable so the
    // shared buffer can reference-count how many consumers still point at it.
    private final List<Lockable<SharedBufferEdge>> edges;

    public SharedBufferNode() {
        edges = new ArrayList<>();
    }

    // Package-private: used by the serializer below to rebuild a node.
    SharedBufferNode(List<Lockable<SharedBufferEdge>> edges) {
        this.edges = edges;
    }

    /** Returns the live list of edges (not a defensive copy). */
    public List<Lockable<SharedBufferEdge>> getEdges() {
        return edges;
    }

    /** Adds the given edge wrapped in a {@link Lockable} with an initial ref count of 0. */
    public void addEdge(SharedBufferEdge edge) {
        edges.add(new Lockable<>(edge, 0));
    }

    @Override
    public String toString() {
        return "SharedBufferNode{" + "edges=" + edges + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SharedBufferNode that = (SharedBufferNode) o;
        // Equality is defined solely by the edge list (order-sensitive).
        return Objects.equals(edges, that.edges);
    }

    @Override
    public int hashCode() {
        return Objects.hash(edges);
    }

    /**
     * Serializer for {@link SharedBufferNode}.
     *
     * <p>This serializer had to be deprecated and you cannot directly migrate to the newer version.
     * The new structure requires additional information from other nodes. The migration happens in
     * {@link SharedBuffer#migrateOldState(KeyedStateBackend, ValueState)}.
     *
     * @deprecated was used in <= 1.12, use {@link
     *     org.apache.flink.cep.nfa.sharedbuffer.SharedBufferNodeSerializer} instead.
     */
    @Deprecated
    public static class SharedBufferNodeSerializer
            extends TypeSerializerSingleton<SharedBufferNode> {

        private static final long serialVersionUID = -6687780732295439832L;

        // Serializes only the raw edges; lock counts are NOT part of this
        // (legacy) on-disk format — deserialize() recreates them at 0.
        private final ListSerializer<SharedBufferEdge> edgesSerializer;

        public SharedBufferNodeSerializer() {
            this.edgesSerializer = new ListSerializer<>(new SharedBufferEdgeSerializer());
        }

        // Used by the snapshot to reconstruct the serializer from its nested serializer.
        private SharedBufferNodeSerializer(ListSerializer<SharedBufferEdge> edgesSerializer) {
            this.edgesSerializer = checkNotNull(edgesSerializer);
        }

        @Override
        public boolean isImmutableType() {
            return false;
        }

        @Override
        public SharedBufferNode createInstance() {
            return new SharedBufferNode(new ArrayList<>());
        }

        @Override
        public SharedBufferNode copy(SharedBufferNode from) {
            // This legacy serializer is read-only (deserialization/migration path).
            throw new UnsupportedOperationException("Should not be used");
        }

        @Override
        public SharedBufferNode copy(SharedBufferNode from, SharedBufferNode reuse) {
            return copy(from);
        }

        @Override
        public int getLength() {
            // Variable-length records.
            return -1;
        }

        @Override
        public void serialize(SharedBufferNode record, DataOutputView target) throws IOException {
            // Writing the legacy format is intentionally unsupported.
            throw new UnsupportedOperationException("We should no longer use it for serialization");
        }

        @Override
        public SharedBufferNode deserialize(DataInputView source) throws IOException {
            List<SharedBufferEdge> edges = edgesSerializer.deserialize(source);
            SharedBufferNode node = new SharedBufferNode();
            // addEdge() re-wraps each edge in a Lockable with ref count 0;
            // actual counts are restored later during state migration.
            for (SharedBufferEdge edge : edges) {
                node.addEdge(edge);
            }
            return node;
        }

        @Override
        public SharedBufferNode deserialize(SharedBufferNode reuse, DataInputView source)
                throws IOException {
            return deserialize(source);
        }

        @Override
        public void copy(DataInputView source, DataOutputView target) throws IOException {
            edgesSerializer.copy(source, target);
        }

        // -----------------------------------------------------------------------------------

        @Override
        public TypeSerializerSnapshot<SharedBufferNode> snapshotConfiguration() {
            return new SharedBufferNodeSerializerSnapshot(this);
        }

        /** Serializer configuration snapshot for compatibility and format evolution. */
        @SuppressWarnings("WeakerAccess")
        public static final class SharedBufferNodeSerializerSnapshot
                extends CompositeTypeSerializerSnapshot<
                        SharedBufferNode, SharedBufferNodeSerializer> {

            private static final int VERSION = 1;

            public SharedBufferNodeSerializerSnapshot() {
                super(SharedBufferNodeSerializer.class);
            }

            public SharedBufferNodeSerializerSnapshot(
                    SharedBufferNodeSerializer sharedBufferNodeSerializer) {
                super(sharedBufferNodeSerializer);
            }

            @Override
            protected int getCurrentOuterSnapshotVersion() {
                return VERSION;
            }

            @Override
            @SuppressWarnings("unchecked")
            protected SharedBufferNodeSerializer createOuterSerializerWithNestedSerializers(
                    TypeSerializer<?>[] nestedSerializers) {
                return new SharedBufferNodeSerializer(
                        (ListSerializer<SharedBufferEdge>) nestedSerializers[0]);
            }

            @Override
            protected TypeSerializer<?>[] getNestedSerializers(
                    SharedBufferNodeSerializer outerSerializer) {
                return new TypeSerializer<?>[] {outerSerializer.edgesSerializer};
            }
        }
    }
}
| |
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
// -- This file was mechanically generated: Do not edit! -- //
package java.nio;
import java.io.FileDescriptor;
import sun.misc.Cleaner;
import sun.misc.Unsafe;
import sun.misc.VM;
import sun.nio.ch.DirectBuffer;
// Direct (off-heap) LongBuffer in native byte order, accessed via Unsafe.
// This file is mechanically generated from a template — the code must stay
// in lockstep with the sibling Direct*Buffer* variants; comments only here.
class DirectLongBufferU
    extends LongBuffer
    implements DirectBuffer
{
    // Cached unsafe-access object
    protected static final Unsafe unsafe = Bits.unsafe();
    // Cached array base offset
    private static final long arrayBaseOffset = (long)unsafe.arrayBaseOffset(long[].class);
    // Cached unaligned-access capability
    protected static final boolean unaligned = Bits.unaligned();
    // Base address, used in all indexing calculations
    // NOTE: moved up to Buffer.java for speed in JNI GetDirectBufferAddress
    // protected long address;
    // An object attached to this buffer. If this buffer is a view of another
    // buffer then we use this field to keep a reference to that buffer to
    // ensure that its memory isn't freed before we are done with it.
    private final Object att;
    public Object attachment() {
        return att;
    }
    // Views never own the memory, so they have no Cleaner.
    public Cleaner cleaner() { return null; }
    // For duplicates and slices
    //
    DirectLongBufferU(DirectBuffer db, // package-private
                      int mark, int pos, int lim, int cap,
                      int off)
    {
        super(mark, pos, lim, cap);
        // off is a byte offset into the parent buffer's memory.
        address = db.address() + off;
        // Keep the parent alive while this view exists.
        att = db;
    }
    // Creates a view of the remaining elements sharing this buffer's memory.
    public LongBuffer slice() {
        int pos = this.position();
        int lim = this.limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);
        // << 3: convert long index to byte offset (8 bytes per long).
        int off = (pos << 3);
        assert (off >= 0);
        return new DirectLongBufferU(this, -1, 0, rem, rem, off);
    }
    public LongBuffer duplicate() {
        return new DirectLongBufferU(this,
                                     this.markValue(),
                                     this.position(),
                                     this.limit(),
                                     this.capacity(),
                                     0);
    }
    public LongBuffer asReadOnlyBuffer() {
        return new DirectLongBufferRU(this,
                                      this.markValue(),
                                      this.position(),
                                      this.limit(),
                                      this.capacity(),
                                      0);
    }
    public long address() {
        return address;
    }
    // Byte address of element i (address + i * 8).
    private long ix(int i) {
        return address + ((long)i << 3);
    }
    // Relative get: reads at position, then advances it.
    public long get() {
        return ((unsafe.getLong(ix(nextGetIndex()))));
    }
    // Absolute get with bounds check.
    public long get(int i) {
        return ((unsafe.getLong(ix(checkIndex(i)))));
    }
    // Bulk get: uses a native memory copy when the transfer is large enough
    // to amortize the JNI overhead, otherwise falls back to the loop in super.
    public LongBuffer get(long[] dst, int offset, int length) {
        if (((long)length << 3) > Bits.JNI_COPY_TO_ARRAY_THRESHOLD) {
            checkBounds(offset, length, dst.length);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (length > rem)
                throw new BufferUnderflowException();
            if (order() != ByteOrder.nativeOrder())
                // Copy with byte swapping.
                Bits.copyToLongArray(ix(pos), dst,
                                     (long)offset << 3,
                                     (long)length << 3);
            else
                // Straight memory copy in native order.
                Bits.copyToArray(ix(pos), dst, arrayBaseOffset,
                                 (long)offset << 3,
                                 (long)length << 3);
            position(pos + length);
        } else {
            super.get(dst, offset, length);
        }
        return this;
    }
    // Relative put: writes at position, then advances it.
    public LongBuffer put(long x) {
        unsafe.putLong(ix(nextPutIndex()), ((x)));
        return this;
    }
    // Absolute put with bounds check.
    public LongBuffer put(int i, long x) {
        unsafe.putLong(ix(checkIndex(i)), ((x)));
        return this;
    }
    // Bulk put: direct-to-direct uses copyMemory; heap-backed source uses the
    // array bulk path; anything else falls back to the element loop in super.
    public LongBuffer put(LongBuffer src) {
        if (src instanceof DirectLongBufferU) {
            if (src == this)
                throw new IllegalArgumentException();
            DirectLongBufferU sb = (DirectLongBufferU)src;
            int spos = sb.position();
            int slim = sb.limit();
            assert (spos <= slim);
            int srem = (spos <= slim ? slim - spos : 0);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (srem > rem)
                throw new BufferOverflowException();
            unsafe.copyMemory(sb.ix(spos), ix(pos), (long)srem << 3);
            sb.position(spos + srem);
            position(pos + srem);
        } else if (src.hb != null) {
            // src is heap-backed: copy from its backing array.
            int spos = src.position();
            int slim = src.limit();
            assert (spos <= slim);
            int srem = (spos <= slim ? slim - spos : 0);
            put(src.hb, src.offset + spos, srem);
            src.position(spos + srem);
        } else {
            super.put(src);
        }
        return this;
    }
    // Bulk put from array: mirror image of the bulk get above.
    public LongBuffer put(long[] src, int offset, int length) {
        if (((long)length << 3) > Bits.JNI_COPY_FROM_ARRAY_THRESHOLD) {
            checkBounds(offset, length, src.length);
            int pos = position();
            int lim = limit();
            assert (pos <= lim);
            int rem = (pos <= lim ? lim - pos : 0);
            if (length > rem)
                throw new BufferOverflowException();
            if (order() != ByteOrder.nativeOrder())
                // Copy with byte swapping.
                Bits.copyFromLongArray(src,
                                       (long)offset << 3,
                                       ix(pos),
                                       (long)length << 3);
            else
                // Straight memory copy in native order.
                Bits.copyFromArray(src, arrayBaseOffset,
                                   (long)offset << 3,
                                   ix(pos),
                                   (long)length << 3);
            position(pos + length);
        } else {
            super.put(src, offset, length);
        }
        return this;
    }
    // Moves the remaining elements to the start of the buffer.
    public LongBuffer compact() {
        int pos = position();
        int lim = limit();
        assert (pos <= lim);
        int rem = (pos <= lim ? lim - pos : 0);
        unsafe.copyMemory(ix(pos), ix(0), (long)rem << 3);
        position(rem);
        limit(capacity());
        discardMark();
        return this;
    }
    public boolean isDirect() {
        return true;
    }
    public boolean isReadOnly() {
        return false;
    }
    // The "U" variant always reports the platform's native byte order.
    public ByteOrder order() {
        return ((ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN)
                ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN);
    }
}
| |
/*
* This file is part of dtmlibs.
*
* Copyright (c) 2017 Jeremy Wood
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package dtmlibs.config.examples;
import dtmlibs.config.util.ObjectStringifier;
import org.jetbrains.annotations.NotNull;
import dtmlibs.config.annotation.Comment;
import dtmlibs.config.annotation.Description;
import dtmlibs.config.annotation.HandlePropertyWith;
import dtmlibs.config.annotation.Immutable;
import dtmlibs.config.annotation.SerializableAs;
import dtmlibs.config.annotation.SerializeWith;
import dtmlibs.config.annotation.ValidateWith;
import dtmlibs.config.field.FieldInstance;
import dtmlibs.config.field.PropertyVetoException;
import dtmlibs.config.field.Validator;
import dtmlibs.config.field.VirtualField;
import dtmlibs.config.properties.PropertiesWrapper;
import dtmlibs.config.properties.PropertyAliases;
import dtmlibs.config.properties.PropertyHandler;
import dtmlibs.config.serializers.CustomSerializer2;
import org.jetbrains.annotations.Nullable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
@Comment({"Test the header out", "\"It works,\" they say"})
@SerializableAs("ComprehensiveTestClass")
public class Comprehensive extends PropertiesWrapper {
// --- Expected values used by the test harness to verify (de)serialization ---
// Primitive samples covering each supported primitive type.
public static final int A_INT = 2123012310;
public static final long A_LONG = 1293750971209172093L;
public static final double A_DOUBLE = 304205905924.34925710270957D;
public static final float A_FLOAT = 12305012.3451231F;
public static final short A_SHORT = 12125;
public static final byte A_BYTE = 124;
public static final boolean A_BOOLEAN = true;
public static final char A_CHAR = 'h';
// Big-number and UUID samples (A_UUID is random, so it differs per run).
public static final BigInteger A_BIG_INTEGER = new BigInteger("12395357293415971941723985719273123");
public static final BigDecimal A_BIG_DECIMAL = new BigDecimal("123105810586823404825141235.112038105810831029301581028");
public static final UUID A_UUID = UUID.randomUUID();
// Description/comment text attached to the aInt property via annotations below.
public static final String A_INT_DESCRIPTION = "Just some int";
public static final String A_INT_COMMENT_1 = "Just some int";
public static final String A_INT_COMMENT_2 = "Really.";
public static final String[] A_INT_COMMENTS = {A_INT_COMMENT_1, A_INT_COMMENT_2};
// T_INT backs the transient field tInt (should NOT be persisted).
public static final int T_INT = 5;
public static final String NAME = "Comprehensive";
// Collection samples, including a non-standard List implementation and nesting.
public static final List<String> WORD_LIST = new ArrayList<>();
public static final List<String> WORD_LIST_2 = new CopyOnWriteArrayList<>();
public static final List<List<String>> LIST_LIST = new ArrayList<>();
public static final Child CHILD = new Child(true);
public static final Parent PARENT = new Parent(CHILD);
public static final List<Object> RANDOM_LIST = new ArrayList<>();
public static final Map<String, Object> STRING_OBJECT_MAP = new HashMap<>();
public static final Custom CUSTOM = new Custom("custom");
public static final Locale LOCALE = Locale.ENGLISH;
public static final List<Double> DOUBLE_LIST = new ArrayList<>();
// Populates the collection constants and registers the "cname" property alias.
static {
    WORD_LIST.add("test");
    WORD_LIST.add("lol");
    WORD_LIST_2.add("omg");
    WORD_LIST_2.add("words");
    LIST_LIST.add(WORD_LIST);
    LIST_LIST.add(WORD_LIST_2);
    RANDOM_LIST.add(PARENT);
    RANDOM_LIST.add(CHILD);
    RANDOM_LIST.add(false);
    STRING_OBJECT_MAP.put("parent", PARENT);
    STRING_OBJECT_MAP.put("child", CHILD);
    STRING_OBJECT_MAP.put("String", "String");
    STRING_OBJECT_MAP.put("list", WORD_LIST);
    // The same Custom instance is stored under two keys on purpose
    // (exercises handling of repeated object references).
    STRING_OBJECT_MAP.put("custom1", CUSTOM);
    STRING_OBJECT_MAP.put("custom2", CUSTOM);
    RANDOM_LIST.add(STRING_OBJECT_MAP);
    DOUBLE_LIST.add(123151512615D);
    DOUBLE_LIST.add(62342362.1231231251515D);
    // "cname" becomes a shortcut for the nested property path custom.name.
    PropertyAliases.createAlias(Comprehensive.class, "cname", "custom", "name");
}
// No-arg constructor required for deserialization.
public Comprehensive() { }
/**
 * Validator that accepts a proposed name only when it is non-null and at
 * least four characters long; otherwise the previous value is kept
 * (a silent veto rather than an exception).
 */
public static class NameValidator implements Validator<String> {
    @Nullable
    @Override
    public String validateChange(@Nullable String newValue, @Nullable String oldValue) throws PropertyVetoException {
        final boolean acceptable = newValue != null && newValue.length() >= 4;
        return acceptable ? newValue : oldValue;
    }
}
/**
 * Property handler that maps a comma-separated string onto a list of
 * {@code Simple} values, supporting set/add/remove/clear operations.
 */
public static class SimpleHandler implements PropertyHandler {

    /** Splits {@code value} on commas and wraps each token in a {@code Simple}. */
    private List<Simple> convertToList(String value) {
        final String[] tokens = value.split(",");
        final List<Simple> result = new ArrayList<Simple>(tokens.length);
        for (int i = 0; i < tokens.length; i++) {
            result.add(new Simple(tokens[i]));
        }
        return result;
    }

    /** Replaces the field's value with the parsed list. */
    @Override
    public void set(@NotNull FieldInstance field, @NotNull String newValue) throws PropertyVetoException, UnsupportedOperationException {
        field.setValue(convertToList(newValue));
    }

    /** Appends all parsed values to the field's existing list. */
    @Override
    public void add(@NotNull FieldInstance field, @NotNull String valueToAdd) throws PropertyVetoException, UnsupportedOperationException {
        ((List) field.getValue()).addAll(convertToList(valueToAdd));
    }

    /** Removes all parsed values from the field's existing list. */
    @Override
    public void remove(@NotNull FieldInstance field, @NotNull String valueToRemove) throws PropertyVetoException, UnsupportedOperationException {
        ((List) field.getValue()).removeAll(convertToList(valueToRemove));
    }

    /** Empties the field's list; {@code valueToClear} is ignored. */
    @Override
    public void clear(@NotNull FieldInstance field, @Nullable String valueToClear) throws PropertyVetoException, UnsupportedOperationException {
        ((List) field.getValue()).clear();
    }
}
// Primitive fields initialised from the expected-value constants declared above.
@Description(A_INT_DESCRIPTION)
@Comment({A_INT_COMMENT_1, A_INT_COMMENT_2})
public int aInt = A_INT;
public long aLong = A_LONG;
public double aDouble = A_DOUBLE;
public float aFloat = A_FLOAT;
public short aShort = A_SHORT;
public byte aByte = A_BYTE;
public boolean aBoolean = A_BOOLEAN;
public char aChar = A_CHAR;
// Reference scalar types.
public BigInteger aBigInteger = A_BIG_INTEGER;
public BigDecimal aBigDecimal = A_BIG_DECIMAL;
public UUID aUUID = A_UUID;
public List<Double> doubleList = DOUBLE_LIST;
// transient field; note it still participates in equals()/hashCode() below.
public transient int tInt = T_INT;
// NameValidator (above) rejects replacement names shorter than four characters.
@ValidateWith(NameValidator.class)
public String name = NAME;
// Collection fields get fresh copies so each instance can mutate independently.
public List<String> wordList = new ArrayList<>(WORD_LIST);
public List<String> wordList2 = new ArrayList<>(WORD_LIST_2);
public List<List<String>> listList = new ArrayList<>(LIST_LIST);
public List<Object> randomList = new ArrayList<>(RANDOM_LIST);
public Map<String, Object> stringObjectMap = new HashMap<>(STRING_OBJECT_MAP);
public final Custom custom = new Custom(CUSTOM.name);
// Serialized via CustomSerializer2 rather than Custom's default handling.
@SerializeWith(CustomSerializer2.class)
public Custom custom2 = new Custom(CUSTOM.name);
// @Immutable presumably marks the property as unmodifiable — TODO confirm framework semantics.
@Immutable
public String immutableString = NAME;
public final Simple simple = new Simple();
public final String finalString = NAME;
// Virtual (computed) field backed by the AnumField implementation below.
public final VirtualField<Anum> virtualEnum = new AnumField();
/** Backing implementation for the {@code virtualEnum} virtual field. */
static class AnumField implements VirtualField<Anum> {
    // Current value; starts at Anum.A.
    private Anum actual = Anum.A;
    @Override
    public Anum get() {
        return actual;
    }
    @Override
    public void set(final Anum newValue) {
        actual = newValue;
    }
    /** Two AnumField instances are equal when they hold the same enum constant. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return actual == ((AnumField) o).actual;
    }
    @Override
    public int hashCode() {
        return actual.hashCode();
    }
}
public FakeEnum fakeEnum = FakeEnum.FAKE_2;
public Locale locale = LOCALE;
// Virtual-field properties exercising wildcard/typed generics;
// deliberately excluded from equals() (see the commented-out checks there).
private VirtualField<List<?>> testWildCardListVirtualProp;
private VirtualField<?> testWildCardVirtualProp;
private VirtualField<List<String>> testTypedVirtualProp;
private List<?> genericList = new ArrayList();
// SimpleHandler (above) parses comma-separated strings into Simple elements.
@HandlePropertyWith(SimpleHandler.class)
public List<Simple> simpleList = new ArrayList<Simple>();
// Instance initializer: every instance starts with one element.
{
simpleList.add(new Simple("test"));
}
/**
 * Value equality over every persistent field. The comparison order and
 * short-circuit behaviour mirror the field declaration order; the three
 * virtual-field properties are deliberately excluded.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    Comprehensive other = (Comprehensive) o;
    return aInt == other.aInt
            && aLong == other.aLong
            && Double.compare(other.aDouble, aDouble) == 0
            && Float.compare(other.aFloat, aFloat) == 0
            && aShort == other.aShort
            && aByte == other.aByte
            && aBoolean == other.aBoolean
            && aChar == other.aChar
            && tInt == other.tInt
            && aBigInteger.equals(other.aBigInteger)
            && aBigDecimal.equals(other.aBigDecimal)
            && name.equals(other.name)
            && wordList.equals(other.wordList)
            && wordList2.equals(other.wordList2)
            && listList.equals(other.listList)
            && randomList.equals(other.randomList)
            && stringObjectMap.equals(other.stringObjectMap)
            && custom.equals(other.custom)
            && custom2.equals(other.custom2)
            && immutableString.equals(other.immutableString)
            && simple.equals(other.simple)
            && finalString.equals(other.finalString)
            && virtualEnum.equals(other.virtualEnum)
            && fakeEnum.equals(other.fakeEnum)
            && locale.equals(other.locale)
            && aUUID.equals(other.aUUID)
            && doubleList.equals(other.doubleList)
            // testWildCardListVirtualProp / testWildCardVirtualProp /
            // testTypedVirtualProp intentionally not compared.
            && genericList.equals(other.genericList)
            && simpleList.equals(other.simpleList);
}
/**
 * Hash over the same fields that {@link #equals(Object)} compares, using the
 * conventional 31-multiplier accumulation. The field order is fixed so the
 * resulting value is identical to the previous implementation.
 */
@Override
public int hashCode() {
    int h = aInt;
    h = 31 * h + (int) (aLong ^ (aLong >>> 32));
    long doubleBits = Double.doubleToLongBits(aDouble);
    h = 31 * h + (int) (doubleBits ^ (doubleBits >>> 32));
    h = 31 * h + (aFloat != +0.0f ? Float.floatToIntBits(aFloat) : 0);
    h = 31 * h + (int) aShort;
    h = 31 * h + (int) aByte;
    h = 31 * h + (aBoolean ? 1 : 0);
    h = 31 * h + (int) aChar;
    h = 31 * h + aBigInteger.hashCode();
    h = 31 * h + aBigDecimal.hashCode();
    h = 31 * h + tInt;
    h = 31 * h + name.hashCode();
    h = 31 * h + wordList.hashCode();
    h = 31 * h + wordList2.hashCode();
    h = 31 * h + listList.hashCode();
    h = 31 * h + randomList.hashCode();
    h = 31 * h + stringObjectMap.hashCode();
    h = 31 * h + custom.hashCode();
    h = 31 * h + custom2.hashCode();
    h = 31 * h + immutableString.hashCode();
    h = 31 * h + simple.hashCode();
    h = 31 * h + finalString.hashCode();
    h = 31 * h + virtualEnum.hashCode();
    h = 31 * h + fakeEnum.hashCode();
    h = 31 * h + locale.hashCode();
    h = 31 * h + genericList.hashCode();
    h = 31 * h + simpleList.hashCode();
    h = 31 * h + aUUID.hashCode();
    h = 31 * h + doubleList.hashCode();
    return h;
}
/** Renders all fields via the reflective stringifier helper. */
@Override
public String toString() {
return ObjectStringifier.toString(this);
}
}
| |
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer.rtsp;
import static com.google.common.truth.Truth.assertThat;
import android.net.Uri;
import androidx.annotation.Nullable;
import androidx.media3.common.ParserException;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.rtsp.RtspClient.PlaybackEventListener;
import androidx.media3.exoplayer.rtsp.RtspClient.SessionInfoListener;
import androidx.media3.exoplayer.rtsp.RtspMediaSource.RtspPlaybackException;
import androidx.media3.test.utils.robolectric.RobolectricUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.SocketFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests the {@link RtspClient} using the {@link RtspServer}. */
@RunWith(AndroidJUnit4.class)
public final class RtspClientTest {
// Minimal SDP answer advertising a 0-50.46s playback range.
private static final String SESSION_DESCRIPTION =
"v=0\r\n"
+ "o=- 1606776316530225 1 IN IP4 127.0.0.1\r\n"
+ "s=Exoplayer test\r\n"
+ "t=0 0\r\n"
+ "a=range:npt=0-50.46\r\n";
// No-op playback listener for tests that only exercise session setup.
private static final RtspClient.PlaybackEventListener EMPTY_PLAYBACK_LISTENER =
new PlaybackEventListener() {
@Override
public void onRtspSetupCompleted() {}
@Override
public void onPlaybackStarted(
long startPositionUs, ImmutableList<RtspTrackTiming> trackTimingList) {}
@Override
public void onPlaybackError(RtspPlaybackException error) {}
};
private ImmutableList<RtpPacketStreamDump> rtpPacketStreamDumps;
private RtspClient rtspClient;
private RtspServer rtspServer;
@Before
public void setUp() throws Exception {
// Three dumped streams; only two are expected to surface as tracks.
rtpPacketStreamDumps =
ImmutableList.of(
RtspTestUtils.readRtpPacketStreamDump("media/rtsp/h264-dump.json"),
RtspTestUtils.readRtpPacketStreamDump("media/rtsp/aac-dump.json"),
// MP4A-LATM is not supported at the moment.
RtspTestUtils.readRtpPacketStreamDump("media/rtsp/mp4a-latm-dump.json"));
}
@After
public void tearDown() {
// closeQuietly tolerates null, so partially-initialised tests tear down cleanly.
Util.closeQuietly(rtspServer);
Util.closeQuietly(rtspClient);
}
// Verifies every RTSP connection is opened through the injected SocketFactory.
@Test
public void connectServerAndClient_usesCustomSocketFactory() throws Exception {
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(
/* status= */ 200,
new RtspHeaders.Builder().add(RtspHeaders.PUBLIC, "OPTIONS, DESCRIBE").build());
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
return RtspTestUtils.newDescribeResponseWithSdpMessage(
SESSION_DESCRIPTION, rtpPacketStreamDumps, requestedUri);
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicBoolean didCallCreateSocket = new AtomicBoolean();
// Records that a socket was requested, then delegates to the default factory.
SocketFactory socketFactory =
new SocketFactory() {
@Override
public Socket createSocket(String host, int port) throws IOException {
didCallCreateSocket.set(true);
return SocketFactory.getDefault().createSocket(host, port);
}
@Override
public Socket createSocket(String s, int i, InetAddress inetAddress, int i1)
throws IOException {
didCallCreateSocket.set(true);
return SocketFactory.getDefault().createSocket(s, i, inetAddress, i1);
}
@Override
public Socket createSocket(InetAddress inetAddress, int i) throws IOException {
didCallCreateSocket.set(true);
return SocketFactory.getDefault().createSocket(inetAddress, i);
}
@Override
public Socket createSocket(
InetAddress inetAddress, int i, InetAddress inetAddress1, int i1) throws IOException {
didCallCreateSocket.set(true);
return SocketFactory.getDefault().createSocket(inetAddress, i, inetAddress1, i1);
}
};
AtomicReference<ImmutableList<RtspMediaTrack>> tracksInSession = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {
tracksInSession.set(tracks);
}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
socketFactory,
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> tracksInSession.get() != null);
assertThat(didCallCreateSocket.get()).isTrue();
}
// Happy path: OPTIONS advertises DESCRIBE, so the session timeline is populated.
@Test
public void connectServerAndClient_serverSupportsDescribe_updatesSessionTimeline()
throws Exception {
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(
/* status= */ 200,
new RtspHeaders.Builder().add(RtspHeaders.PUBLIC, "OPTIONS, DESCRIBE").build());
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
return RtspTestUtils.newDescribeResponseWithSdpMessage(
SESSION_DESCRIPTION, rtpPacketStreamDumps, requestedUri);
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicReference<ImmutableList<RtspMediaTrack>> tracksInSession = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {
tracksInSession.set(tracks);
}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
SocketFactory.getDefault(),
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> tracksInSession.get() != null);
// Only the H.264 and AAC tracks are parsed; the MP4A-LATM dump is skipped.
assertThat(tracksInSession.get()).hasSize(2);
assertThat(rtspClient.getState()).isEqualTo(RtspClient.RTSP_STATE_UNINITIALIZED);
}
// The first DESCRIBE answers 301 with a Location header; the client must follow it.
@Test
public void connectServerAndClient_describeRedirects_updatesSessionTimeline() throws Exception {
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(/* status= */ 200, RtspHeaders.EMPTY);
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
if (!requestedUri.getPath().contains("redirect")) {
return new RtspResponse(
301,
new RtspHeaders.Builder()
.add(
RtspHeaders.LOCATION,
requestedUri.buildUpon().appendEncodedPath("redirect").build().toString())
.build());
}
return RtspTestUtils.newDescribeResponseWithSdpMessage(
SESSION_DESCRIPTION, rtpPacketStreamDumps, requestedUri);
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicReference<ImmutableList<RtspMediaTrack>> tracksInSession = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {
tracksInSession.set(tracks);
}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
SocketFactory.getDefault(),
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> tracksInSession.get() != null);
assertThat(tracksInSession.get()).hasSize(2);
assertThat(rtspClient.getState()).isEqualTo(RtspClient.RTSP_STATE_UNINITIALIZED);
}
// OPTIONS returns 200 without a Public header; the client should still try DESCRIBE.
@Test
public void
connectServerAndClient_serverSupportsDescribeNoHeaderInOptions_updatesSessionTimeline()
throws Exception {
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(/* status= */ 200, RtspHeaders.EMPTY);
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
return RtspTestUtils.newDescribeResponseWithSdpMessage(
SESSION_DESCRIPTION, rtpPacketStreamDumps, requestedUri);
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicReference<ImmutableList<RtspMediaTrack>> tracksInSession = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {
tracksInSession.set(tracks);
}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
SocketFactory.getDefault(),
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> tracksInSession.get() != null);
assertThat(tracksInSession.get()).hasSize(2);
assertThat(rtspClient.getState()).isEqualTo(RtspClient.RTSP_STATE_UNINITIALIZED);
}
// OPTIONS omits DESCRIBE, so the client must fail fast without ever sending DESCRIBE.
@Test
public void connectServerAndClient_serverDoesNotSupportDescribe_doesNotUpdateTimeline()
throws Exception {
AtomicBoolean clientHasSentDescribeRequest = new AtomicBoolean();
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(
/* status= */ 200,
new RtspHeaders.Builder().add(RtspHeaders.PUBLIC, "OPTIONS").build());
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
clientHasSentDescribeRequest.set(true);
return RtspTestUtils.RTSP_ERROR_METHOD_NOT_ALLOWED;
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicReference<String> failureMessage = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {
failureMessage.set(message);
}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
SocketFactory.getDefault(),
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> failureMessage.get() != null);
assertThat(failureMessage.get()).contains("DESCRIBE not supported.");
assertThat(clientHasSentDescribeRequest.get()).isFalse();
assertThat(rtspClient.getState()).isEqualTo(RtspClient.RTSP_STATE_UNINITIALIZED);
}
// A DESCRIBE response with an invalid SDP body must surface a ParserException.
@Test
public void connectServerAndClient_malformedSdpInDescribeResponse_doesNotUpdateTimeline()
throws Exception {
class ResponseProvider implements RtspServer.ResponseProvider {
@Override
public RtspResponse getOptionsResponse() {
return new RtspResponse(
/* status= */ 200,
new RtspHeaders.Builder().add(RtspHeaders.PUBLIC, "OPTIONS, DESCRIBE").build());
}
@Override
public RtspResponse getDescribeResponse(Uri requestedUri, RtspHeaders headers) {
// This session description is missing the required o, t and s tags.
return RtspTestUtils.newDescribeResponseWithSdpMessage(
/* sessionDescription= */ "v=0\r\n", rtpPacketStreamDumps, requestedUri);
}
}
rtspServer = new RtspServer(new ResponseProvider());
AtomicReference<Throwable> failureCause = new AtomicReference<>();
rtspClient =
new RtspClient(
new SessionInfoListener() {
@Override
public void onSessionTimelineUpdated(
RtspSessionTiming timing, ImmutableList<RtspMediaTrack> tracks) {}
@Override
public void onSessionTimelineRequestFailed(
String message, @Nullable Throwable cause) {
failureCause.set(cause);
}
},
EMPTY_PLAYBACK_LISTENER,
/* userAgent= */ "ExoPlayer:RtspClientTest",
RtspTestUtils.getTestUri(rtspServer.startAndGetPortNumber()),
SocketFactory.getDefault(),
/* debugLoggingEnabled= */ false);
rtspClient.start();
RobolectricUtil.runMainLooperUntil(() -> failureCause.get() != null);
assertThat(failureCause.get()).hasCauseThat().isInstanceOf(ParserException.class);
assertThat(rtspClient.getState()).isEqualTo(RtspClient.RTSP_STATE_UNINITIALIZED);
}
}
| |
/*
* Copyright (c) 2002-2021, City of Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.portal.web.user;
import fr.paris.lutece.portal.business.user.AdminUser;
import fr.paris.lutece.portal.business.user.AdminUserHome;
import fr.paris.lutece.portal.business.user.authentication.LuteceDefaultAdminUser;
import fr.paris.lutece.portal.business.user.log.UserLog;
import fr.paris.lutece.portal.business.user.log.UserLogHome;
import fr.paris.lutece.portal.service.admin.AccessDeniedException;
import fr.paris.lutece.portal.service.admin.AdminAuthenticationService;
import fr.paris.lutece.portal.service.admin.AdminUserService;
import fr.paris.lutece.portal.service.i18n.I18nService;
import fr.paris.lutece.portal.service.init.AppInfo;
import fr.paris.lutece.portal.service.mail.MailService;
import fr.paris.lutece.portal.service.message.AdminMessage;
import fr.paris.lutece.portal.service.message.AdminMessageService;
import fr.paris.lutece.portal.service.portal.PortalService;
import fr.paris.lutece.portal.service.security.SecurityTokenService;
import fr.paris.lutece.portal.service.spring.SpringContextService;
import fr.paris.lutece.portal.service.template.AppTemplateService;
import fr.paris.lutece.portal.service.util.AppException;
import fr.paris.lutece.portal.service.util.AppHTTPSService;
import fr.paris.lutece.portal.service.util.AppLogService;
import fr.paris.lutece.portal.service.util.AppPathService;
import fr.paris.lutece.portal.service.util.AppPropertiesService;
import fr.paris.lutece.portal.web.constants.Messages;
import fr.paris.lutece.portal.web.constants.Parameters;
import fr.paris.lutece.portal.web.l10n.LocaleService;
import fr.paris.lutece.util.ReferenceList;
import fr.paris.lutece.util.html.HtmlTemplate;
import fr.paris.lutece.util.http.SecurityUtil;
import fr.paris.lutece.util.password.IPasswordFactory;
import fr.paris.lutece.util.string.StringUtil;
import fr.paris.lutece.util.url.UrlItem;
import java.io.IOException;
import org.apache.commons.lang.StringUtils;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* This class provides the user interface to manage login features ( login, logout, ... )
*/
public class AdminLoginJspBean implements Serializable
{
/**
 * Serial version UID
 */
private static final long serialVersionUID = 1437296329596757569L;
// //////////////////////////////////////////////////////////////////////////
// Constants
private static final String ERROR_INVALID_TOKEN = "Invalid security token";
private static final String CONSTANT_EMAIL_DELIMITER = ";";
private static final String CONSTANT_EMPTY_STRING = "";
private static final String CONSTANT_SLASH = "/";
private static final String CONSTANT_HTTP = "http";
// Matches a purely numeric identifier.
private static final String REGEX_ID = "^[\\d]+$";
// Jsp
// NOTE(review): "PASSOWRD" is a typo in the identifier (the value is correct);
// renaming would touch every caller, so it is only flagged here.
private static final String JSP_URL_MODIFY_DEFAULT_USER_PASSOWRD = "jsp/admin/user/ModifyDefaultUserPassword.jsp";
private static final String JSP_URL_FORM_CONTACT = "AdminFormContact.jsp";
private static final String JSP_URL_DO_ADMIN_LOGIN = "jsp/admin/DoAdminLogin.jsp";
private static final String JSP_URL_ADMIN_LOGIN = "jsp/admin/AdminLogin.jsp";
// Templates
private static final String TEMPLATE_ADMIN_LOGIN = "admin/admin_login.html";
private static final String TEMPLATE_ADMIN_FORGOT_PASSWORD = "admin/admin_forgot_password.html";
private static final String TEMPLATE_ADMIN_RESET_PASSWORD = "admin/admin_reset_password.html";
private static final String TEMPLATE_ADMIN_FORGOT_LOGIN = "admin/admin_forgot_login.html";
private static final String TEMPLATE_ADMIN_FORM_CONTACT = "admin/admin_form_contact.html";
private static final String TEMPLATE_ADMIN_EMAIL_FORGOT_PASSWORD = "admin/admin_email_forgot_password.html";
private static final String TEMPLATE_ADMIN_EMAIL_FORGOT_LOGIN = "admin/admin_email_forgot_login.html";
// Markers (keys placed into the template model)
private static final String MARK_PARAMS_LIST = "params_list";
private static final String MARK_FORGOT_PASSWORD_URL = "forgot_password_url";
private static final String MARK_FORGOT_LOGIN_URL = "forgot_login_url";
private static final String MARK_PARAM_VERSION = "version";
private static final String MARK_SITE_NAME = "site_name";
private static final String MARK_LOGIN_URL = "login_url";
private static final String MARK_DO_ADMIN_LOGIN_URL = "do_admin_login_url";
private static final String MARK_SITE_LINK = "site_link";
private static final String MARK_LOGIN = "login";
private static final String MARK_USER_ID = "user_id";
private static final String MARK_TOKEN = "token";
private static final String MARK_TIMESTAMP = "timestamp";
private static final String MARK_RESET_TOKEN_VALIDITY = "reset_password_validity";
private static final String MARK_LOCK_RESET_TOKEN_TO_SESSION = "lock_reset_token_to_session";
private static final String SESSION_ATTRIBUTE_USER = "lutece_admin_user"; // Used by all JSP
// parameters
private static final String PARAMETER_MESSAGE = "message_contact";
private static final String PARAMETER_TOKEN = "token";
private static final String PARAMETER_TIMESTAMP = "ts";
// I18n message keys
private static final String MESSAGE_SENDING_SUCCESS = "portal.admin.message.admin_forgot_password.sendingSuccess";
private static final String MESSAGE_ADMIN_SENDING_SUCCESS = "portal.admin.message.admin_form_contact.sendingSuccess";
private static final String MESSAGE_EMAIL_SUBJECT = "portal.admin.admin_forgot_password.email.subject";
private static final String MESSAGE_FORGOT_LOGIN_EMAIL_SUBJECT = "portal.admin.admin_forgot_login.email.subject";
private static final String MESSAGE_FORGOT_LOGIN_SENDING_SUCCESS = "portal.admin.message.admin_forgot_login.sendingSuccess";
private static final String MESSAGE_EMAIL_ADMIN_SUBJECT = "portal.admin.admin_form_contact.email.subject";
private static final String MESSAGE_WRONG_EMAIL_FORMAT = "portal.admin.message.admin_forgot_login.wrongEmailFormat";
private static final String MESSAGE_CONTROL_PASSWORD_NO_CORRESPONDING = "portal.users.message.password.confirm.error";
private static final String MESSAGE_INVALID_RESET_TOKEN = "portal.admin.message.invalid.reset.token";
private static final String MESSAGE_EXPIRED_RESET_TOKEN = "portal.admin.message.expired.reset.token";
// NOTE(review): "PASSORWD" is a typo in the identifier (the value is correct).
private static final String MESSAGE_RESET_PASSORWD_SUCCESS = "portal.admin.message.reset.password.success";
// Properties
private static final String PROPERTY_LEVEL = "askPasswordReinitialization.admin.level";
/**
* Returns the view of login form
*
* @param request
* The request
* @param response
* The response
* @return The HTML form
* @throws IOException
* when redirection doesn't work
*/
public String getLogin( HttpServletRequest request, HttpServletResponse response ) throws IOException
{
HashMap<String, Object> model = new HashMap<>( );
// NOTE(review): request.getSession( ) never returns null (it creates a
// session on demand), so this guard is vestigial but harmless.
HttpSession session = request.getSession( );
if ( session != null )
{
// Invalidate a previous session
session.removeAttribute( SESSION_ATTRIBUTE_USER );
// Put real base url in session
request.getSession( ).setAttribute( AppPathService.SESSION_BASE_URL, AppPathService.getBaseUrl( request ) );
// If the configured login page differs from the default admin login JSP,
// redirect to it and return null: the response is already committed.
if ( !JSP_URL_ADMIN_LOGIN.equals( AdminAuthenticationService.getInstance( ).getLoginPageUrl( ) ) )
{
String strRedirectUrl = AdminAuthenticationService.getInstance( ).getLoginPageUrl( );
if ( strRedirectUrl == null )
{
strRedirectUrl = AppPathService.getAdminMenuUrl( );
}
response.sendRedirect( AppPathService.resolveRedirectUrl( request, strRedirectUrl ).getUrl( ) );
return null;
}
}
Locale locale = AdminUserService.getLocale( request );
// Forward every incoming request parameter to the template.
Enumeration<String> enumParams = request.getParameterNames( );
ReferenceList listParams = new ReferenceList( );
String strParamName;
while ( enumParams.hasMoreElements( ) )
{
strParamName = enumParams.nextElement( );
String strParamValue = request.getParameter( strParamName );
listParams.addItem( strParamName, strParamValue );
}
// Build the absolute URL of the login-processing JSP, preferring HTTPS when enabled.
StringBuilder sbUrl = new StringBuilder( );
if ( AppHTTPSService.isHTTPSSupportEnabled( ) )
{
sbUrl.append( AppHTTPSService.getHTTPSUrl( request ) );
}
else
{
sbUrl.append( AppPathService.getBaseUrl( request ) );
}
if ( !sbUrl.toString( ).endsWith( CONSTANT_SLASH ) )
{
sbUrl.append( CONSTANT_SLASH );
}
sbUrl.append( JSP_URL_DO_ADMIN_LOGIN );
model.put( MARK_PARAM_VERSION, AppInfo.getVersion( ) );
model.put( MARK_SITE_NAME, PortalService.getSiteName( ) );
model.put( MARK_PARAMS_LIST, listParams );
model.put( MARK_FORGOT_PASSWORD_URL, AdminAuthenticationService.getInstance( ).getLostPasswordPageUrl( ) );
model.put( MARK_FORGOT_LOGIN_URL, AdminAuthenticationService.getInstance( ).getLostLoginPageUrl( ) );
model.put( MARK_DO_ADMIN_LOGIN_URL, sbUrl.toString( ) );
// CSRF token: checked by doLogin( ) before processing credentials.
model.put( MARK_TOKEN, SecurityTokenService.getInstance( ).getToken( request, TEMPLATE_ADMIN_LOGIN ) );
HtmlTemplate template = AppTemplateService.getTemplate( TEMPLATE_ADMIN_LOGIN, locale, model );
return template.getHtml( );
}
/**
* Returns the view of forgot password form
*
* @param request
* The request
* @return The HTML form
*/
public String getForgotPassword( HttpServletRequest request )
{
    Map<String, Object> model = new HashMap<>( );
    // Drop any authenticated admin user from the current session.
    HttpSession httpSession = request.getSession( );
    if ( httpSession != null )
    {
        httpSession.removeAttribute( SESSION_ATTRIBUTE_USER );
    }
    Locale locale = AdminUserService.getLocale( request );
    // Forward every incoming request parameter to the template.
    ReferenceList requestParams = new ReferenceList( );
    for ( Enumeration<String> paramNames = request.getParameterNames( ); paramNames.hasMoreElements( ); )
    {
        String paramName = paramNames.nextElement( );
        requestParams.addItem( paramName, request.getParameter( paramName ) );
    }
    model.put( MARK_PARAM_VERSION, AppInfo.getVersion( ) );
    model.put( MARK_PARAMS_LIST, requestParams );
    model.put( MARK_SITE_NAME, PortalService.getSiteName( ) );
    return AppTemplateService.getTemplate( TEMPLATE_ADMIN_FORGOT_PASSWORD, locale, model ).getHtml( );
}
/**
 * Returns the view of the reset password form
 *
 * @param request
 *            The request
 * @return The HTML form
 */
public String getResetPassword( HttpServletRequest request )
{
    // Drop any authenticated admin user; do not create a session just for this.
    HttpSession httpSession = request.getSession( false );
    if ( httpSession != null )
    {
        httpSession.removeAttribute( SESSION_ATTRIBUTE_USER );
    }
    Map<String, Object> model = new HashMap<>( );
    Locale locale = AdminUserService.getLocale( request );
    // Forward every incoming request parameter to the template.
    ReferenceList requestParams = new ReferenceList( );
    for ( Enumeration<String> paramNames = request.getParameterNames( ); paramNames.hasMoreElements( ); )
    {
        String paramName = paramNames.nextElement( );
        requestParams.addItem( paramName, request.getParameter( paramName ) );
    }
    model.put( MARK_PARAM_VERSION, AppInfo.getVersion( ) );
    model.put( MARK_PARAMS_LIST, requestParams );
    model.put( MARK_SITE_NAME, PortalService.getSiteName( ) );
    return AppTemplateService.getTemplate( TEMPLATE_ADMIN_RESET_PASSWORD, locale, model ).getHtml( );
}
/**
* Returns the view of forgot password form
*
* @param request
* The request
* @return The HTML form
*/
public String getForgotLogin( HttpServletRequest request )
{
    Map<String, Object> model = new HashMap<>( );
    // Drop any authenticated admin user from the current session.
    HttpSession httpSession = request.getSession( );
    if ( httpSession != null )
    {
        httpSession.removeAttribute( SESSION_ATTRIBUTE_USER );
    }
    Locale locale = AdminUserService.getLocale( request );
    // Forward every incoming request parameter to the template.
    ReferenceList requestParams = new ReferenceList( );
    for ( Enumeration<String> paramNames = request.getParameterNames( ); paramNames.hasMoreElements( ); )
    {
        String paramName = paramNames.nextElement( );
        requestParams.addItem( paramName, request.getParameter( paramName ) );
    }
    model.put( MARK_PARAM_VERSION, AppInfo.getVersion( ) );
    model.put( MARK_PARAMS_LIST, requestParams );
    model.put( MARK_SITE_NAME, PortalService.getSiteName( ) );
    return AppTemplateService.getTemplate( TEMPLATE_ADMIN_FORGOT_LOGIN, locale, model ).getHtml( );
}
/**
 * Get the admin contact form.
 *
 * @param request
 *            The Http request
 * @return The HTML form
 */
public String getFormContact( HttpServletRequest request )
{
    HashMap<String, Object> model = new HashMap<>( );

    // Invalidate a previous session without creating a new one :
    // getSession( ) with no argument would always create a session, making the
    // null check below dead code (the same pattern as getAdminResetPassword)
    HttpSession session = request.getSession( false );

    if ( session != null )
    {
        session.removeAttribute( SESSION_ATTRIBUTE_USER );
    }

    Locale locale = AdminUserService.getLocale( request );

    model.put( MARK_PARAM_VERSION, AppInfo.getVersion( ) );

    HtmlTemplate template = AppTemplateService.getTemplate( TEMPLATE_ADMIN_FORM_CONTACT, locale, model );

    return template.getHtml( );
}
/**
 * Process the login of user.
 *
 * @param request
 *            The HTTP Request
 * @return The Jsp URL of the process result
 * @throws AccessDeniedException
 *             if the security token posted with the login form is invalid
 */
public String doLogin( HttpServletRequest request ) throws AccessDeniedException
{
    // Refuse credentials sent over plain HTTP when HTTPS support is enabled ;
    // the user is sent back to the login page instead
    if ( request.getScheme( ).equals( CONSTANT_HTTP ) && AppHTTPSService.isHTTPSSupportEnabled( ) )
    {
        return JSP_URL_ADMIN_LOGIN;
    }

    // Validate the security token bound to the login template (anti-CSRF)
    if ( !SecurityTokenService.getInstance( ).validate( request, TEMPLATE_ADMIN_LOGIN ) )
    {
        throw new AccessDeniedException( ERROR_INVALID_TOKEN );
    }

    // recovery of the login attributes
    String strAccessCode = request.getParameter( Parameters.ACCESS_CODE );
    String strPassword = request.getParameter( Parameters.PASSWORD );

    if ( strAccessCode == null || strPassword == null )
    {
        // TIME RESISTANT ATTACK
        // Computation time is equal to the time needed by a legitimate user :
        // authentication is still attempted with empty credentials so missing
        // parameters cannot be distinguished from wrong ones by timing
        strAccessCode = "";
        strPassword = "";
    }

    String strLoginUrl = AdminAuthenticationService.getInstance( ).getLoginPageUrl( );

    try
    {
        AdminAuthenticationService.getInstance( ).loginUser( request, strAccessCode, strPassword );
    }
    catch( FailedLoginException ex )
    {
        // Creating a record of connections log
        UserLog userLog = new UserLog( );
        userLog.setAccessCode( strAccessCode );
        userLog.setIpAddress( SecurityUtil.getRealIp( request ) );
        userLog.setDateLogin( new java.sql.Timestamp( new java.util.Date( ).getTime( ) ) );
        userLog.setLoginStatus( UserLog.LOGIN_DENIED ); // will be inserted only if access denied
        UserLogHome.addUserLog( userLog );

        return AdminMessageService.getMessageUrl( request, Messages.MESSAGE_AUTH_FAILURE, strLoginUrl, AdminMessage.TYPE_STOP );
    }
    catch( LoginException ex )
    {
        // Technical failure (as opposed to bad credentials) : log it, but show
        // the same generic failure message to the user
        AppLogService.error( "Error during connection for user access code :" + SecurityUtil.logForgingProtect( strAccessCode ), ex );

        return AdminMessageService.getMessageUrl( request, Messages.MESSAGE_AUTH_FAILURE, strLoginUrl, AdminMessage.TYPE_STOP );
    }

    UrlItem url;
    // Login succeeded : look the user up to decide where to redirect next
    AdminUser user = AdminUserHome.findUserByLogin( strAccessCode );

    if ( user.isPasswordReset( ) )
    {
        // Account is flagged for a forced password change
        String strRedirectUrl = AdminMessageService.getMessageUrl( request, Messages.MESSAGE_USER_MUST_CHANGE_PASSWORD,
                JSP_URL_MODIFY_DEFAULT_USER_PASSOWRD, AdminMessage.TYPE_ERROR );
        url = new UrlItem( strRedirectUrl );
    }
    else
    {
        // Honour the "next url" recorded before authentication, if any ;
        // otherwise fall back to the admin menu
        String strNextUrl = AdminAuthenticationService.getInstance( ).getLoginNextUrl( request );

        if ( StringUtils.isNotBlank( strNextUrl ) )
        {
            url = new UrlItem( strNextUrl );
        }
        else
        {
            url = AppPathService.resolveRedirectUrl( request, AppPathService.getAdminMenuUrl( ) );
        }
    }

    return url.getUrl( );
}
/**
 * Process the sending of the user's password reset token by e-mail.
 *
 * @param request
 *            The HTTP Request
 * @return The Jsp URL of the process result
 */
public String doForgotPassword( HttpServletRequest request )
{
    // get the access code posted by the user
    String strAccessCode = request.getParameter( Parameters.ACCESS_CODE );
    Locale locale = AdminUserService.getLocale( request );

    if ( StringUtils.isEmpty( strAccessCode ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    if ( locale == null )
    {
        locale = LocaleService.getDefault( );
    }

    // if user or mail not found, redirect to the contact form rather than an
    // error message, so the existence of the access code is not revealed
    AdminUser user = AdminUserHome.findUserByLogin( strAccessCode );

    if ( ( user == null ) || StringUtils.isEmpty( user.getEmail( ) ) )
    {
        return JSP_URL_FORM_CONTACT;
    }

    // make the password reset token ; the timestamp is an input of the token
    // and is checked against the validity window in doResetPassword
    Date timestamp = new Date( );
    String strToken = AdminUserService.getUserPasswordResetToken( user, timestamp, request );

    // send the password reset token by e-mail
    String strSenderEmail = MailService.getNoReplyEmail( );
    String strEmailSubject = I18nService.getLocalizedString( MESSAGE_EMAIL_SUBJECT, locale );
    HashMap<String, Object> model = new HashMap<>( );
    model.put( MARK_TOKEN, strToken );
    model.put( MARK_TIMESTAMP, timestamp.getTime( ) );
    model.put( MARK_USER_ID, user.getUserId( ) );
    model.put( MARK_LOGIN_URL, AppPathService.getBaseUrl( request ) + "jsp/admin/AdminResetPassword.jsp" );
    model.put( MARK_SITE_LINK, MailService.getSiteLink( AppPathService.getBaseUrl( request ), false ) );

    // validity window expressed in minutes by the security parameter
    Date tokenExpiryDate = new Date(
            timestamp.getTime( ) + ( 1000L * 60 * AdminUserService.getIntegerSecurityParameter( AdminUserService.DSKEY_RESET_TOKEN_VALIDITY ) ) );
    model.put( MARK_RESET_TOKEN_VALIDITY, tokenExpiryDate );
    model.put( MARK_LOCK_RESET_TOKEN_TO_SESSION, AdminUserService.getBooleanSecurityParameter( AdminUserService.DSKEY_LOCK_RESET_TOKEN_TO_SESSION ) );

    HtmlTemplate template = AppTemplateService.getTemplate( TEMPLATE_ADMIN_EMAIL_FORGOT_PASSWORD, locale, model );

    MailService.sendMailHtml( user.getEmail( ), strSenderEmail, strSenderEmail, strEmailSubject, template.getHtml( ) );

    return AdminMessageService.getMessageUrl( request, MESSAGE_SENDING_SUCCESS, JSP_URL_ADMIN_LOGIN, AdminMessage.TYPE_INFO );
}
/**
 * Process a password reset requested from a "forgot password" e-mail link.
 * Validates the posted token (in constant time), its age and the new password
 * before updating the user.
 *
 * @param request
 *            The HTTP Request carrying the user id, timestamp, token and new password
 * @return The Jsp URL of the process result
 */
public String doResetPassword( HttpServletRequest request )
{
    // Tokens and credentials must not transit in a URL : only accept POST
    if ( !"POST".equals( request.getMethod( ) ) )
    {
        throw new AppException( "This method should be requested via POST" );
    }

    // isNumeric( ) already returns false for null and empty strings
    String strUserId = request.getParameter( Parameters.USER_ID );

    if ( !StringUtils.isNumeric( strUserId ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    String strTimestamp = request.getParameter( PARAMETER_TIMESTAMP );

    if ( !StringUtils.isNumeric( strTimestamp ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    String strToken = request.getParameter( PARAMETER_TOKEN );

    if ( StringUtils.isEmpty( strToken ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    String strNewPassword = request.getParameter( Parameters.NEW_PASSWORD );
    String strConfirmNewPassword = request.getParameter( Parameters.CONFIRM_NEW_PASSWORD );

    if ( StringUtils.isEmpty( strNewPassword ) || StringUtils.isEmpty( strConfirmNewPassword ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    if ( !strNewPassword.equals( strConfirmNewPassword ) )
    {
        return AdminMessageService.getMessageUrl( request, MESSAGE_CONTROL_PASSWORD_NO_CORRESPONDING, AdminMessage.TYPE_STOP );
    }

    LuteceDefaultAdminUser user = AdminUserHome.findLuteceDefaultAdminUserByPrimaryKey( Integer.parseInt( strUserId ) );

    if ( user == null )
    {
        // Compute a token against a dummy user anyway, so the response time
        // does not reveal whether the user id exists
        user = new LuteceDefaultAdminUser( );
    }

    Date timestamp = new Date( Long.parseLong( strTimestamp ) );
    String strSystemToken = AdminUserService.getUserPasswordResetToken( user, timestamp, request );

    // Constant-time comparison so the token cannot be guessed byte by byte
    // through a timing side channel (String.equals short-circuits)
    if ( !java.security.MessageDigest.isEqual( strSystemToken.getBytes( java.nio.charset.StandardCharsets.UTF_8 ),
            strToken.getBytes( java.nio.charset.StandardCharsets.UTF_8 ) ) )
    {
        return AdminMessageService.getMessageUrl( request, MESSAGE_INVALID_RESET_TOKEN, AdminMessage.TYPE_STOP );
    }

    // Reject tokens from the future or older than the configured validity (minutes)
    long lTokenAge = new Date( ).getTime( ) - timestamp.getTime( );

    if ( lTokenAge < 0 || lTokenAge > ( 1000L * 60 * AdminUserService.getIntegerSecurityParameter( AdminUserService.DSKEY_RESET_TOKEN_VALIDITY ) ) )
    {
        return AdminMessageService.getMessageUrl( request, MESSAGE_EXPIRED_RESET_TOKEN, AdminMessage.TYPE_STOP );
    }

    // Enforce the password policy ; a non-empty URL is an error page to display
    String strUrl = AdminUserService.checkPassword( request, strNewPassword, user.getUserId( ) );

    if ( StringUtils.isNotEmpty( strUrl ) )
    {
        return strUrl;
    }

    // all checks are OK. Proceed to password change
    user.setPasswordMaxValidDate( AdminUserService.getPasswordMaxValidDate( ) );

    IPasswordFactory passwordFactory = SpringContextService.getBean( IPasswordFactory.BEAN_NAME );
    user.setPassword( passwordFactory.getPasswordFromCleartext( strNewPassword ) );
    AdminUserHome.update( user );
    AdminUserHome.insertNewPasswordInHistory( user.getPassword( ), user.getUserId( ) );

    return AdminMessageService.getMessageUrl( request, MESSAGE_RESET_PASSORWD_SUCCESS, JSP_URL_ADMIN_LOGIN, AdminMessage.TYPE_INFO );
}
/**
 * Process the sending of the login (access code) to the user's e-mail address.
 *
 * @param request
 *            The HTTP Request
 * @return The Jsp URL of the process result
 */
public String doForgotLogin( HttpServletRequest request )
{
    // get the e-mail address posted by the user
    String strEmail = request.getParameter( Parameters.EMAIL );
    Locale locale = AdminUserService.getLocale( request );

    if ( ( strEmail == null ) || strEmail.equals( CONSTANT_EMPTY_STRING ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    if ( !AdminUserService.checkEmail( strEmail ) )
    {
        return AdminMessageService.getMessageUrl( request, MESSAGE_WRONG_EMAIL_FORMAT, AdminMessage.TYPE_STOP );
    }

    if ( locale == null )
    {
        locale = LocaleService.getDefault( );
    }

    // if access code not found, redirect to the contact form rather than an
    // error message, so the existence of the e-mail address is not revealed
    String strAccessCode = AdminUserHome.findUserByEmail( strEmail );

    if ( StringUtils.isEmpty( strAccessCode ) )
    {
        return JSP_URL_FORM_CONTACT;
    }

    // send access code by e-mail
    String strSenderEmail = MailService.getNoReplyEmail( );
    String strEmailSubject = I18nService.getLocalizedString( MESSAGE_FORGOT_LOGIN_EMAIL_SUBJECT, locale );
    HashMap<String, Object> model = new HashMap<>( );
    model.put( MARK_LOGIN, strAccessCode );
    model.put( MARK_LOGIN_URL, AppPathService.getBaseUrl( request ) + AdminAuthenticationService.getInstance( ).getLoginPageUrl( ) );
    model.put( MARK_SITE_LINK, MailService.getSiteLink( AppPathService.getBaseUrl( request ), false ) );

    HtmlTemplate template = AppTemplateService.getTemplate( TEMPLATE_ADMIN_EMAIL_FORGOT_LOGIN, locale, model );

    MailService.sendMailHtml( strEmail, strSenderEmail, strSenderEmail, strEmailSubject, template.getHtml( ) );

    return AdminMessageService.getMessageUrl( request, MESSAGE_FORGOT_LOGIN_SENDING_SUCCESS, AdminMessage.TYPE_INFO );
}
/**
 * Send the message to the administrator(s).
 *
 * @param request
 *            The {@link HttpServletRequest}
 * @return an adminMessage
 */
public String doFormContact( HttpServletRequest request )
{
    // Get message, check if empty
    String strMessage = request.getParameter( PARAMETER_MESSAGE );

    if ( ( strMessage == null ) || strMessage.equals( CONSTANT_EMPTY_STRING ) )
    {
        return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP );
    }

    Locale locale = AdminUserService.getLocale( request );

    if ( locale == null )
    {
        locale = LocaleService.getDefault( );
    }

    // send mail to every admin which has the configured level ; the level
    // property must match REGEX_ID, otherwise level 0 is used
    int nIdLevel = 0;
    String strLevelId = AppPropertiesService.getProperty( PROPERTY_LEVEL, "0" );

    if ( ( strLevelId != null ) && strLevelId.matches( REGEX_ID ) )
    {
        nIdLevel = Integer.parseInt( strLevelId );
    }

    Collection<AdminUser> adminUserList = AdminUserHome.findByLevel( nIdLevel );

    // build the recipient list, keeping only syntactically valid addresses
    StringBuilder sbMailsTo = new StringBuilder( CONSTANT_EMPTY_STRING );

    for ( AdminUser adminUser : adminUserList )
    {
        if ( StringUtil.checkEmail( adminUser.getEmail( ) ) )
        {
            sbMailsTo.append( adminUser.getEmail( ) ).append( CONSTANT_EMAIL_DELIMITER );
        }
    }

    String strMailsTo = sbMailsTo.toString( );

    // the mail is only sent when at least one valid recipient was found
    if ( !strMailsTo.equals( CONSTANT_EMPTY_STRING ) )
    {
        String strSenderEmail = MailService.getNoReplyEmail( );
        String strEmailSubject = I18nService.getLocalizedString( MESSAGE_EMAIL_ADMIN_SUBJECT, locale );
        MailService.sendMailHtml( strMailsTo, strSenderEmail, strSenderEmail, strEmailSubject, strMessage );
    }

    return AdminMessageService.getMessageUrl( request, MESSAGE_ADMIN_SENDING_SUCCESS, AdminAuthenticationService.getInstance( ).getLoginPageUrl( ),
            AdminMessage.TYPE_INFO );
}
/**
 * Process the logout of user.
 *
 * @param request
 *            Http request
 * @return The Jsp URL of the process result
 */
public String doLogout( HttpServletRequest request )
{
    // Invalidate the session if one exists ; getSession( false ) avoids
    // creating a brand new session only to destroy it immediately (with
    // getSession( ) the null check below could never be taken)
    HttpSession session = request.getSession( false );

    if ( session != null )
    {
        session.invalidate( );
    }

    String strLoginUrl = AdminAuthenticationService.getInstance( ).getLoginPageUrl( );

    return AdminMessageService.getMessageUrl( request, Messages.MESSAGE_LOGOUT, strLoginUrl, AdminMessage.TYPE_INFO );
}
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.firefox;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import com.google.common.io.CharStreams;
import com.google.common.io.Closeables;
import com.google.common.io.LineReader;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.json.Json;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
class Preferences {

  /**
   * The maximum amount of time scripts should be permitted to run. The user may increase this
   * timeout, but may not set it below the default value.
   */
  private static final String MAX_SCRIPT_RUN_TIME_KEY = "dom.max_script_run_time";
  private static final int DEFAULT_MAX_SCRIPT_RUN_TIME = 30;

  /**
   * This pattern is used to parse preferences in user.js. It is intended to match all preference
   * lines in the format generated by Firefox; it won't necessarily match all possible lines that
   * Firefox will parse.
   *
   * e.g. if you have a line with extra spaces after the end-of-line semicolon, this pattern will
   * not match that line because Firefox never generates lines like that.
   */
  private static final Pattern PREFERENCE_PATTERN =
      Pattern.compile("user_pref\\(\"([^\"]+)\", (\"?.+?\"?)\\);");

  /** "Frozen" defaults that may never be overridden with a different value. */
  private final Map<String, Object> immutablePrefs = Maps.newHashMap();

  /** All preferences (frozen, mutable and user-supplied), keyed by name. */
  private final Map<String, Object> allPrefs = Maps.newHashMap();

  public Preferences(Reader defaults) {
    readDefaultPreferences(defaults);
  }

  public Preferences(Reader defaults, File userPrefs) {
    readDefaultPreferences(defaults);
    // NOTE(review): FileReader uses the platform default charset — left as-is to
    // preserve behaviour; confirm user.js is always ASCII before changing.
    try (FileReader reader = new FileReader(userPrefs)) {
      readPreferences(reader);
    } catch (IOException e) {
      throw new WebDriverException(e);
    }
  }

  @VisibleForTesting
  public Preferences(Reader defaults, Reader reader) {
    readDefaultPreferences(defaults);
    try {
      readPreferences(reader);
    } catch (IOException e) {
      throw new WebDriverException(e);
    } finally {
      try {
        Closeables.close(reader, true);
      } catch (IOException ignored) {
        // Closeables.close(reader, true) already swallows IOExceptions.
      }
    }
  }

  /**
   * Loads the bundled default preferences from a JSON document with two sections: entries under
   * "frozen" are recorded as immutable, entries under "mutable" may later be overridden.
   *
   * @throws WebDriverException if the defaults cannot be read
   */
  @SuppressWarnings("unchecked") // JSON is parsed into raw maps by design.
  private void readDefaultPreferences(Reader defaultsReader) {
    try {
      String rawJson = CharStreams.toString(defaultsReader);
      Map<String, Object> map = new Json().toType(rawJson, Map.class);

      Map<String, Object> frozen = (Map<String, Object>) map.get("frozen");
      for (Map.Entry<String, Object> entry : frozen.entrySet()) {
        Object value = narrowToInteger(entry.getValue());
        setPreference(entry.getKey(), value);
        immutablePrefs.put(entry.getKey(), value);
      }

      Map<String, Object> mutable = (Map<String, Object>) map.get("mutable");
      for (Map.Entry<String, Object> entry : mutable.entrySet()) {
        setPreference(entry.getKey(), narrowToInteger(entry.getValue()));
      }
    } catch (IOException e) {
      throw new WebDriverException(e);
    }
  }

  /**
   * JSON parsing yields Long for integral numbers, but preferences are handled as Integer.
   * Narrows without the deprecated {@code new Integer(int)} constructor; extracted because the
   * same conversion was previously duplicated for the frozen and mutable sections.
   */
  private static Object narrowToInteger(Object value) {
    if (value instanceof Long) {
      return Integer.valueOf(((Long) value).intValue());
    }
    return value;
  }

  /** Dispatches an untyped value to the matching typed setter. */
  private void setPreference(String key, Object value) {
    if (value instanceof String) {
      setPreference(key, (String) value);
    } else if (value instanceof Boolean) {
      setPreference(key, ((Boolean) value).booleanValue());
    } else {
      setPreference(key, ((Number) value).intValue());
    }
  }

  /** Parses user_pref lines from a user.js-style stream; non-matching lines are ignored. */
  private void readPreferences(Reader reader) throws IOException {
    LineReader allLines = new LineReader(reader);
    String line = allLines.readLine();
    while (line != null) {
      Matcher matcher = PREFERENCE_PATTERN.matcher(line);
      if (matcher.matches()) {
        allPrefs.put(matcher.group(1), preferenceAsValue(matcher.group(2)));
      }
      line = allLines.readLine();
    }
  }

  public void setPreference(String key, String value) {
    checkPreference(key, value);
    if (isStringified(value)) {
      throw new IllegalArgumentException(
          String.format("Preference values must be plain strings: %s: %s",
                        key, value));
    }
    allPrefs.put(key, value);
  }

  public void setPreference(String key, boolean value) {
    checkPreference(key, value);
    allPrefs.put(key, value);
  }

  public void setPreference(String key, int value) {
    checkPreference(key, value);
    allPrefs.put(key, value);
  }

  /** Copies every preference held here into {@code prefs}, overwriting duplicates. */
  public void addTo(Preferences prefs) {
    // TODO(simon): Stop being lazy
    prefs.allPrefs.putAll(allPrefs);
  }

  public void addTo(FirefoxProfile profile) {
    profile.getAdditionalPreferences().allPrefs.putAll(allPrefs);
  }

  /** Writes all preferences in Firefox's {@code user_pref("key", value);} format. */
  public void writeTo(Writer writer) throws IOException {
    for (Map.Entry<String, Object> pref : allPrefs.entrySet()) {
      writer.append("user_pref(\"").append(pref.getKey()).append("\", ");
      writer.append(valueAsPreference(pref.getValue()));
      writer.append(");\n");
    }
  }

  /** Renders a value for user.js: strings are quoted, everything else is written raw. */
  private String valueAsPreference(Object value) {
    if (value instanceof String) {
      return "\"" + escapeValueAsPreference((String) value) + "\"";
    }
    return escapeValueAsPreference(String.valueOf(value));
  }

  /** Escapes backslashes and double quotes for embedding inside a quoted user.js value. */
  private String escapeValueAsPreference(String value) {
    return value.replaceAll("\\\\", "\\\\\\\\").replaceAll("\"", "\\\\\"");
  }

  /**
   * Converts a raw user.js token into a String (quoted), Boolean ("true"/"false") or Integer.
   *
   * @throws WebDriverException if the token is none of the three supported forms
   */
  private Object preferenceAsValue(String toConvert) {
    if (toConvert.startsWith("\"") && toConvert.endsWith("\"")) {
      return toConvert.substring(1, toConvert.length() - 1).replaceAll("\\\\\\\\", "\\\\");
    }

    if ("false".equals(toConvert) || "true".equals(toConvert)) {
      return Boolean.parseBoolean(toConvert);
    }

    try {
      return Integer.parseInt(toConvert);
    } catch (NumberFormatException e) {
      throw new WebDriverException(e);
    }
  }

  @VisibleForTesting
  protected Object getPreference(String key) {
    return allPrefs.get(key);
  }

  private boolean isStringified(String value) {
    // Assume a string is already "stringified" (wrapped in double quotes) when
    // both its first and last characters are a double quote.
    return value.startsWith("\"") && value.endsWith("\"");
  }

  public void putAll(Map<String, Object> frozenPreferences) {
    allPrefs.putAll(frozenPreferences);
  }

  /**
   * Rejects null values, attempts to change a frozen preference to a different value, and
   * attempts to set {@code dom.max_script_run_time} below the default (0 disables the limit).
   */
  private void checkPreference(String key, Object value) {
    checkNotNull(value);
    // Simplified from "!contains || (contains && equals)" — logically identical.
    checkArgument(!immutablePrefs.containsKey(key) || value.equals(immutablePrefs.get(key)),
                  "Preference %s may not be overridden: frozen value=%s, requested value=%s",
                  key, immutablePrefs.get(key), value);
    if (MAX_SCRIPT_RUN_TIME_KEY.equals(key)) {
      int n;
      if (value instanceof String) {
        n = Integer.parseInt((String) value);
      } else if (value instanceof Integer) {
        n = (Integer) value;
      } else {
        throw new IllegalArgumentException(String.format(
            "%s value must be a number: %s", MAX_SCRIPT_RUN_TIME_KEY, value.getClass().getName()));
      }
      checkArgument(n == 0 || n >= DEFAULT_MAX_SCRIPT_RUN_TIME,
                    "%s must be == 0 || >= %s",
                    MAX_SCRIPT_RUN_TIME_KEY,
                    DEFAULT_MAX_SCRIPT_RUN_TIME);
    }
  }
}
| |
/*
* Copyright 2013-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.server.environment;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import com.amazonaws.services.simplesystemsmanagement.AWSSimpleSystemsManagement;
import com.amazonaws.services.simplesystemsmanagement.model.GetParametersByPathRequest;
import com.amazonaws.services.simplesystemsmanagement.model.GetParametersByPathResult;
import com.amazonaws.services.simplesystemsmanagement.model.Parameter;
import com.amazonaws.services.simplesystemsmanagement.model.ParameterType;
import org.apache.commons.lang3.RandomUtils;
import org.junit.Test;
import org.springframework.cloud.config.environment.Environment;
import org.springframework.cloud.config.environment.PropertySource;
import org.springframework.cloud.config.server.config.ConfigServerProperties;
import org.springframework.util.StringUtils;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.springframework.cloud.config.server.environment.AwsParameterStoreEnvironmentProperties.DEFAULT_PATH_SEPARATOR;
/**
* @author Iulian Antohe
*/
public class AwsParameterStoreEnvironmentRepositoryTests {
// Two-entry property fixtures. A private factory replaces the previous
// double-brace initialization, which allocated a needless anonymous HashMap
// subclass per constant (a well-known Java anti-pattern).
private static final Map<String, String> SHARED_PROPERTIES = twoEntryMap(
        "logging.level.root", "warn",
        "spring.cache.redis.time-to-live", "0");

private static final Map<String, String> SHARED_DEFAULT_PROPERTIES = twoEntryMap(
        "logging.level.root", "error",
        "spring.cache.redis.time-to-live", "1000");

private static final Map<String, String> SHARED_PRODUCTION_PROPERTIES = twoEntryMap(
        "logging.level.root", "fatal",
        "spring.cache.redis.time-to-live", "5000");

private static final Map<String, String> APPLICATION_SPECIFIC_PROPERTIES = twoEntryMap(
        "logging.level.com.example.service", "trace",
        "spring.cache.redis.time-to-live", "30000");

private static final Map<String, String> APPLICATION_SPECIFIC_DEFAULT_PROPERTIES = twoEntryMap(
        "logging.level.com.example.service", "debug",
        "spring.cache.redis.time-to-live", "60000");

private static final Map<String, String> APPLICATION_SPECIFIC_PRODUCTION_PROPERTIES = twoEntryMap(
        "logging.level.com.example.service", "info",
        "spring.cache.redis.time-to-live", "300000");

/**
 * Builds a plain two-entry {@link HashMap}, matching the runtime type the
 * repository under test produces for its property sources.
 */
private static Map<String, String> twoEntryMap(String key1, String value1, String key2, String value2) {
    Map<String, String> map = new HashMap<>();
    map.put(key1, value1);
    map.put(key2, value2);
    return map;
}
// Mocked AWS SSM client; its responses are stubbed per test via setupAwsSsmClientMocks().
private final AWSSimpleSystemsManagement awsSsmClientMock = mock(AWSSimpleSystemsManagement.class,
        "aws-ssm-client-mock");

// Real configuration objects used with their default values only.
private final ConfigServerProperties configServerProperties = new ConfigServerProperties();

private final AwsParameterStoreEnvironmentProperties environmentProperties = new AwsParameterStoreEnvironmentProperties();

// System under test, wired with the mocked client and default properties.
private final AwsParameterStoreEnvironmentRepository repository = new AwsParameterStoreEnvironmentRepository(
        awsSsmClientMock, configServerProperties, environmentProperties);
/**
 * Null application and null profile: both fall back to the configured defaults,
 * yielding the shared default-profile and profile-less parameter sources.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithNullApplicationAndNullProfile() {
    // Arrange
    String application = null;
    String profile = null;
    String defaultApp = configServerProperties.getDefaultApplicationName();
    String defaultProfile = configServerProperties.getDefaultProfile();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(defaultProfile);

    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(defaultApp, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Null application with the default profile: resolves to the default application
 * name plus the shared default-profile and profile-less parameter sources.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithNullApplicationAndDefaultProfile() {
    // Arrange
    String application = null;
    String profile = configServerProperties.getDefaultProfile();
    String defaultApp = configServerProperties.getDefaultApplicationName();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(defaultApp, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Null application with a random (non-existent) profile: only the shared
 * profile-less parameter source is returned.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithNullApplicationAndNonExistentProfile() {
    // Arrange
    String application = null;
    String profile = randomAlphabetic(RandomUtils.nextInt(3, 33));
    String defaultApp = configServerProperties.getDefaultApplicationName();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String name = "aws:ssm:parameter:/config/application/";
    PropertySource ps = new PropertySource(name, SHARED_PROPERTIES);

    Environment expected = new Environment(defaultApp, profiles, null, null, null);
    expected.add(ps);

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Null application with the existing "production" profile: expect the shared
 * production and profile-less parameter sources, in that precedence order.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithNullApplicationAndExistentProfile() {
    // Arrange
    String application = null;
    String profile = "production";
    String defaultApp = configServerProperties.getDefaultApplicationName();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String sharedProdParamsPsName = "aws:ssm:parameter:/config/application-production/";
    PropertySource sharedProdParamsPs = new PropertySource(sharedProdParamsPsName, SHARED_PRODUCTION_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(defaultApp, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedProdParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Default application with a null profile: the profile falls back to the
 * configured default, yielding the shared default-profile and profile-less sources.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithDefaultApplicationAndNullProfile() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = null;
    String defaultProfile = configServerProperties.getDefaultProfile();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(defaultProfile);

    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(application, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Default application and default profile: expect the shared default-profile
 * and profile-less parameter sources.
 */
@Test
public void testFindOneWithDefaultApplicationAndDefaultProfile() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = configServerProperties.getDefaultProfile();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(application, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Default application with a random (non-existent) profile: only the shared
 * profile-less parameter source is returned.
 */
@Test
public void testFindOneWithDefaultApplicationAndNonExistentProfile() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = randomAlphabetic(RandomUtils.nextInt(3, 33));
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String name = "aws:ssm:parameter:/config/application/";
    PropertySource ps = new PropertySource(name, SHARED_PROPERTIES);

    Environment expected = new Environment(application, profiles, null, null, null);
    expected.add(ps);

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Default application with the existing "production" profile: expect the shared
 * production and profile-less parameter sources, in that precedence order.
 */
@Test
public void testFindOneWithDefaultApplicationAndExistentProfile() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = "production";
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);

    String sharedProdParamsPsName = "aws:ssm:parameter:/config/application-production/";
    PropertySource sharedProdParamsPs = new PropertySource(sharedProdParamsPsName, SHARED_PRODUCTION_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(application, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedProdParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Random (non-existent) application with a null profile: only the shared
 * default-profile and profile-less parameter sources apply.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithNonExistentApplicationAndNullProfile() {
    // Arrange
    String application = randomAlphabetic(RandomUtils.nextInt(3, 33));
    String profile = null;
    String defaultProfile = configServerProperties.getDefaultProfile();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(defaultProfile);

    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);

    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);

    Environment expected = new Environment(application, profiles, null, null, null);
    expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));

    setupAwsSsmClientMocks(expected);

    // Act
    Environment result = repository.findOne(application, profile, null);

    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() with a random (non-existent) application and the explicit default profile should
 * return the shared default parameters followed by the shared profile-less parameters.
 */
@Test
public void testFindOneWithNonExistentApplicationAndDefaultProfile() {
// Arrange
String application = randomAlphabetic(RandomUtils.nextInt(3, 33));
String profile = configServerProperties.getDefaultProfile();
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * When neither the application nor the profile exists, only the shared profile-less
 * parameters ("/config/application/") are expected in the returned environment.
 */
@Test
public void testFindOneWithNonExistentApplicationAndNonExistentProfile() {
    // Arrange
    String randomApplication = randomAlphabetic(RandomUtils.nextInt(3, 33));
    String randomProfile = randomAlphabetic(RandomUtils.nextInt(3, 33));
    PropertySource sharedParamsPs = new PropertySource("aws:ssm:parameter:/config/application/",
            SHARED_PROPERTIES);
    Environment expected = new Environment(randomApplication,
            StringUtils.commaDelimitedListToStringArray(randomProfile), null, null, null);
    expected.add(sharedParamsPs);
    setupAwsSsmClientMocks(expected);
    // Act
    Environment result = repository.findOne(randomApplication, randomProfile, null);
    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() with a random (non-existent) application and the existent "production" profile
 * should return the shared production parameters followed by the shared profile-less
 * parameters.
 */
@Test
public void testFindOneWithNonExistentApplicationAndExistentProfile() {
// Arrange
String application = randomAlphabetic(RandomUtils.nextInt(3, 33));
String profile = "production";
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String sharedProdParamsPsName = "aws:ssm:parameter:/config/application-production/";
PropertySource sharedProdParamsPs = new PropertySource(sharedProdParamsPsName, SHARED_PRODUCTION_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(Arrays.asList(sharedProdParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() for the existent "service" application with a null profile should fall back to
 * the default profile and return, in precedence order: app-specific default, shared default,
 * app-specific profile-less, and shared profile-less parameters.
 */
@Test
@SuppressWarnings("ConstantConditions")
public void testFindOneWithExistentApplicationAndNullProfile() {
// Arrange
String application = "service";
String profile = null;
// A null profile is substituted by the server's default profile.
String defaultProfile = configServerProperties.getDefaultProfile();
String[] profiles = StringUtils.commaDelimitedListToStringArray(defaultProfile);
String appSpecificDefaultParamsPsName = "aws:ssm:parameter:/config/service-default/";
PropertySource appSpecificDefaultParamsPs = new PropertySource(appSpecificDefaultParamsPsName,
APPLICATION_SPECIFIC_DEFAULT_PROPERTIES);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String appSpecificParamsPsName = "aws:ssm:parameter:/config/service/";
PropertySource appSpecificParamsPs = new PropertySource(appSpecificParamsPsName,
APPLICATION_SPECIFIC_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
// Order matters: app-specific sources precede shared ones, profile-specific precede profile-less.
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(
Arrays.asList(appSpecificDefaultParamsPs, sharedDefaultParamsPs, appSpecificParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() for the existent "service" application with the explicit default profile should
 * return, in precedence order: app-specific default, shared default, app-specific
 * profile-less, and shared profile-less parameters.
 */
@Test
public void testFindOneWithExistentApplicationAndDefaultProfile() {
// Arrange
String application = "service";
String profile = configServerProperties.getDefaultProfile();
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String appSpecificDefaultParamsPsName = "aws:ssm:parameter:/config/service-default/";
PropertySource appSpecificDefaultParamsPs = new PropertySource(appSpecificDefaultParamsPsName,
APPLICATION_SPECIFIC_DEFAULT_PROPERTIES);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String appSpecificParamsPsName = "aws:ssm:parameter:/config/service/";
PropertySource appSpecificParamsPs = new PropertySource(appSpecificParamsPsName,
APPLICATION_SPECIFIC_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(
Arrays.asList(appSpecificDefaultParamsPs, sharedDefaultParamsPs, appSpecificParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * An existent application with a non-existent profile yields only the profile-less sources:
 * the app-specific parameters followed by the shared parameters.
 */
@Test
public void testFindOneWithExistentApplicationAndNonExistentProfile() {
    // Arrange
    String application = "service";
    String randomProfile = randomAlphabetic(RandomUtils.nextInt(3, 33));
    PropertySource appSpecificParamsPs = new PropertySource("aws:ssm:parameter:/config/service/",
            APPLICATION_SPECIFIC_PROPERTIES);
    PropertySource sharedParamsPs = new PropertySource("aws:ssm:parameter:/config/application/",
            SHARED_PROPERTIES);
    Environment expected = new Environment(application,
            StringUtils.commaDelimitedListToStringArray(randomProfile), null, null, null);
    expected.addAll(Arrays.asList(appSpecificParamsPs, sharedParamsPs));
    setupAwsSsmClientMocks(expected);
    // Act
    Environment result = repository.findOne(application, randomProfile, null);
    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() for the existent "service" application with the existent "production" profile
 * should return, in precedence order: app-specific production, shared production,
 * app-specific profile-less, and shared profile-less parameters.
 */
@Test
public void testFindOneWithExistentApplicationAndExistentProfile() {
// Arrange
String application = "service";
String profile = "production";
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String appSpecificProdParamsPsName = "aws:ssm:parameter:/config/service-production/";
PropertySource appSpecificProdParamsPs = new PropertySource(appSpecificProdParamsPsName,
APPLICATION_SPECIFIC_PRODUCTION_PROPERTIES);
String sharedProdParamsPsName = "aws:ssm:parameter:/config/application-production/";
PropertySource sharedProdParamsPs = new PropertySource(sharedProdParamsPsName, SHARED_PRODUCTION_PROPERTIES);
String appSpecificParamsPsName = "aws:ssm:parameter:/config/service/";
PropertySource appSpecificParamsPs = new PropertySource(appSpecificParamsPsName,
APPLICATION_SPECIFIC_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(
Arrays.asList(appSpecificProdParamsPs, sharedProdParamsPs, appSpecificParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * findOne() with multiple comma-separated profiles ("default,production") should return six
 * sources: the later-listed profile ("production") takes precedence over the earlier one,
 * and app-specific sources precede shared ones within each profile.
 */
@Test
public void testFindOneWithExistentApplicationAndMultipleExistentProfiles() {
// Arrange
String application = "service";
String profile = configServerProperties.getDefaultProfile() + ",production";
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String appSpecificProdParamsPsName = "aws:ssm:parameter:/config/service-production/";
PropertySource appSpecificProdParamsPs = new PropertySource(appSpecificProdParamsPsName,
APPLICATION_SPECIFIC_PRODUCTION_PROPERTIES);
String sharedProdParamsPsName = "aws:ssm:parameter:/config/application-production/";
PropertySource sharedProdParamsPs = new PropertySource(sharedProdParamsPsName, SHARED_PRODUCTION_PROPERTIES);
String appSpecificDefaultParamsPsName = "aws:ssm:parameter:/config/service-default/";
PropertySource appSpecificDefaultParamsPs = new PropertySource(appSpecificDefaultParamsPsName,
APPLICATION_SPECIFIC_DEFAULT_PROPERTIES);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String appSpecificParamsPsName = "aws:ssm:parameter:/config/service/";
PropertySource appSpecificParamsPs = new PropertySource(appSpecificParamsPsName,
APPLICATION_SPECIFIC_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(Arrays.asList(appSpecificProdParamsPs, sharedProdParamsPs, appSpecificDefaultParamsPs,
sharedDefaultParamsPs, appSpecificParamsPs, sharedParamsPs));
setupAwsSsmClientMocks(expected);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Config-server overrides must take precedence over any parameter source: the "overrides"
 * property source is expected first, before the shared default and shared profile-less
 * parameters.
 */
@Test
public void testFindOneWithOverrides() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = configServerProperties.getDefaultProfile();
    String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
    // Use a plain HashMap rather than anonymous double-brace initialization: the anonymous
    // subclass holds a hidden reference to the test instance and has a distinct runtime
    // type, which can trip the strict-type-checking recursive comparison below.
    Map<String, String> overrides = new HashMap<>(4);
    overrides.put("logging.level.root", "boom");
    overrides.put("logging.level.com.example.service", "boom");
    overrides.put("spring.cache.redis.time-to-live", "-1");
    configServerProperties.setOverrides(overrides);
    PropertySource overridesPs = new PropertySource("overrides", overrides);
    String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
    PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
    String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
    PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
    Environment expected = new Environment(application, profiles, null, null, null);
    expected.addAll(Arrays.asList(overridesPs, sharedDefaultParamsPs, sharedParamsPs));
    setupAwsSsmClientMocks(expected);
    // Act
    Environment result = repository.findOne(application, profile, null);
    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * Parameters whose SSM key path uses slashes instead of dots (withSlashesForPropertyName =
 * true in the mock setup) should still resolve to the same property sources.
 */
@Test
public void testFindOneWithSlashesInTheParameterKeyPath() {
// Arrange
String application = configServerProperties.getDefaultApplicationName();
String profile = configServerProperties.getDefaultProfile();
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));
// withSlashesForPropertyName = true, paginatedResponse = false
setupAwsSsmClientMocks(expected, true, false);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * With maxResults forced to 1, the SSM client returns paginated responses; the repository
 * must follow the next-token chain and still assemble the complete property sources.
 */
@Test
public void testFindOneWithPaginatedAwsSsmClientResponse() {
// Arrange
String application = configServerProperties.getDefaultApplicationName();
String profile = configServerProperties.getDefaultProfile();
String[] profiles = StringUtils.commaDelimitedListToStringArray(profile);
// Force one parameter per page so every source requires pagination.
environmentProperties.setMaxResults(1);
String sharedDefaultParamsPsName = "aws:ssm:parameter:/config/application-default/";
PropertySource sharedDefaultParamsPs = new PropertySource(sharedDefaultParamsPsName, SHARED_DEFAULT_PROPERTIES);
String sharedParamsPsName = "aws:ssm:parameter:/config/application/";
PropertySource sharedParamsPs = new PropertySource(sharedParamsPsName, SHARED_PROPERTIES);
Environment expected = new Environment(application, profiles, null, null, null);
expected.addAll(Arrays.asList(sharedDefaultParamsPs, sharedParamsPs));
// withSlashesForPropertyName = false, paginatedResponse = true
setupAwsSsmClientMocks(expected, false, true);
// Act
Environment result = repository.findOne(application, profile, null);
// Assert
assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * When SSM returns no parameters for any path, findOne() yields an environment with no
 * property sources at all.
 */
@Test
public void testFindOneWithNoParametersInThePaths() {
    // Arrange
    String application = configServerProperties.getDefaultApplicationName();
    String profile = configServerProperties.getDefaultProfile();
    Environment expected = new Environment(application,
            StringUtils.commaDelimitedListToStringArray(profile), null, null, null);
    // Every path lookup answers with an empty result.
    when(awsSsmClientMock.getParametersByPath(any(GetParametersByPathRequest.class)))
            .thenReturn(new GetParametersByPathResult());
    // Act
    Environment result = repository.findOne(application, profile, null);
    // Assert
    assertThat(result).usingRecursiveComparison().withStrictTypeChecking().isEqualTo(expected);
}
/**
 * The factory must be able to build a repository from properties that only configure a
 * region.
 */
@Test
public void factoryCustomizableWithRegion() {
    AwsParameterStoreEnvironmentProperties properties = new AwsParameterStoreEnvironmentProperties();
    properties.setRegion("us-east-1");
    AwsParameterStoreEnvironmentRepositoryFactory factory =
            new AwsParameterStoreEnvironmentRepositoryFactory(new ConfigServerProperties());
    assertThat(factory.build(properties)).isNotNull();
}
/**
 * The factory must be able to build a repository from properties that configure both a
 * region and a custom endpoint.
 */
@Test
public void factoryCustomizableWithRegionAndEndpoint() {
    AwsParameterStoreEnvironmentProperties properties = new AwsParameterStoreEnvironmentProperties();
    properties.setRegion("us-east-1");
    properties.setEndpoint("https://myawsendpoint/");
    AwsParameterStoreEnvironmentRepositoryFactory factory =
            new AwsParameterStoreEnvironmentRepositoryFactory(new ConfigServerProperties());
    assertThat(factory.build(properties)).isNotNull();
}
/**
 * Convenience overload: stubs the mocked SSM client for every property source of the given
 * environment, without slashes in property names and without pagination.
 */
private void setupAwsSsmClientMocks(Environment environment) {
setupAwsSsmClientMocks(environment, false, false);
}
/**
 * Stubs the mocked SSM client so that each property source of {@code environment} is served
 * back by {@code getParametersByPath}.
 *
 * @param environment environment whose property sources define the expected SSM answers;
 *        the source name minus the configured origin prefix is used as the SSM path
 * @param withSlashesForPropertyName if true, parameter names use '/' instead of '.' separators
 * @param paginatedResponse if true (and maxResults is smaller than the parameter count),
 *        the response is split into a next-token-linked chain of pages
 */
private void setupAwsSsmClientMocks(Environment environment, boolean withSlashesForPropertyName,
boolean paginatedResponse) {
for (PropertySource ps : environment.getPropertySources()) {
// Strip the origin prefix (e.g. "aws:ssm:parameter:") to recover the raw SSM path.
String path = StringUtils.delete(ps.getName(), environmentProperties.getOrigin());
GetParametersByPathRequest request = new GetParametersByPathRequest().withPath(path)
.withRecursive(environmentProperties.isRecursive())
.withWithDecryption(environmentProperties.isDecryptValues())
.withMaxResults(environmentProperties.getMaxResults());
Set<Parameter> parameters = getParameters(ps, path, withSlashesForPropertyName);
GetParametersByPathResult response = new GetParametersByPathResult().withParameters(parameters);
// Pagination only kicks in when there is more than one chunk, so both the first-page
// and last-page branches below are guaranteed to be taken.
if (paginatedResponse && environmentProperties.getMaxResults() < parameters.size()) {
List<Set<Parameter>> chunks = splitParametersIntoChunks(parameters);
String nextToken = null;
for (int i = 0; i < chunks.size(); i++) {
Set<Parameter> chunk = chunks.get(i);
if (i == 0) {
// First page: answered for the token-less request, carries the first token.
nextToken = generateNextToken();
GetParametersByPathResult responseClone = response.clone().withParameters(chunk)
.withNextToken(nextToken);
when(awsSsmClientMock.getParametersByPath(eq(request))).thenReturn(responseClone);
}
else if (i == chunks.size() - 1) {
// Last page: answered for the previous token, carries no token (ends the chain).
GetParametersByPathRequest requestClone = request.clone().withNextToken(nextToken);
GetParametersByPathResult responseClone = response.clone().withParameters(chunk);
when(awsSsmClientMock.getParametersByPath(eq(requestClone))).thenReturn(responseClone);
}
else {
// Middle page: answered for the previous token, hands out a fresh token.
String newNextToken = generateNextToken();
GetParametersByPathRequest requestClone = request.clone().withNextToken(nextToken);
GetParametersByPathResult responseClone = response.clone().withParameters(chunk)
.withNextToken(newNextToken);
when(awsSsmClientMock.getParametersByPath(eq(requestClone))).thenReturn(responseClone);
nextToken = newNextToken;
}
}
}
else {
// Unpaginated case: a single complete response for the path.
when(awsSsmClientMock.getParametersByPath(eq(request))).thenReturn(response);
}
}
}
/**
 * Builds the set of SSM {@code Parameter}s mirroring the given property source: one
 * String-typed, version-1 parameter per entry, named {@code path + key} (with '.' replaced
 * by the path separator when {@code withSlashesForPropertyName} is set).
 */
private Set<Parameter> getParameters(PropertySource propertySource, String path,
        boolean withSlashesForPropertyName) {
    Function<Map.Entry<?, ?>, Parameter> toParameter = entry -> {
        Object nameSuffix = withSlashesForPropertyName
                ? ((String) entry.getKey()).replace(".", DEFAULT_PATH_SEPARATOR)
                : entry.getKey();
        return new Parameter().withName(path + nameSuffix).withType(ParameterType.String)
                .withValue((String) entry.getValue()).withVersion(1L);
    };
    return propertySource.getSource().entrySet().stream().map(toParameter).collect(Collectors.toSet());
}
/**
 * Splits the parameter set into chunks of at most {@code maxResults} elements each, used to
 * emulate a paginated SSM response.
 */
private List<Set<Parameter>> splitParametersIntoChunks(Set<Parameter> parameters) {
AtomicInteger counter = new AtomicInteger();
// Groups by running-index / maxResults. NOTE(review): the resulting List order comes from
// the grouping map's values() iteration, which is not guaranteed by the Map contract to be
// ascending chunk index — it holds in practice for small consecutive Integer keys in a
// HashMap, but confirm if chunk order ever starts to matter beyond the pagination mocks.
Collector<Parameter, ?, Map<Integer, Set<Parameter>>> collector = Collectors
.groupingBy(p -> counter.getAndIncrement() / environmentProperties.getMaxResults(), Collectors.toSet());
return new ArrayList<>(parameters.stream().collect(collector).values());
}
/**
 * Produces a random pagination token: 3–32 random letters, UTF-8 encoded and Base64 wrapped.
 */
private String generateNextToken() {
    byte[] randomBytes = randomAlphabetic(RandomUtils.nextInt(3, 33)).getBytes(StandardCharsets.UTF_8);
    return Base64.getEncoder().encodeToString(randomBytes);
}
}
| |
package com.j256.ormlite.sqlcipher.android.apptools;
import android.content.Context;
import android.content.res.Resources;
import com.j256.ormlite.dao.BaseDaoImpl;
import com.j256.ormlite.dao.DaoManager;
import com.j256.ormlite.logger.Logger;
import com.j256.ormlite.logger.LoggerFactory;
import net.sqlcipher.database.SQLiteOpenHelper;
import java.lang.reflect.Constructor;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
/**
* This helps organize and access database connections to optimize connection sharing. There are several schemes to
* manage the database connections in an Android app, but as an app gets more complicated, there are many potential
* places where database locks can occur. This class allows database connection sharing between multiple threads in a
* single app.
*
* This gets injected or called with the {@link OrmLiteSqliteOpenHelper} class that is used to manage the database
* connection. The helper instance will be kept in a static field and only released once its internal usage count goes
* to 0.
*
* The {@link SQLiteOpenHelper} and database classes maintain one connection under the hood, and prevent locks in the
* java code. Creating multiple connections can potentially be a source of trouble. This class shares the same
* connection instance between multiple clients, which will allow multiple activities and services to run at the same
* time.
*
* Every time you use the helper, you should call {@link #getHelper(Context)} or {@link #getHelper(Context, Class)}.
* When you are done with the helper you should call {@link #releaseHelper()}.
*
* @author graywatson, kevingalligan
*/
public class OpenHelperManager {

    private static final String HELPER_CLASS_RESOURCE_NAME = "open_helper_classname";

    // final: the logger is a per-class constant and should never be reassigned
    private static final Logger logger = LoggerFactory.getLogger(OpenHelperManager.class);

    // Single shared helper class/instance for the whole process; guarded by class-level
    // synchronization, with 'helper' volatile so unsynchronized readers see a current value.
    private static Class<? extends OrmLiteSqliteOpenHelper> helperClass = null;
    private static volatile OrmLiteSqliteOpenHelper helper = null;
    // true once a helper has been closed, so a re-open can be logged
    private static boolean wasClosed = false;
    // usage counter maintained by loadHelper()/releaseHelper()
    private static int instanceCount = 0;

    /**
     * If you are _not_ using the {@link OrmLiteBaseActivity} type classes then you will need to call this in a static
     * method in your code.
     */
    public static synchronized void setOpenHelperClass(Class<? extends OrmLiteSqliteOpenHelper> openHelperClass) {
        if (openHelperClass == null) {
            helperClass = null;
        } else {
            innerSetHelperClass(openHelperClass);
        }
    }

    /**
     * Set the helper for the manager. This is most likely used for testing purposes and should only be called if you
     * _really_ know what you are doing. If you do use it then it should be in a static {} initializing block to make
     * sure you have one helper instance for your application.
     */
    public static synchronized void setHelper(OrmLiteSqliteOpenHelper helper) {
        OpenHelperManager.helper = helper;
    }

    /**
     * Create a static instance of our open helper from the helper class. This has a usage counter on it so make sure
     * all calls to this method have an associated call to {@link #releaseHelper()}. This should be called during an
     * onCreate() type of method when the application or service is starting. The caller should then keep the helper
     * around until it is shutting down when {@link #releaseHelper()} should be called.
     */
    public static synchronized <T extends OrmLiteSqliteOpenHelper> T getHelper(Context context,
            Class<T> openHelperClass) {
        if (openHelperClass == null) {
            throw new IllegalArgumentException("openHelperClass argument is null");
        }
        innerSetHelperClass(openHelperClass);
        return loadHelper(context, openHelperClass);
    }

    /**
     * Similar to {@link #getHelper(Context, Class)} (which is recommended) except we have to find the helper class
     * through other means. This method requires that the Context be a class that extends one of ORMLite's Android base
     * classes such as {@link OrmLiteBaseActivity}. Either that or the helper class needs to be set in the strings.xml.
     *
     * <p>
     * To find the helper class, this does the following: <br />
     * 1) If the class has been set with a call to {@link #setOpenHelperClass(Class)}, it will be used to construct a
     * helper. <br />
     * 2) If the resource class name is configured in the strings.xml file it will be used. <br />
     * 3) The context class hierarchy is walked looking at the generic parameters for a class extending
     * OrmLiteSqliteOpenHelper. This is used by the {@link OrmLiteBaseActivity} and other base classes. <br />
     * 4) An exception is thrown saying that it was not able to set the helper class.
     * </p>
     *
     * @deprecated Should use {@link #getHelper(Context, Class)}
     */
    @Deprecated
    public static synchronized OrmLiteSqliteOpenHelper getHelper(Context context) {
        if (helperClass == null) {
            if (context == null) {
                throw new IllegalArgumentException("context argument is null");
            }
            Context appContext = context.getApplicationContext();
            innerSetHelperClass(lookupHelperClass(appContext, context.getClass()));
        }
        return loadHelper(context, helperClass);
    }

    /**
     * @deprecated This has been renamed to be {@link #releaseHelper()}.
     */
    @Deprecated
    public static void release() {
        releaseHelper();
    }

    /**
     * Release the helper that was previously returned by a call {@link #getHelper(Context)} or
     * {@link #getHelper(Context, Class)}. This will decrement the usage counter and close the helper if the counter is
     * 0.
     *
     * <p>
     * <b> WARNING: </b> This should be called in an onDestroy() type of method when your application or service is
     * terminating or if your code is no longer going to use the helper or derived DAOs in any way. _Don't_ call this
     * method if you expect to call {@link #getHelper(Context)} again before the application terminates.
     * </p>
     */
    public static synchronized void releaseHelper() {
        instanceCount--;
        logger.trace("releasing helper {}, instance count = {}", helper, instanceCount);
        if (instanceCount <= 0) {
            if (helper != null) {
                logger.trace("zero instances, closing helper {}", helper);
                helper.close();
                helper = null;
                wasClosed = true;
            }
            if (instanceCount < 0) {
                // unbalanced get/release calls; log but do not throw
                logger.error("too many calls to release helper, instance count = {}", instanceCount);
            }
        }
    }

    /**
     * Set the helper class and make sure we aren't changing it to another class.
     */
    private static void innerSetHelperClass(Class<? extends OrmLiteSqliteOpenHelper> openHelperClass) {
        // make sure if that there are not 2 helper classes in an application
        if (openHelperClass == null) {
            throw new IllegalStateException("Helper class was trying to be reset to null");
        } else if (helperClass == null) {
            helperClass = openHelperClass;
        } else if (helperClass != openHelperClass) {
            throw new IllegalStateException("Helper class was " + helperClass + " but is trying to be reset to "
                    + openHelperClass);
        }
    }

    /**
     * Construct the helper if necessary (resetting the shared caches and the usage counter),
     * then increment the usage counter and return the shared helper instance.
     */
    private static <T extends OrmLiteSqliteOpenHelper> T loadHelper(Context context, Class<T> openHelperClass) {
        if (helper == null) {
            if (wasClosed) {
                // this can happen if you are calling get/release and then get again
                logger.info("helper was already closed and is being re-opened");
            }
            if (context == null) {
                throw new IllegalArgumentException("context argument is null");
            }
            Context appContext = context.getApplicationContext();
            helper = constructHelper(appContext, openHelperClass);
            logger.trace("zero instances, created helper {}", helper);
            /*
             * Filipe Leandro and I worked on this bug for like 10 hours straight. It's a doosey.
             *
             * Each ForeignCollection has internal DAO objects that are holding a ConnectionSource. Each Android
             * ConnectionSource is tied to a particular database connection. What Filipe was seeing was that when all of
             * his views we closed (onDestroy), but his application WAS NOT FULLY KILLED, the first View.onCreate()
             * method would open a new connection to the database. Fine. But because he application was still in memory,
             * the static BaseDaoImpl default cache had not been cleared and was containing cached objects with
             * ForeignCollections. The ForeignCollections still had references to the DAOs that had been opened with old
             * ConnectionSource objects and therefore the old database connection. Using those cached collections would
             * cause exceptions saying that you were trying to work with a database that had already been close.
             *
             * Now, whenever we create a new helper object, we must make sure that the internal object caches have been
             * fully cleared. This is a good lesson for anyone that is holding objects around after they have closed
             * connections to the database or re-created the DAOs on a different connection somehow.
             */
            BaseDaoImpl.clearAllInternalObjectCaches();
            /*
             * Might as well do this also since if the helper changes then the ConnectionSource will change so no one is
             * going to have a cache hit on the old DAOs anyway. All they are doing is holding memory.
             *
             * NOTE: we don't want to clear the config map.
             */
            DaoManager.clearDaoCache();
            instanceCount = 0;
        }
        instanceCount++;
        logger.trace("returning helper {}, instance count = {} ", helper, instanceCount);
        @SuppressWarnings("unchecked")
        T castHelper = (T) helper;
        return castHelper;
    }

    /**
     * Call the constructor on our helper class.
     */
    private static OrmLiteSqliteOpenHelper constructHelper(Context context,
            Class<? extends OrmLiteSqliteOpenHelper> openHelperClass) {
        Constructor<?> constructor;
        try {
            constructor = openHelperClass.getConstructor(Context.class);
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Could not find public constructor that has a single (Context) argument for helper class "
                            + openHelperClass, e);
        }
        try {
            return (OrmLiteSqliteOpenHelper) constructor.newInstance(context);
        } catch (Exception e) {
            throw new IllegalStateException("Could not construct instance of helper class " + openHelperClass, e);
        }
    }

    /**
     * Lookup the helper class either from the resource string or by looking for a generic parameter.
     */
    private static Class<? extends OrmLiteSqliteOpenHelper> lookupHelperClass(Context context,
            Class<?> componentClass) {
        // see if we have the magic resource class name set
        Resources resources = context.getResources();
        int resourceId = resources.getIdentifier(HELPER_CLASS_RESOURCE_NAME, "string", context.getPackageName());
        if (resourceId != 0) {
            String className = resources.getString(resourceId);
            try {
                @SuppressWarnings("unchecked")
                Class<? extends OrmLiteSqliteOpenHelper> castClass =
                        (Class<? extends OrmLiteSqliteOpenHelper>) Class.forName(className);
                return castClass;
            } catch (Exception e) {
                throw new IllegalStateException("Could not create helper instance for class " + className, e);
            }
        }
        // try walking the context class to see if we can get the OrmLiteSqliteOpenHelper from a generic parameter
        for (Class<?> componentClassWalk = componentClass; componentClassWalk != null; componentClassWalk =
                componentClassWalk.getSuperclass()) {
            Type superType = componentClassWalk.getGenericSuperclass();
            // instanceof is false for null, so no separate null check is needed
            if (!(superType instanceof ParameterizedType)) {
                continue;
            }
            // get the generic type arguments
            Type[] types = ((ParameterizedType) superType).getActualTypeArguments();
            // defense
            if (types == null || types.length == 0) {
                continue;
            }
            for (Type type : types) {
                // defense
                if (!(type instanceof Class)) {
                    continue;
                }
                Class<?> clazz = (Class<?>) type;
                if (OrmLiteSqliteOpenHelper.class.isAssignableFrom(clazz)) {
                    @SuppressWarnings("unchecked")
                    Class<? extends OrmLiteSqliteOpenHelper> castOpenHelperClass =
                            (Class<? extends OrmLiteSqliteOpenHelper>) clazz;
                    return castOpenHelperClass;
                }
            }
        }
        throw new IllegalStateException(
                "Could not find OpenHelperClass because none of the generic parameters of class " + componentClass
                        + " extends OrmLiteSqliteOpenHelper. You should use getHelper(Context, Class) instead.");
    }
}
| |
/**
* Entity Essentials -- A Component-based Entity System
*
* Copyright (C) 2017 Elmar Schug <elmar.schug@jayware.org>,
* Markus Neubauer <markus.neubauer@jayware.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jayware.e2.storage.impl;
import com.googlecode.concurentlocks.ReadWriteUpdateLock;
import com.googlecode.concurentlocks.ReentrantReadWriteUpdateLock;
import org.jayware.e2.component.api.Aspect;
import org.jayware.e2.context.api.Context;
import org.jayware.e2.context.api.Contextual;
import org.jayware.e2.context.api.Disposable;
import org.jayware.e2.entity.api.EntityEvent.CreateEntityEvent;
import org.jayware.e2.entity.api.EntityEvent.DeleteEntitiesEvent;
import org.jayware.e2.entity.api.EntityEvent.DeleteEntityEvent;
import org.jayware.e2.entity.api.EntityEvent.EntityCreatedEvent;
import org.jayware.e2.entity.api.EntityEvent.EntityDeletedEvent;
import org.jayware.e2.entity.api.EntityEvent.EntityDeletingEvent;
import org.jayware.e2.entity.api.EntityEvent.FindEntitiesEvent;
import org.jayware.e2.entity.api.EntityEvent.ResolveEntityEvent;
import org.jayware.e2.entity.api.EntityRef;
import org.jayware.e2.event.api.Event;
import org.jayware.e2.event.api.EventManager;
import org.jayware.e2.event.api.Handle;
import org.jayware.e2.event.api.Param;
import org.jayware.e2.event.api.Query;
import org.jayware.e2.storage.api.ComponentDatabase;
import org.jayware.e2.storage.api.EntityFinder;
import org.jayware.e2.storage.api.Storage;
import org.jayware.e2.storage.api.StorageException;
import org.jayware.e2.util.Filter;
import org.jayware.e2.util.Key;
import org.jayware.e2.util.ObjectUtil;
import org.jayware.e2.util.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.Lock;
import static org.jayware.e2.entity.api.EntityEvent.AspectParam;
import static org.jayware.e2.entity.api.EntityEvent.EntityIdParam;
import static org.jayware.e2.entity.api.EntityEvent.EntityRefListParam;
import static org.jayware.e2.entity.api.EntityEvent.EntityRefParam;
import static org.jayware.e2.entity.api.EntityEvent.FilterListParam;
import static org.jayware.e2.event.api.EventType.RootEvent.ContextParam;
import static org.jayware.e2.event.api.Parameters.param;
import static org.jayware.e2.event.api.Presence.Optional;
import static org.jayware.e2.util.Key.createKey;
/**
 * Default {@link Storage} implementation. Keeps all {@link EntityRef}s of a
 * {@link Context} in a map keyed by entity id and reacts to the entity
 * lifecycle events ({@link CreateEntityEvent}, {@link DeleteEntityEvent},
 * {@link DeleteEntitiesEvent}, {@link FindEntitiesEvent},
 * {@link ResolveEntityEvent}).
 * <p>
 * Access to the entity map is guarded by one read/update/write lock: handlers
 * take the update lock for their check-then-act sequences and narrow the write
 * lock to the actual map mutation. Lifecycle events are posted after the locks
 * have been released so that event handlers never run while a lock is held.
 */
public class StorageImpl
implements Storage, Disposable
{
    /** Context key under which this storage is registered. */
    public static final Key<Storage> STORAGE_KEY = createKey("org.jayware.e2.Storage");

    private final Logger log = LoggerFactory.getLogger(StorageImpl.class);

    private final Context myContext;
    private final EventManager myEventManager;
    private final EntityFinder myEntityFinder;

    // All entities of the context, keyed by their id. The map instance is
    // supplied by the caller of the constructor.
    private final Map<UUID, EntityRef> myEntities;
    private final ComponentDatabase myComponentDatabase;

    // Read/update/write views of a single ReentrantReadWriteUpdateLock
    // created in the constructor.
    private final Lock myReadLock;
    private final Lock myUpdateLock;
    private final Lock myWriteLock;

    /**
     * Creates a storage for the given context.
     *
     * @param context  the owning {@link Context}; also used to look up the
     *                 {@link EventManager} service
     * @param entities the map used to store the context's entities
     * @param database the component database cleared when an entity is deleted
     */
    public StorageImpl(Context context, Map<UUID, EntityRef> entities, ComponentDatabase database)
    {
        myContext = context;
        myEventManager = context.getService(EventManager.class);
        myEntities = entities;
        myComponentDatabase = database;

        final ReadWriteUpdateLock myLock = new ReentrantReadWriteUpdateLock();
        myReadLock = myLock.readLock();
        myUpdateLock = myLock.updateLock();
        myWriteLock = myLock.writeLock();

        // The finder materializes its results into lists obtained from this
        // provider; CopyOnWriteArrayList keeps the handed-out lists safe to
        // iterate concurrently.
        myEntityFinder = new EntityFinderImpl(context, new Provider<List<EntityRef>>()
        {
            @Override
            public List<EntityRef> provide()
            {
                return new CopyOnWriteArrayList<EntityRef>();
            }
        });
    }

    /**
     * Creates an entity with the passed id, or a random id when none is given.
     * If an entity with that id already exists, the existing ref is reused and
     * no {@link EntityCreatedEvent} is fired. When the triggering event is a
     * query, the resulting ref is set as its {@code EntityRefParam} result.
     */
    @Handle(CreateEntityEvent.class)
    public void handleCreateEntityEvent(Event event, @Param(value = EntityIdParam, presence = Optional) UUID id)
    {
        final UUID entityId = next(id);
        EntityRef resultRef;
        boolean fireEntityCreatedEvent = false;

        // Update lock makes the get/put pair atomic with respect to other
        // writers; the write lock is held only for the actual map mutation.
        myUpdateLock.lock();
        try
        {
            resultRef = myEntities.get(entityId);
            if (resultRef == null)
            {
                resultRef = new EntityRefImpl(entityId);
                myWriteLock.lock();
                try
                {
                    myEntities.put(entityId, resultRef);
                }
                finally
                {
                    myWriteLock.unlock();
                }
                fireEntityCreatedEvent = true;
            }
        }
        finally
        {
            myUpdateLock.unlock();
        }

        // Fired outside of the locks so handlers never run under a lock.
        if (fireEntityCreatedEvent)
        {
            postEntityCreatedEvent(resultRef);
        }
        if (event.isQuery())
        {
            ((Query) event).result(EntityRefParam, resultRef);
        }
    }

    /**
     * Deletes the entity with the given id. Before removal an
     * {@link EntityDeletingEvent} is sent (synchronously, while the update
     * lock is held), the entity's components are cleared and the entity is
     * removed from the map; an {@link EntityDeletedEvent} is posted afterwards.
     * <p>
     * When the event is a query, its {@code EntityRefParam} result is set in
     * any case — it is {@code null} if the entity did not exist.
     */
    @Handle(DeleteEntityEvent.class)
    public void handleDeleteEntityEvent(Event event, @Param(value = EntityIdParam) UUID id)
    {
        boolean fireEntityDeletedEvent = false;
        EntityRef ref;
        myUpdateLock.lock();
        try
        {
            ref = myEntities.get(id);
            if (ref != null)
            {
                // Synchronous notification before the entity disappears, so
                // listeners can still access it.
                sendEntityDeletingEvent(ref);
                myWriteLock.lock();
                try
                {
                    myComponentDatabase.clear(ref);
                    myEntities.remove(id);
                }
                finally
                {
                    myWriteLock.unlock();
                }
                fireEntityDeletedEvent = true;
            }
        }
        finally
        {
            myUpdateLock.unlock();
        }
        if (event.isQuery())
        {
            ((Query) event).result(EntityRefParam, ref);
        }
        if (fireEntityDeletedEvent)
        {
            postEntityDeletedEvent(ref);
        }
        else
        {
            log.warn("The entity '{}' does not exist in '{}'!", id, myContext);
        }
    }

    /**
     * Deletes all entities of the context. An {@link EntityDeletedEvent} is
     * posted for each removed entity after the write lock has been released;
     * when the event is a query, the list of removed refs is set as its
     * {@code EntityRefListParam} result (unmodifiable).
     * <p>
     * NOTE(review): unlike {@link #handleDeleteEntityEvent}, this handler
     * neither sends {@link EntityDeletingEvent}s nor clears the component
     * database for the removed entities — confirm this asymmetry is intended.
     */
    @Handle(DeleteEntitiesEvent.class)
    public void handleDeleteEntitiesEvent(Event event)
    {
        final List<EntityRef> result;
        myWriteLock.lock();
        try
        {
            // Snapshot first, then remove — the snapshot doubles as the
            // result list and the iteration source.
            result = new CopyOnWriteArrayList<EntityRef>(myEntities.values());
            for (EntityRef ref : result)
            {
                myEntities.remove(ref.getId());
            }
        }
        finally
        {
            myWriteLock.unlock();
        }
        for (EntityRef ref : result)
        {
            postEntityDeletedEvent(ref);
        }
        if (event.isQuery())
        {
            ((Query) event).result(EntityRefListParam, Collections.<EntityRef>unmodifiableList(result));
        }
    }

    /**
     * Finds all entities matching the (optional) aspect and filter list.
     * <p>
     * Note that the query result is assigned in the {@code finally} block:
     * even when filtering fails, an (empty) result list is delivered before
     * the {@link StorageException} propagates to the caller.
     */
    @Handle(FindEntitiesEvent.class)
    public void handleFindEntityEvent(Query query, @Param(value = AspectParam, presence = Optional) Aspect aspect,
    @Param(value = FilterListParam, presence = Optional) List<Filter<EntityRef>> filters)
    {
        List<EntityRef> result = Collections.<EntityRef>emptyList();
        myReadLock.lock();
        try
        {
            result = myEntityFinder.filter(myEntities.values(), aspect, filters);
        }
        catch (Exception e)
        {
            throw new StorageException("Failed to find entities!", e);
        }
        finally
        {
            myReadLock.unlock();
            query.result(EntityRefListParam, Collections.<EntityRef>unmodifiableList(result));
        }
    }

    /**
     * Resolves the ref for the given entity id. Unknown ids yield a freshly
     * created ref that is not registered in the map — such a ref reports
     * {@code isValid() == false}.
     */
    @Handle(ResolveEntityEvent.class)
    public void handleResolveEntityEvent(Query query, @Param(EntityIdParam) UUID id)
    {
        EntityRef resolvedEntity = null;
        myReadLock.lock();
        try
        {
            resolvedEntity = myEntities.get(id);
            if (resolvedEntity == null)
            {
                resolvedEntity = new EntityRefImpl(id);
            }
        }
        finally
        {
            myReadLock.unlock();
        }
        query.result(EntityRefParam, resolvedEntity);
    }

    /** Unsubscribes this storage from the event manager on context disposal. */
    @Override
    public void dispose(Context context)
    {
        myEventManager.unsubscribe(myContext, this);
    }

    // Asynchronously announces that an entity has been created.
    private void postEntityCreatedEvent(EntityRef ref)
    {
        myEventManager.post(EntityCreatedEvent.class,
        param(ContextParam, myContext),
        param(EntityRefParam, ref),
        param(EntityIdParam, ref.getId())
        );
    }

    // Asynchronously announces that an entity has been deleted.
    private void postEntityDeletedEvent(EntityRef ref)
    {
        myEventManager.post(EntityDeletedEvent.class,
        param(ContextParam, myContext),
        param(EntityIdParam, ref.getId()),
        param(EntityRefParam, ref)
        );
    }

    // Synchronously announces that an entity is about to be deleted.
    private void sendEntityDeletingEvent(EntityRef ref)
    {
        myEventManager.send(EntityDeletingEvent.class,
        param(ContextParam, myContext),
        param(EntityRefParam, ref),
        param(EntityIdParam, ref.getId())
        );
    }

    // Returns the given id, or a random UUID when none was supplied.
    private UUID next(UUID id)
    {
        return id != null ? id : UUID.randomUUID();
    }

    /**
     * Ref handed out for entities of this storage. Equality is based on the
     * entity id and the owning context; validity is determined by presence in
     * the entity map of a non-disposed context.
     */
    private class EntityRefImpl
    implements EntityRef
    {
        private final UUID myId;

        public EntityRefImpl(UUID id)
        {
            myId = id;
        }

        @Override
        public Context getContext()
        {
            return myContext;
        }

        @Override
        public UUID getId()
        {
            return myId;
        }

        @Override
        public boolean belongsTo(Context context)
        {
            return myContext.equals(context);
        }

        @Override
        public boolean belongsTo(Contextual contextual)
        {
            return contextual != null && myContext.equals(contextual.getContext());
        }

        /** A ref is valid while its context lives and its id is still stored. */
        @Override
        public boolean isValid()
        {
            myReadLock.lock();
            try
            {
                return !myContext.isDisposed() && myEntities.containsKey(myId);
            }
            finally
            {
                myReadLock.unlock();
            }
        }

        @Override
        public boolean isInvalid()
        {
            return !isValid();
        }

        /** Two refs are equal when they share the same id and context. */
        @Override
        public boolean equals(Object other)
        {
            if (this == other)
            {
                return true;
            }
            if (!(other instanceof EntityRef))
            {
                return false;
            }
            final EntityRef ref = (EntityRef) other;
            return ObjectUtil.equals(myId, ref.getId()) && belongsTo(ref);
        }

        @Override
        public int hashCode()
        {
            return ObjectUtil.hashCode(myContext, myId);
        }

        @Override
        public String toString()
        {
            String toString =
            "Ref { " + myId;
            if (isInvalid())
            {
                toString += " | <invalid>";
            }
            return toString + " }";
        }
    }
}
| |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.server.config.parse;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.xmlbeans.XmlError;
import org.apache.xmlbeans.XmlOptions;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.kaazing.gateway.server.Launcher;
import org.kaazing.gateway.server.config.parse.translate.GatewayConfigTranslator;
import org.kaazing.gateway.server.config.parse.translate.GatewayConfigTranslatorFactory;
import org.kaazing.gateway.server.config.nov2015.ClusterType;
import org.kaazing.gateway.server.config.nov2015.GatewayConfigDocument;
import org.kaazing.gateway.server.config.nov2015.PropertiesType;
import org.kaazing.gateway.server.config.nov2015.PropertyType;
import org.kaazing.gateway.server.config.nov2015.SecurityType;
import org.kaazing.gateway.server.config.nov2015.ServiceDefaultsType;
import org.kaazing.gateway.server.config.nov2015.ServiceType;
import org.kaazing.gateway.util.parse.ConfigParameter;
import org.slf4j.Logger;
import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.ext.DefaultHandler2;
import org.xml.sax.ext.Locator2;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Parses and validates a gateway configuration file.
 * <p>
 * Parsing is a pipeline: the file's namespace is sniffed and, if it predates
 * the current one, the DOM is translated and written next to the original with
 * a {@code .new} extension; a first XMLBeans parse collects the declared
 * {@code <properties>}; a second pass streams the file through an SAX-based
 * parameter injector and an XSL transform (each on its own single-thread
 * executor, connected by piped streams) before the final XMLBeans parse and
 * schema validation.
 */
public class GatewayConfigParser {
    /**
     * XSL stylesheet to be used before parsing. Adds xsi:type to login-module and service elements.
     */
    private static final String GATEWAY_CONFIG_ANNOTATE_TYPES_XSL = "META-INF/gateway-config-annotate-types.xsl";
    /**
     * Charset string for XML prologue, must match {@link #CHARSET_OUTPUT}
     */
    private static final String CHARSET_OUTPUT_XML = "UTF-16";
    /**
     * Charset string for output stream, must match {@link #CHARSET_OUTPUT_XML}
     */
    private static final String CHARSET_OUTPUT = "UTF16";
    /**
     * Extension to add to translated/updated config files
     */
    private static final String TRANSLATED_CONFIG_FILE_EXT = ".new";

    private static final Logger LOGGER = Launcher.getGatewayStartupLogger();

    // Properties consulted during ${...} parameter substitution
    // (defaults to the system properties).
    private final Properties configuration;

    /** Creates a parser that resolves parameters against the system properties. */
    public GatewayConfigParser() {
        this(System.getProperties());
    }

    /**
     * Creates a parser.
     *
     * @param configuration properties used for parameter substitution
     */
    public GatewayConfigParser(Properties configuration) {
        this.configuration = configuration;
    }

    /**
     * Translates the DOM of a config written against namespace {@code ns} via
     * the namespace-specific {@link GatewayConfigTranslator} and, when
     * requested, pretty-prints the translated document to
     * {@code translatedConfigFile}.
     *
     * @param ns                   namespace the document was written against
     * @param dom                  document to translate (modified in place)
     * @param translatedConfigFile file the translated XML is written to
     * @param writeTranslatedFile  whether the translated DOM should be written out
     * @throws Exception when translation or writing fails
     */
    private void translate(final GatewayConfigNamespace ns,
                           final Document dom,
                           final File translatedConfigFile,
                           boolean writeTranslatedFile)
            throws Exception {
        GatewayConfigTranslator translator = GatewayConfigTranslatorFactory.newInstance().getTranslator(ns);
        translator.translate(dom);
        if (writeTranslatedFile) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            BufferedOutputStream bos = new BufferedOutputStream(baos);
            Format outputFormat = Format.getPrettyFormat();
            outputFormat.setLineSeparator(System.getProperty("line.separator"));
            XMLOutputter xmlWriter = new XMLOutputter(outputFormat);
            xmlWriter.output(dom, bos);
            bos.close();
            final String xml = baos.toString();
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(String.format("Translated gateway config XML:\n%s", xml));
            }
            // Write the translated DOM out to the given file
            FileWriter fw = new FileWriter(translatedConfigFile);
            BufferedWriter bw = new BufferedWriter(fw);
            bw.write(xml);
            bw.close();
        }
    }

    /**
     * Returns the config file to actually parse: the original file when it
     * already uses the current namespace, otherwise a sibling file with the
     * {@code .new} extension containing the translated configuration.
     *
     * @param configFile the configuration file supplied by the caller
     * @return the (possibly translated) file to parse
     * @throws Exception when the file cannot be read or translated
     */
    private File getTranslatedConfigFile(final File configFile)
            throws Exception {
        // Build a DOM of the config file, so that we can easily sniff the
        // namespace used. We then key off the namespace and attempt to
        // Do The Right Thing(tm).
        SAXBuilder xmlReader = new SAXBuilder();
        Document dom = xmlReader.build(configFile);
        Element root = dom.getRootElement();
        GatewayConfigNamespace namespace = GatewayConfigNamespace.fromURI(root.getNamespace().getURI());
        // Only older namespaces need an on-disk translated copy.
        boolean writeTranslatedFile = !namespace.equals(GatewayConfigNamespace.CURRENT_NS);
        File translatedConfigFile = writeTranslatedFile ?
                new File(configFile.getParent(), configFile.getName()
                        + TRANSLATED_CONFIG_FILE_EXT) : configFile;
        translate(namespace, dom, translatedConfigFile, writeTranslatedFile);
        return translatedConfigFile;
    }

    /**
     * Parse and validate a gateway configuration file.
     *
     * @param configFile the configuration file
     * @return GatewayConfig the parsed gateway configuration
     * @throws Exception when a problem occurs
     */
    public GatewayConfigDocument parse(final File configFile) throws Exception {
        long time = 0;
        if (LOGGER.isDebugEnabled()) {
            time = System.currentTimeMillis();
        }
        // For errors and logging (KG-1379) we need to report the real config file name,
        // which is not always 'gateway-config.xml'.
        String configFileName = configFile.getName();
        // Validate the gateway-config
        GatewayConfigDocument config = null;
        XmlOptions parseOptions = new XmlOptions();
        parseOptions.setLoadLineNumbers();
        parseOptions.setLoadLineNumbers(XmlOptions.LOAD_LINE_NUMBERS_END_ELEMENT);
        parseOptions.setLoadStripWhitespace();
        parseOptions.setLoadStripComments();
        File translatedConfigFile;
        try {
            translatedConfigFile = getTranslatedConfigFile(configFile);
        } catch (Exception e) {
            Throwable rootCause = getRootCause(e);
            if (rootCause == null) {
                rootCause = e;
            }
            if (LOGGER.isDebugEnabled()) {
                LOGGER.error("Error upgrading XML: " + rootCause, rootCause);
            } else {
                LOGGER.error("Error upgrading XML: " + rootCause);
            }
            // If it's not an IllegalArgumentException, wrap it in a
            // GatewayConfigParserException
            // NOTE(review): the wrapping drops the original exception as
            // cause — only its message is preserved.
            if (e instanceof IllegalArgumentException) {
                throw e;
            } else {
                throw new GatewayConfigParserException(e.getMessage());
            }
        }
        List<String> xmlParseErrors = new ArrayList<>();
        // First pass: parse as-is, mainly to obtain the declared <properties>.
        try {
            config = GatewayConfigDocument.Factory.parse(new FileInputStream(translatedConfigFile), parseOptions);
        } catch (Exception e) {
            // track the parse error so that we don't make the 2nd pass through the file
            xmlParseErrors.add("Invalid XML: " + getRootCause(e).getMessage());
        }
        if (xmlParseErrors.isEmpty()) {
            // The properties used in parameter substitution are now proper XMLBeans
            // and should be injected after an initial parse
            GatewayConfigDocument.GatewayConfig gatewayConfig = config.getGatewayConfig();
            PropertiesType properties = gatewayConfig.getProperties();
            Map<String, String> propertiesMap = new HashMap<>();
            if (properties != null) {
                for (PropertyType propertyType : properties.getPropertyArray()) {
                    propertiesMap.put(propertyType.getName(), propertyType.getValue());
                }
            }
            // make a second pass through the file now, injecting the properties and performing XSL translations
            // Injector runs on its own thread and feeds the piped stream below.
            InputStream xmlInjectedIn = new PipedInputStream();
            OutputStream xmlInjectedOut = new PipedOutputStream((PipedInputStream) xmlInjectedIn);
            ExecutorService xmlInjectedExecutor = Executors.newSingleThreadExecutor();
            Future<Boolean> xmlInjectedFuture = xmlInjectedExecutor.submit(new XMLParameterInjector(new FileInputStream(
                    translatedConfigFile), xmlInjectedOut, propertiesMap, configuration, xmlParseErrors));
            // trace injected xml
            if (LOGGER.isTraceEnabled()) {
                xmlInjectedIn = bufferToTraceLog(xmlInjectedIn,
                        "Gateway config file '" + configFileName + "' post parameter injection", LOGGER);
            }
            // Pass gateway-config through the pre-parse transformer
            InputStream xmlTransformedIn = new PipedInputStream();
            OutputStream xmlTransformedOut = new PipedOutputStream((PipedInputStream) xmlTransformedIn);
            ExecutorService xmlTransformedExecutor = Executors.newSingleThreadExecutor();
            Future<Boolean> xmlTransformedFuture = xmlTransformedExecutor.submit(
                    new XSLTransformer(xmlInjectedIn, xmlTransformedOut, GATEWAY_CONFIG_ANNOTATE_TYPES_XSL));
            // trace transformed xml
            if (LOGGER.isTraceEnabled()) {
                xmlTransformedIn = bufferToTraceLog(xmlTransformedIn,
                        "Gateway config file '" + configFileName + "' post XSL transformation", LOGGER);
            }
            try {
                config = GatewayConfigDocument.Factory.parse(xmlTransformedIn, parseOptions);
            } catch (Exception e) {
                // If parsing with previous namespace was also unsuccessful,
                // process errors top down, failing fast, for user level errors
                try {
                    // Future.get() surfaces injector/transformer failures as
                    // the more user-relevant error; only when both pipeline
                    // stages succeeded is the parse exception itself rethrown.
                    if (xmlInjectedFuture.get()) {
                        if (xmlTransformedFuture.get()) {
                            throw e;
                        }
                    }
                } catch (Exception n) {
                    xmlParseErrors.add("Invalid XML: " + getRootCause(n).getMessage());
                }
            } finally {
                // Tear down the pipeline threads regardless of the outcome.
                xmlInjectedFuture.cancel(true);
                xmlInjectedExecutor.shutdownNow();
                xmlTransformedFuture.cancel(true);
                xmlTransformedExecutor.shutdownNow();
            }
        }
        validateGatewayConfig(config, xmlParseErrors);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("parsed " + " gateway config file '" + configFileName + "' in [" + (System.currentTimeMillis() - time) +
                    " ms]");
        }
        return config;
    }

    /**
     * Validate the parsed gateway configuration file.
     *
     * @param configDoc        the XmlObject representing the gateway-config document
     * @param preProcessErrors errors collected during the earlier parse passes;
     *                         if non-empty, schema validation is skipped and
     *                         these are reported instead
     */
    private void validateGatewayConfig(GatewayConfigDocument configDoc, List<String> preProcessErrors) {
        List<XmlError> errorList = new ArrayList<>();
        for (String preProcessError : preProcessErrors) {
            errorList.add(XmlError.forMessage(preProcessError, XmlError.SEVERITY_ERROR));
        }
        if (errorList.isEmpty()) {
            XmlOptions validationOptions = new XmlOptions();
            validationOptions.setLoadLineNumbers();
            validationOptions.setLoadLineNumbers(XmlOptions.LOAD_LINE_NUMBERS_END_ELEMENT);
            validationOptions.setErrorListener(errorList);
            boolean valid = configDoc.validate(validationOptions);
            if (valid) {
                // Perform custom validations that aren't expressed in the XSD
                GatewayConfigDocument.GatewayConfig config = configDoc.getGatewayConfig();
                // Services must carry unique, non-empty names.
                ServiceType[] services = config.getServiceArray();
                if (services != null && services.length > 0) {
                    List<String> serviceNames = new ArrayList<>();
                    for (ServiceType service : services) {
                        String name = service.getName();
                        if (name == null || name.length() == 0) {
                            errorList.add(XmlError.forMessage("All services must have unique non-empty names",
                                    XmlError.SEVERITY_ERROR));
                        } else if (serviceNames.indexOf(name) >= 0) {
                            errorList.add(XmlError
                                    .forMessage("Service name must be unique. More than one service named '" + name + "'",
                                            XmlError.SEVERITY_ERROR));
                        } else {
                            serviceNames.add(name);
                        }
                    }
                }
                // At most one <security>, <service-defaults> and <cluster>.
                SecurityType[] security = config.getSecurityArray();
                if (security != null && security.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <security> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
                ServiceDefaultsType[] serviceDefaults = config.getServiceDefaultsArray();
                if (serviceDefaults != null && serviceDefaults.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <service-defaults> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
                ClusterType[] clusterConfigs = config.getClusterArray();
                if (clusterConfigs != null && clusterConfigs.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <cluster> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
            }
        }
        // Report all validation errors
        if (errorList.size() > 0) {
            String validationError = "Validation errors in gateway configuration file";
            LOGGER.error(validationError);
            for (XmlError error : errorList) {
                int line = error.getLine();
                if (line != -1) {
                    int column = error.getColumn();
                    if (column == -1) {
                        LOGGER.error("  Line: " + line);
                    } else {
                        LOGGER.error("  Line: " + line + " Column: " + column);
                    }
                }
                LOGGER.error("  " + error.getMessage().replaceAll("@" + GatewayConfigNamespace.CURRENT_NS, ""));
                // Special-cased messages for known user-facing pitfalls.
                if (error.getMessage().contains("notify-options") || error.getMessage().contains("notify")) {
                    validationError = "Could not start because of references to APNs in the configuration."
                            + " APNs is not supported in this version of the gateway, but will be added in a future release.";
                    LOGGER.error(validationError);
                }
                if (error.getMessage().contains("DataRateString")) {
                    // Yeah, it's crude, but customers are going to keep tripping over cases like 100KB/s being invalid otherwise
                    // Example output:
                    // ERROR - Validation errors in gateway configuration file
                    // ERROR - Line: 12 Column: 36
                    // ERROR - string value '1m' does not match pattern for DataRateString in namespace http://xmlns.kaazing
                    // .com/2012/08/gateway
                    // ERROR - (permitted data rate units are B/s, kB/s, KiB/s, kB/s, MB/s, and MiB/s)
                    // ERROR - <xml-fragment xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"/>
                    LOGGER.error("  " + "(permitted data rate units are B/s, kB/s, KiB/s, kB/s, MB/s, and MiB/s)");
                }
                if (error.getCursorLocation() != null) {
                    LOGGER.error("  " + error.getCursorLocation().xmlText());
                }
            }
            throw new GatewayConfigParserException(validationError);
        }
    }

    /**
     * Get the root cause from a <code>Throwable</code> stack
     *
     * @param throwable the throwable whose cause chain is walked; the chain
     *                  walk also stops on a cycle
     * @return the deepest distinct cause in the chain
     */
    private static Throwable getRootCause(Throwable throwable) {
        List<Throwable> list = new ArrayList<>();
        while (throwable != null && !list.contains(throwable)) {
            list.add(throwable);
            throwable = throwable.getCause();
        }
        return list.get(list.size() - 1);
    }

    /**
     * Buffer a stream, flushing it to <code>log</code> and returning it as input
     *
     * @param input   stream to drain completely
     * @param message prefix logged before the buffered content
     * @param log     target trace logger
     * @return a fresh stream over the buffered bytes
     */
    private static InputStream bufferToTraceLog(InputStream input, String message, Logger log) {
        InputStream output;
        try {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            int read;
            byte[] data = new byte[16384];
            while ((read = input.read(data, 0, data.length)) != -1) {
                buffer.write(data, 0, read);
            }
            buffer.flush();
            // Content is UTF-16 (CHARSET_OUTPUT) by this point in the pipeline.
            log.trace(message + "\n\n\n" + new String(buffer.toByteArray(), CHARSET_OUTPUT) + "\n\n\n");
            output = new ByteArrayInputStream(buffer.toByteArray());
        } catch (Exception e) {
            throw new RuntimeException("could not buffer stream", e);
        }
        return output;
    }

    /**
     * Count the number of new lines
     *
     * @param ch     character buffer
     * @param start  first index to inspect
     * @param length number of characters
     */
    private static int countNewLines(char[] ch, int start, int length) {
        int newLineCount = 0;
        // quite reliable, since only Commodore 8-bit machines, TRS-80, Apple II family, Mac OS up to version 9 and OS-9
        // use only '\r'
        // NOTE(review): the loop bound is 'length', not 'start + length' —
        // this undercounts whenever start > 0; confirm callers always pass
        // start == 0 or that this is intended.
        for (int i = start; i < length; i++) {
            newLineCount = newLineCount + ((ch[i] == '\n') ? 1 : 0);
        }
        return newLineCount;
    }

    /**
     * Inject resolved parameter values into XML stream
     */
    private static final class XMLParameterInjector implements Callable<Boolean> {
        private InputStream souceInput;
        private OutputStreamWriter injectedOutput;
        // Property name -> value pairs declared in the config's <properties>.
        private Map<String, String> properties;
        private Properties configuration;
        // Shared error sink; entries are appended by ConfigParameter.
        private List<String> errors;
        // Number of the line the output writer has reached; kept in sync so
        // error line numbers in later parses match the original file.
        private int currentFlushedLine = 1;

        public XMLParameterInjector(InputStream souceInput, OutputStream injectedOutput, Map<String, String> properties,
                                    Properties configuration, List<String> errors)
                throws UnsupportedEncodingException {
            this.souceInput = souceInput;
            this.injectedOutput = new OutputStreamWriter(injectedOutput, CHARSET_OUTPUT_XML);
            this.properties = properties;
            this.configuration = configuration;
            this.errors = errors;
        }

        // Writes and flushes a slice, tracking the current output line.
        private void write(char[] ch, int start, int length) {
            try {
                currentFlushedLine += countNewLines(ch, start, length);
                injectedOutput.write(ch, start, length);
                injectedOutput.flush();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        private void write(char[] ch) {
            write(ch, 0, ch.length);
        }

        private void write(String s) {
            write(s.toCharArray(), 0, s.length());
        }

        // Closes both ends of the pipe; unblocks any reader on the output.
        private void close() {
            try {
                souceInput.close();
                injectedOutput.flush();
                injectedOutput.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Parse the config file, resolving and injecting parameters encountered
         *
         * @return <code>true</code> if processed without errors, <code>false</code> otherwise
         */
        @Override
        public Boolean call() throws Exception {
            try {
                SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
                DefaultHandler handler = new DefaultHandler2() {
                    private Locator2 locator;

                    // Emits line separators until the output has caught up
                    // with the parser's current line, preserving the source
                    // line numbers for later error reporting.
                    private void realignElement() {
                        String realignment = "";
                        for (int i = 0; i < locator.getLineNumber() - currentFlushedLine; i++) {
                            realignment += System.getProperty("line.separator");
                        }
                        write(realignment);
                    }

                    @Override
                    public void setDocumentLocator(Locator locator) {
                        this.locator = (Locator2) locator;
                    }

                    @Override
                    public void startDocument() throws SAXException {
                        // Re-emit the prologue with the pipeline's charset.
                        write("<?xml version=\"1.0\" encoding=\"" + CHARSET_OUTPUT_XML + "\" ?>" +
                                System.getProperty("line.separator"));
                    }

                    @Override
                    public void startElement(String uri, String localName, String qName, Attributes attributes)
                            throws SAXException {
                        realignElement();
                        String elementName = (localName == null || localName.equals("")) ? qName : localName;
                        write("<" + elementName);
                        if (attributes != null) {
                            // Attribute values also undergo parameter substitution.
                            for (int i = 0; i < attributes.getLength(); i++) {
                                String attributeName = (attributes.getLocalName(i) == null || attributes
                                        .getLocalName(i).equals("")) ? attributes.getQName(i) : attributes
                                        .getLocalName(i);
                                write(" " + attributeName + "=\"");
                                char[] attributeValue = attributes.getValue(i).toCharArray();
                                write(ConfigParameter.resolveAndReplace(attributeValue, 0,
                                        attributeValue.length, properties, configuration, errors) + "\"");
                            }
                        }
                        write(new char[]{'>'});
                    }

                    @Override
                    public void comment(char[] ch, int start, int length) throws SAXException {
                        // Comments are preserved verbatim (no substitution).
                        write("<!--");
                        write(ch, start, length);
                        write("-->");
                    }

                    @Override
                    public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
                        write(ch, start, length);
                    }

                    @Override
                    public void characters(char[] ch, int start, int length) throws SAXException {
                        // Element text undergoes parameter substitution.
                        write(ConfigParameter.resolveAndReplace(ch, start, length, properties, configuration, errors));
                    }

                    @Override
                    public void endElement(String uri, String localName, String qName) throws SAXException {
                        realignElement();
                        String elementName = (localName == null || localName.equals("")) ? qName : localName;
                        write("</" + elementName + ">");
                    }
                };
                // Lexical handler registration lets comment() above see comments.
                parser.getXMLReader().setProperty("http://xml.org/sax/properties/lexical-handler", handler);
                // NOTE(review): sizes the parser's input buffer to the bytes
                // currently available on the stream — confirm this is the
                // intended sizing heuristic.
                parser.getXMLReader().setProperty("http://apache.org/xml/properties/input-buffer-size",
                        souceInput.available());
                parser.parse(souceInput, handler);
            } finally {
                close();
            }
            return errors.size() == 0;
        }
    }

    /**
     * XSL Transformer.
     */
    private static final class XSLTransformer implements Callable<Boolean> {
        private InputStream streamToTransform;
        private OutputStream transformerOutput;
        // Classpath resource name of the stylesheet to apply.
        private String stylesheet;

        /**
         * Constructor.
         *
         * @param streamToTransform the gateway configuration file to transform
         * @param transformerOutput the output stream to be used for transformed output
         */
        public XSLTransformer(InputStream streamToTransform, OutputStream transformerOutput, String stylesheet) {
            this.streamToTransform = streamToTransform;
            this.transformerOutput = transformerOutput;
            this.stylesheet = stylesheet;
        }

        /**
         * Transform the gateway configuration file using the stylesheet.
         *
         * @return <code>true</code> if processed without errors, <code>false</code> otherwise
         */
        @Override
        public Boolean call() throws Exception {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            URL resource = classLoader.getResource(stylesheet);
            InputStream xslIn = resource.openStream();
            try {
                Source xmlSource = new StreamSource(streamToTransform);
                Source xslSource = new StreamSource(xslIn);
                Result xmlResult = new StreamResult(transformerOutput);
                Transformer transformer = TransformerFactory.newInstance().newTransformer(xslSource);
                transformer.setOutputProperty(OutputKeys.ENCODING, CHARSET_OUTPUT_XML);
                // Escalate every transform diagnostic to a failure so the
                // caller's Future.get() surfaces it.
                transformer.setErrorListener(new ErrorListener() {
                    @Override
                    public void warning(TransformerException exception) throws TransformerException {
                        throw exception;
                    }

                    @Override
                    public void fatalError(TransformerException exception) throws TransformerException {
                        throw exception;
                    }

                    @Override
                    public void error(TransformerException exception) throws TransformerException {
                        throw exception;
                    }
                });
                transformer.transform(xmlSource, xmlResult);
            } finally {
                // Close the pipe so the downstream reader sees EOF.
                transformerOutput.flush();
                transformerOutput.close();
                xslIn.close();
            }
            return Boolean.TRUE;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.avatica.util.ByteString;
import org.apache.calcite.avatica.util.DateTimeUtils;
import org.apache.calcite.avatica.util.TimeUnitRange;
import org.apache.calcite.runtime.SqlFunctions;
import org.apache.calcite.runtime.Utilities;
import org.junit.Test;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.calcite.avatica.util.DateTimeUtils.EPOCH_JULIAN;
import static org.apache.calcite.avatica.util.DateTimeUtils.dateStringToUnixDate;
import static org.apache.calcite.avatica.util.DateTimeUtils.digitCount;
import static org.apache.calcite.avatica.util.DateTimeUtils.floorDiv;
import static org.apache.calcite.avatica.util.DateTimeUtils.floorMod;
import static org.apache.calcite.avatica.util.DateTimeUtils.intervalDayTimeToString;
import static org.apache.calcite.avatica.util.DateTimeUtils.intervalYearMonthToString;
import static org.apache.calcite.avatica.util.DateTimeUtils.timeStringToUnixDate;
import static org.apache.calcite.avatica.util.DateTimeUtils.timestampStringToUnixDate;
import static org.apache.calcite.avatica.util.DateTimeUtils.unixDateExtract;
import static org.apache.calcite.avatica.util.DateTimeUtils.unixDateToString;
import static org.apache.calcite.avatica.util.DateTimeUtils.unixTimeToString;
import static org.apache.calcite.avatica.util.DateTimeUtils.unixTimestamp;
import static org.apache.calcite.avatica.util.DateTimeUtils.unixTimestampToString;
import static org.apache.calcite.avatica.util.DateTimeUtils.ymdToJulian;
import static org.apache.calcite.avatica.util.DateTimeUtils.ymdToUnixDate;
import static org.apache.calcite.runtime.SqlFunctions.addMonths;
import static org.apache.calcite.runtime.SqlFunctions.charLength;
import static org.apache.calcite.runtime.SqlFunctions.concat;
import static org.apache.calcite.runtime.SqlFunctions.greater;
import static org.apache.calcite.runtime.SqlFunctions.initcap;
import static org.apache.calcite.runtime.SqlFunctions.lesser;
import static org.apache.calcite.runtime.SqlFunctions.lower;
import static org.apache.calcite.runtime.SqlFunctions.ltrim;
import static org.apache.calcite.runtime.SqlFunctions.rtrim;
import static org.apache.calcite.runtime.SqlFunctions.subtractMonths;
import static org.apache.calcite.runtime.SqlFunctions.trim;
import static org.apache.calcite.runtime.SqlFunctions.upper;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
* Unit test for the methods in {@link SqlFunctions} that implement SQL
* functions.
*/
public class SqlFunctionsTest {
/** Verifies that {@code charLength} counts the characters of a string. */
@Test public void testCharLength() {
    assertThat(charLength("xyz"), is(3));
}
/** Verifies {@code concat}, including its (non-SQL) handling of nulls. */
@Test public void testConcat() {
    assertThat(concat("a b", "cd"), is("a bcd"));
    // The code generator will ensure that nulls are never passed in. If we
    // pass in null, it is treated like the string "null", as the following
    // tests show. Not the desired behavior for SQL.
    assertThat(concat("a", null), is("anull"));
    assertThat(concat((String) null, null), is("nullnull"));
    assertThat(concat(null, "b"), is("nullb"));
}
/** Verifies that {@code lower} lower-cases its argument. */
@Test public void testLower() {
    assertThat(lower("A bCd"), is("a bcd"));
}
@Test public void testUpper() {
assertEquals("A BCD", upper("A bCd"));
}
@Test public void testInitcap() {
assertEquals("Aa", initcap("aA"));
assertEquals("Zz", initcap("zz"));
assertEquals("Az", initcap("AZ"));
assertEquals("Try A Little ", initcap("tRy a littlE "));
assertEquals("Won'T It?No", initcap("won't it?no"));
assertEquals("1a", initcap("1A"));
assertEquals(" B0123b", initcap(" b0123B"));
}
@Test public void testLesser() {
assertEquals("a", lesser("a", "bc"));
assertEquals("ac", lesser("bc", "ac"));
try {
Object o = lesser("a", null);
fail("Expected NPE, got " + o);
} catch (NullPointerException e) {
// ok
}
assertEquals("a", lesser(null, "a"));
assertNull(lesser((String) null, null));
}
@Test public void testGreater() {
assertEquals("bc", greater("a", "bc"));
assertEquals("bc", greater("bc", "ac"));
try {
Object o = greater("a", null);
fail("Expected NPE, got " + o);
} catch (NullPointerException e) {
// ok
}
assertEquals("a", greater(null, "a"));
assertNull(greater((String) null, null));
}
/** Test for {@link SqlFunctions#rtrim}. */
@Test public void testRtrim() {
assertEquals("", rtrim(""));
assertEquals("", rtrim(" "));
assertEquals(" x", rtrim(" x "));
assertEquals(" x", rtrim(" x "));
assertEquals(" x y", rtrim(" x y "));
assertEquals(" x", rtrim(" x"));
assertEquals("x", rtrim("x"));
}
/** Test for {@link SqlFunctions#ltrim}. */
@Test public void testLtrim() {
assertEquals("", ltrim(""));
assertEquals("", ltrim(" "));
assertEquals("x ", ltrim(" x "));
assertEquals("x ", ltrim(" x "));
assertEquals("x y ", ltrim("x y "));
assertEquals("x", ltrim(" x"));
assertEquals("x", ltrim("x"));
}
/** Test for {@link SqlFunctions#trim}. */
@Test public void testTrim() {
assertEquals("", trimSpacesBoth(""));
assertEquals("", trimSpacesBoth(" "));
assertEquals("x", trimSpacesBoth(" x "));
assertEquals("x", trimSpacesBoth(" x "));
assertEquals("x y", trimSpacesBoth(" x y "));
assertEquals("x", trimSpacesBoth(" x"));
assertEquals("x", trimSpacesBoth("x"));
}
static String trimSpacesBoth(String s) {
return trim(true, true, " ", s);
}
@Test public void testUnixDateToString() {
// Verify these using the "date" command. E.g.
// $ date -u --date="@$(expr 10957 \* 86400)"
// Sat Jan 1 00:00:00 UTC 2000
assertEquals("2000-01-01", unixDateToString(10957));
assertEquals("1970-01-01", unixDateToString(0));
assertEquals("1970-01-02", unixDateToString(1));
assertEquals("1971-01-01", unixDateToString(365));
assertEquals("1972-01-01", unixDateToString(730));
assertEquals("1972-02-28", unixDateToString(788));
assertEquals("1972-02-29", unixDateToString(789));
assertEquals("1972-03-01", unixDateToString(790));
assertEquals("1969-01-01", unixDateToString(-365));
assertEquals("2000-01-01", unixDateToString(10957));
assertEquals("2000-02-28", unixDateToString(11015));
assertEquals("2000-02-29", unixDateToString(11016));
assertEquals("2000-03-01", unixDateToString(11017));
assertEquals("1900-01-01", unixDateToString(-25567));
assertEquals("1900-02-28", unixDateToString(-25509));
assertEquals("1900-03-01", unixDateToString(-25508));
assertEquals("1945-02-24", unixDateToString(-9077));
}
@Test public void testYmdToUnixDate() {
assertEquals(0, ymdToUnixDate(1970, 1, 1));
assertEquals(365, ymdToUnixDate(1971, 1, 1));
assertEquals(-365, ymdToUnixDate(1969, 1, 1));
assertEquals(11015, ymdToUnixDate(2000, 2, 28));
assertEquals(11016, ymdToUnixDate(2000, 2, 29));
assertEquals(11017, ymdToUnixDate(2000, 3, 1));
assertEquals(-9077, ymdToUnixDate(1945, 2, 24));
assertEquals(-25509, ymdToUnixDate(1900, 2, 28));
assertEquals(-25508, ymdToUnixDate(1900, 3, 1));
}
@Test public void testDateToString() {
checkDateString("1970-01-01", 0);
//noinspection PointlessArithmeticExpression
checkDateString("1971-02-03", 0 + 365 + 31 + (3 - 1));
//noinspection PointlessArithmeticExpression
checkDateString("1971-02-28", 0 + 365 + 31 + (28 - 1));
//noinspection PointlessArithmeticExpression
checkDateString("1971-03-01", 0 + 365 + 31 + 28 + (1 - 1));
//noinspection PointlessArithmeticExpression
checkDateString("1972-02-28", 0 + 365 * 2 + 31 + (28 - 1));
//noinspection PointlessArithmeticExpression
checkDateString("1972-02-29", 0 + 365 * 2 + 31 + (29 - 1));
//noinspection PointlessArithmeticExpression
checkDateString("1972-03-01", 0 + 365 * 2 + 31 + 29 + (1 - 1));
}
private void checkDateString(String s, int d) {
assertThat(unixDateToString(d), equalTo(s));
assertThat(dateStringToUnixDate(s), equalTo(d));
}
@Test public void testTimeToString() {
checkTimeString("00:00:00", 0);
checkTimeString("23:59:59", 86400000 - 1000);
}
private void checkTimeString(String s, int d) {
assertThat(unixTimeToString(d), equalTo(s));
assertThat(timeStringToUnixDate(s), equalTo(d));
}
@Test public void testTimestampToString() {
// ISO format would be "1970-01-01T00:00:00" but SQL format is different
checkTimestampString("1970-01-01 00:00:00", 0L);
checkTimestampString("1970-02-01 23:59:59", 86400000L * 32L - 1000L);
}
private void checkTimestampString(String s, long d) {
assertThat(unixTimestampToString(d), equalTo(s));
assertThat(timestampStringToUnixDate(s), equalTo(d));
}
@Test public void testIntervalYearMonthToString() {
TimeUnitRange range = TimeUnitRange.YEAR_TO_MONTH;
assertEquals("+0-00", intervalYearMonthToString(0, range));
assertEquals("+1-00", intervalYearMonthToString(12, range));
assertEquals("+1-01", intervalYearMonthToString(13, range));
assertEquals("-1-01", intervalYearMonthToString(-13, range));
}
@Test public void testIntervalDayTimeToString() {
assertEquals("+0", intervalYearMonthToString(0, TimeUnitRange.YEAR));
assertEquals("+0-00",
intervalYearMonthToString(0, TimeUnitRange.YEAR_TO_MONTH));
assertEquals("+0", intervalYearMonthToString(0, TimeUnitRange.MONTH));
assertEquals("+0", intervalDayTimeToString(0, TimeUnitRange.DAY, 0));
assertEquals("+0 00",
intervalDayTimeToString(0, TimeUnitRange.DAY_TO_HOUR, 0));
assertEquals("+0 00:00",
intervalDayTimeToString(0, TimeUnitRange.DAY_TO_MINUTE, 0));
assertEquals("+0 00:00:00",
intervalDayTimeToString(0, TimeUnitRange.DAY_TO_SECOND, 0));
assertEquals("+0", intervalDayTimeToString(0, TimeUnitRange.HOUR, 0));
assertEquals("+0:00",
intervalDayTimeToString(0, TimeUnitRange.HOUR_TO_MINUTE, 0));
assertEquals("+0:00:00",
intervalDayTimeToString(0, TimeUnitRange.HOUR_TO_SECOND, 0));
assertEquals("+0",
intervalDayTimeToString(0, TimeUnitRange.MINUTE, 0));
assertEquals("+0:00",
intervalDayTimeToString(0, TimeUnitRange.MINUTE_TO_SECOND, 0));
assertEquals("+0",
intervalDayTimeToString(0, TimeUnitRange.SECOND, 0));
}
@Test public void testYmdToJulian() {
// All checked using http://aa.usno.navy.mil/data/docs/JulianDate.php.
// We round up - if JulianDate.php gives 2451544.5, we use 2451545.
assertThat(ymdToJulian(2014, 4, 3), equalTo(2456751));
// 2000 is a leap year
assertThat(ymdToJulian(2000, 1, 1), equalTo(2451545));
assertThat(ymdToJulian(2000, 2, 28), equalTo(2451603));
assertThat(ymdToJulian(2000, 2, 29), equalTo(2451604));
assertThat(ymdToJulian(2000, 3, 1), equalTo(2451605));
assertThat(ymdToJulian(1970, 1, 1), equalTo(2440588));
assertThat(ymdToJulian(1970, 1, 1), equalTo(EPOCH_JULIAN));
assertThat(ymdToJulian(1901, 1, 1), equalTo(2415386));
// 1900 is not a leap year
assertThat(ymdToJulian(1900, 10, 17), equalTo(2415310));
assertThat(ymdToJulian(1900, 3, 1), equalTo(2415080));
assertThat(ymdToJulian(1900, 2, 28), equalTo(2415079));
assertThat(ymdToJulian(1900, 2, 1), equalTo(2415052));
assertThat(ymdToJulian(1900, 1, 1), equalTo(2415021));
assertThat(ymdToJulian(1777, 7, 4), equalTo(2370281));
// 2016 is a leap year
assertThat(ymdToJulian(2016, 2, 28), equalTo(2457447));
assertThat(ymdToJulian(2016, 2, 29), equalTo(2457448));
assertThat(ymdToJulian(2016, 3, 1), equalTo(2457449));
}
@Test public void testExtract() {
assertThat(unixDateExtract(TimeUnitRange.YEAR, 0), equalTo(1970L));
assertThat(unixDateExtract(TimeUnitRange.YEAR, -1), equalTo(1969L));
assertThat(unixDateExtract(TimeUnitRange.YEAR, 364), equalTo(1970L));
assertThat(unixDateExtract(TimeUnitRange.YEAR, 365), equalTo(1971L));
assertThat(unixDateExtract(TimeUnitRange.MONTH, 0), equalTo(1L));
assertThat(unixDateExtract(TimeUnitRange.MONTH, -1), equalTo(12L));
assertThat(unixDateExtract(TimeUnitRange.MONTH, 364), equalTo(12L));
assertThat(unixDateExtract(TimeUnitRange.MONTH, 365), equalTo(1L));
thereAndBack(1900, 1, 1);
thereAndBack(1900, 2, 28); // no leap day
thereAndBack(1900, 3, 1);
thereAndBack(1901, 1, 1);
thereAndBack(1901, 2, 28); // no leap day
thereAndBack(1901, 3, 1);
thereAndBack(2000, 1, 1);
thereAndBack(2000, 2, 28);
thereAndBack(2000, 2, 29); // leap day
thereAndBack(2000, 3, 1);
thereAndBack(1964, 1, 1);
thereAndBack(1964, 2, 28);
thereAndBack(1964, 2, 29); // leap day
thereAndBack(1964, 3, 1);
thereAndBack(1864, 1, 1);
thereAndBack(1864, 2, 28);
thereAndBack(1864, 2, 29); // leap day
thereAndBack(1864, 3, 1);
thereAndBack(1900, 1, 1);
thereAndBack(1900, 2, 28);
thereAndBack(1900, 3, 1);
thereAndBack(2004, 2, 28);
thereAndBack(2004, 2, 29); // leap day
thereAndBack(2004, 3, 1);
thereAndBack(2005, 2, 28); // no leap day
thereAndBack(2005, 3, 1);
}
private void thereAndBack(int year, int month, int day) {
final int unixDate = ymdToUnixDate(year, month, day);
assertThat(unixDateExtract(TimeUnitRange.YEAR, unixDate),
equalTo((long) year));
assertThat(unixDateExtract(TimeUnitRange.MONTH, unixDate),
equalTo((long) month));
assertThat(unixDateExtract(TimeUnitRange.DAY, unixDate),
equalTo((long) day));
}
@Test public void testAddMonths() {
checkAddMonths(2016, 1, 1, 2016, 2, 1, 1);
checkAddMonths(2016, 1, 1, 2017, 1, 1, 12);
checkAddMonths(2016, 1, 1, 2017, 2, 1, 13);
checkAddMonths(2016, 1, 1, 2015, 1, 1, -12);
checkAddMonths(2016, 1, 1, 2018, 10, 1, 33);
checkAddMonths(2016, 1, 31, 2016, 5, 1, 3); // roll up
checkAddMonths(2016, 4, 30, 2016, 7, 30, 3); // roll up
checkAddMonths(2016, 1, 31, 2016, 3, 1, 1);
checkAddMonths(2016, 3, 31, 2016, 3, 1, -1);
checkAddMonths(2016, 3, 31, 2116, 3, 31, 1200);
checkAddMonths(2016, 2, 28, 2116, 2, 28, 1200);
}
private void checkAddMonths(int y0, int m0, int d0, int y1, int m1, int d1,
int months) {
final int date0 = ymdToUnixDate(y0, m0, d0);
final long date = addMonths(date0, months);
final int date1 = ymdToUnixDate(y1, m1, d1);
assertThat((int) date, is(date1));
assertThat(subtractMonths(date1, date0),
anyOf(is(months), is(months + 1)));
assertThat(subtractMonths(date1 + 1, date0),
anyOf(is(months), is(months + 1)));
assertThat(subtractMonths(date1, date0 + 1),
anyOf(is(months), is(months - 1)));
assertThat(subtractMonths(d2ts(date1, 1), d2ts(date0, 0)),
anyOf(is(months), is(months + 1)));
assertThat(subtractMonths(d2ts(date1, 0), d2ts(date0, 1)),
anyOf(is(months - 1), is(months), is(months + 1)));
}
/** Converts a date (days since epoch) and milliseconds (since midnight)
* into a timestamp (milliseconds since epoch). */
private long d2ts(int date, int millis) {
return date * DateTimeUtils.MILLIS_PER_DAY + millis;
}
@Test public void testUnixTimestamp() {
assertThat(unixTimestamp(1970, 1, 1, 0, 0, 0), is(0L));
final long day = 86400000L;
assertThat(unixTimestamp(1970, 1, 2, 0, 0, 0), is(day));
assertThat(unixTimestamp(1970, 1, 1, 23, 59, 59), is(86399000L));
// 1900 is not a leap year
final long y1900 = -2203977600000L;
assertThat(unixTimestamp(1900, 2, 28, 0, 0, 0), is(y1900));
assertThat(unixTimestamp(1900, 3, 1, 0, 0, 0), is(y1900 + day));
// 2000 is a leap year
final long y2k = 951696000000L;
assertThat(unixTimestamp(2000, 2, 28, 0, 0, 0), is(y2k));
assertThat(unixTimestamp(2000, 2, 29, 0, 0, 0), is(y2k + day));
assertThat(unixTimestamp(2000, 3, 1, 0, 0, 0), is(y2k + day + day));
// 2016 is a leap year
final long y2016 = 1456617600000L;
assertThat(unixTimestamp(2016, 2, 28, 0, 0, 0), is(y2016));
assertThat(unixTimestamp(2016, 2, 29, 0, 0, 0), is(y2016 + day));
assertThat(unixTimestamp(2016, 3, 1, 0, 0, 0), is(y2016 + day + day));
}
@Test public void testFloor() {
checkFloor(0, 10, 0);
checkFloor(27, 10, 20);
checkFloor(30, 10, 30);
checkFloor(-30, 10, -30);
checkFloor(-27, 10, -30);
}
private void checkFloor(int x, int y, int result) {
assertThat(SqlFunctions.floor(x, y), is(result));
assertThat(SqlFunctions.floor((long) x, (long) y), is((long) result));
assertThat(SqlFunctions.floor((short) x, (short) y), is((short) result));
assertThat(SqlFunctions.floor((byte) x, (byte) y), is((byte) result));
assertThat(
SqlFunctions.floor(BigDecimal.valueOf(x), BigDecimal.valueOf(y)),
is(BigDecimal.valueOf(result)));
}
@Test public void testCeil() {
checkCeil(0, 10, 0);
checkCeil(27, 10, 30);
checkCeil(30, 10, 30);
checkCeil(-30, 10, -30);
checkCeil(-27, 10, -20);
checkCeil(-27, 1, -27);
}
private void checkCeil(int x, int y, int result) {
assertThat(SqlFunctions.ceil(x, y), is(result));
assertThat(SqlFunctions.ceil((long) x, (long) y), is((long) result));
assertThat(SqlFunctions.ceil((short) x, (short) y), is((short) result));
assertThat(SqlFunctions.ceil((byte) x, (byte) y), is((byte) result));
assertThat(
SqlFunctions.ceil(BigDecimal.valueOf(x), BigDecimal.valueOf(y)),
is(BigDecimal.valueOf(result)));
}
/** Unit test for
* {@link Utilities#compare(java.util.List, java.util.List)}. */
@Test public void testCompare() {
final List<String> ac = Arrays.asList("a", "c");
final List<String> abc = Arrays.asList("a", "b", "c");
final List<String> a = Collections.singletonList("a");
final List<String> empty = Collections.emptyList();
assertEquals(0, Utilities.compare(ac, ac));
assertEquals(0, Utilities.compare(ac, new ArrayList<>(ac)));
assertEquals(-1, Utilities.compare(a, ac));
assertEquals(-1, Utilities.compare(empty, ac));
assertEquals(1, Utilities.compare(ac, a));
assertEquals(1, Utilities.compare(ac, abc));
assertEquals(1, Utilities.compare(ac, empty));
assertEquals(0, Utilities.compare(empty, empty));
}
@Test public void testTruncateLong() {
assertEquals(12000L, SqlFunctions.truncate(12345L, 1000L));
assertEquals(12000L, SqlFunctions.truncate(12000L, 1000L));
assertEquals(12000L, SqlFunctions.truncate(12001L, 1000L));
assertEquals(11000L, SqlFunctions.truncate(11999L, 1000L));
assertEquals(-13000L, SqlFunctions.truncate(-12345L, 1000L));
assertEquals(-12000L, SqlFunctions.truncate(-12000L, 1000L));
assertEquals(-13000L, SqlFunctions.truncate(-12001L, 1000L));
assertEquals(-12000L, SqlFunctions.truncate(-11999L, 1000L));
}
@Test public void testTruncateInt() {
assertEquals(12000, SqlFunctions.truncate(12345, 1000));
assertEquals(12000, SqlFunctions.truncate(12000, 1000));
assertEquals(12000, SqlFunctions.truncate(12001, 1000));
assertEquals(11000, SqlFunctions.truncate(11999, 1000));
assertEquals(-13000, SqlFunctions.truncate(-12345, 1000));
assertEquals(-12000, SqlFunctions.truncate(-12000, 1000));
assertEquals(-13000, SqlFunctions.truncate(-12001, 1000));
assertEquals(-12000, SqlFunctions.truncate(-11999, 1000));
assertEquals(12000, SqlFunctions.round(12345, 1000));
assertEquals(13000, SqlFunctions.round(12845, 1000));
assertEquals(-12000, SqlFunctions.round(-12345, 1000));
assertEquals(-13000, SqlFunctions.round(-12845, 1000));
}
@Test public void testByteString() {
final byte[] bytes = {(byte) 0xAB, (byte) 0xFF};
final ByteString byteString = new ByteString(bytes);
assertEquals(2, byteString.length());
assertEquals("abff", byteString.toString());
assertEquals("abff", byteString.toString(16));
assertEquals("1010101111111111", byteString.toString(2));
final ByteString emptyByteString = new ByteString(new byte[0]);
assertEquals(0, emptyByteString.length());
assertEquals("", emptyByteString.toString());
assertEquals("", emptyByteString.toString(16));
assertEquals("", emptyByteString.toString(2));
assertEquals(emptyByteString, ByteString.EMPTY);
assertEquals("ff", byteString.substring(1, 2).toString());
assertEquals("abff", byteString.substring(0, 2).toString());
assertEquals("", byteString.substring(2, 2).toString());
// Add empty string, get original string back
assertSame(byteString.concat(emptyByteString), byteString);
final ByteString byteString1 = new ByteString(new byte[]{(byte) 12});
assertEquals("abff0c", byteString.concat(byteString1).toString());
final byte[] bytes3 = {(byte) 0xFF};
final ByteString byteString3 = new ByteString(bytes3);
assertEquals(0, byteString.indexOf(emptyByteString));
assertEquals(-1, byteString.indexOf(byteString1));
assertEquals(1, byteString.indexOf(byteString3));
assertEquals(-1, byteString3.indexOf(byteString));
thereAndBack(bytes);
thereAndBack(emptyByteString.getBytes());
thereAndBack(new byte[]{10, 0, 29, -80});
assertThat(ByteString.of("ab12", 16).toString(16), equalTo("ab12"));
assertThat(ByteString.of("AB0001DdeAD3", 16).toString(16),
equalTo("ab0001ddead3"));
assertThat(ByteString.of("", 16), equalTo(emptyByteString));
try {
ByteString x = ByteString.of("ABg0", 16);
fail("expected error, got " + x);
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("invalid hex character: g"));
}
try {
ByteString x = ByteString.of("ABC", 16);
fail("expected error, got " + x);
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("hex string has odd length"));
}
final byte[] bytes4 = {10, 0, 1, -80};
final ByteString byteString4 = new ByteString(bytes4);
final byte[] bytes5 = {10, 0, 1, 127};
final ByteString byteString5 = new ByteString(bytes5);
final ByteString byteString6 = new ByteString(bytes4);
assertThat(byteString4.compareTo(byteString5) > 0, is(true));
assertThat(byteString4.compareTo(byteString6) == 0, is(true));
assertThat(byteString5.compareTo(byteString4) < 0, is(true));
}
private void thereAndBack(byte[] bytes) {
final ByteString byteString = new ByteString(bytes);
final byte[] bytes2 = byteString.getBytes();
assertThat(bytes, equalTo(bytes2));
final String base64String = byteString.toBase64String();
final ByteString byteString1 = ByteString.ofBase64(base64String);
assertThat(byteString, equalTo(byteString1));
}
@Test public void testEasyLog10() {
assertEquals(1, digitCount(0));
assertEquals(1, digitCount(1));
assertEquals(1, digitCount(9));
assertEquals(2, digitCount(10));
assertEquals(2, digitCount(11));
assertEquals(2, digitCount(99));
assertEquals(3, digitCount(100));
}
@Test public void testFloorDiv() {
assertThat(floorDiv(13, 3), equalTo(4L));
assertThat(floorDiv(12, 3), equalTo(4L));
assertThat(floorDiv(11, 3), equalTo(3L));
assertThat(floorDiv(-13, 3), equalTo(-5L));
assertThat(floorDiv(-12, 3), equalTo(-4L));
assertThat(floorDiv(-11, 3), equalTo(-4L));
assertThat(floorDiv(0, 3), equalTo(0L));
assertThat(floorDiv(1, 3), equalTo(0L));
assertThat(floorDiv(-1, 3), is(-1L));
}
@Test public void testFloorMod() {
assertThat(floorMod(13, 3), is(1L));
assertThat(floorMod(12, 3), is(0L));
assertThat(floorMod(11, 3), is(2L));
assertThat(floorMod(-13, 3), is(2L));
assertThat(floorMod(-12, 3), is(0L));
assertThat(floorMod(-11, 3), is(1L));
assertThat(floorMod(0, 3), is(0L));
assertThat(floorMod(1, 3), is(1L));
assertThat(floorMod(-1, 3), is(2L));
}
@Test public void testEqWithAny() {
// Non-numeric same type equality check
assertThat(SqlFunctions.eqAny("hello", "hello"), is(true));
// Numeric types equality check
assertThat(SqlFunctions.eqAny(1, 1L), is(true));
assertThat(SqlFunctions.eqAny(1, 1.0D), is(true));
assertThat(SqlFunctions.eqAny(1L, 1.0D), is(true));
assertThat(SqlFunctions.eqAny(new BigDecimal(1L), 1), is(true));
assertThat(SqlFunctions.eqAny(new BigDecimal(1L), 1L), is(true));
assertThat(SqlFunctions.eqAny(new BigDecimal(1L), 1.0D), is(true));
assertThat(SqlFunctions.eqAny(new BigDecimal(1L), new BigDecimal(1.0D)),
is(true));
// Non-numeric different type equality check
assertThat(SqlFunctions.eqAny("2", 2), is(false));
}
@Test public void testNeWithAny() {
// Non-numeric same type inequality check
assertThat(SqlFunctions.neAny("hello", "world"), is(true));
// Numeric types inequality check
assertThat(SqlFunctions.neAny(1, 2L), is(true));
assertThat(SqlFunctions.neAny(1, 2.0D), is(true));
assertThat(SqlFunctions.neAny(1L, 2.0D), is(true));
assertThat(SqlFunctions.neAny(new BigDecimal(2L), 1), is(true));
assertThat(SqlFunctions.neAny(new BigDecimal(2L), 1L), is(true));
assertThat(SqlFunctions.neAny(new BigDecimal(2L), 1.0D), is(true));
assertThat(SqlFunctions.neAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is(true));
// Non-numeric different type inequality check
assertThat(SqlFunctions.neAny("2", 2), is(true));
}
@Test public void testLtWithAny() {
// Non-numeric same type "less then" check
assertThat(SqlFunctions.ltAny("apple", "banana"), is(true));
// Numeric types "less than" check
assertThat(SqlFunctions.ltAny(1, 2L), is(true));
assertThat(SqlFunctions.ltAny(1, 2.0D), is(true));
assertThat(SqlFunctions.ltAny(1L, 2.0D), is(true));
assertThat(SqlFunctions.ltAny(new BigDecimal(1L), 2), is(true));
assertThat(SqlFunctions.ltAny(new BigDecimal(1L), 2L), is(true));
assertThat(SqlFunctions.ltAny(new BigDecimal(1L), 2.0D), is(true));
assertThat(SqlFunctions.ltAny(new BigDecimal(1L), new BigDecimal(2.0D)),
is(true));
// Non-numeric different type but both implements Comparable
// "less than" check
try {
assertThat(SqlFunctions.ltAny("1", 2L), is(false));
fail("'lt' on non-numeric different type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for comparison: class java.lang.String < "
+ "class java.lang.Long"));
}
}
@Test public void testLeWithAny() {
// Non-numeric same type "less or equal" check
assertThat(SqlFunctions.leAny("apple", "banana"), is(true));
assertThat(SqlFunctions.leAny("apple", "apple"), is(true));
// Numeric types "less or equal" check
assertThat(SqlFunctions.leAny(1, 2L), is(true));
assertThat(SqlFunctions.leAny(1, 1L), is(true));
assertThat(SqlFunctions.leAny(1, 2.0D), is(true));
assertThat(SqlFunctions.leAny(1, 1.0D), is(true));
assertThat(SqlFunctions.leAny(1L, 2.0D), is(true));
assertThat(SqlFunctions.leAny(1L, 1.0D), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 2), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 1), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 2L), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 1L), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 2.0D), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), 1.0D), is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), new BigDecimal(2.0D)),
is(true));
assertThat(SqlFunctions.leAny(new BigDecimal(1L), new BigDecimal(1.0D)),
is(true));
// Non-numeric different type but both implements Comparable
// "less or equal" check
try {
assertThat(SqlFunctions.leAny("2", 2L), is(false));
fail("'le' on non-numeric different type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for comparison: class java.lang.String <= "
+ "class java.lang.Long"));
}
}
@Test public void testGtWithAny() {
// Non-numeric same type "greater then" check
assertThat(SqlFunctions.gtAny("banana", "apple"), is(true));
// Numeric types "greater than" check
assertThat(SqlFunctions.gtAny(2, 1L), is(true));
assertThat(SqlFunctions.gtAny(2, 1.0D), is(true));
assertThat(SqlFunctions.gtAny(2L, 1.0D), is(true));
assertThat(SqlFunctions.gtAny(new BigDecimal(2L), 1), is(true));
assertThat(SqlFunctions.gtAny(new BigDecimal(2L), 1L), is(true));
assertThat(SqlFunctions.gtAny(new BigDecimal(2L), 1.0D), is(true));
assertThat(SqlFunctions.gtAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is(true));
// Non-numeric different type but both implements Comparable
// "greater than" check
try {
assertThat(SqlFunctions.gtAny("2", 1L), is(false));
fail("'gt' on non-numeric different type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for comparison: class java.lang.String > "
+ "class java.lang.Long"));
}
}
@Test public void testGeWithAny() {
// Non-numeric same type "greater or equal" check
assertThat(SqlFunctions.geAny("banana", "apple"), is(true));
assertThat(SqlFunctions.geAny("apple", "apple"), is(true));
// Numeric types "greater or equal" check
assertThat(SqlFunctions.geAny(2, 1L), is(true));
assertThat(SqlFunctions.geAny(1, 1L), is(true));
assertThat(SqlFunctions.geAny(2, 1.0D), is(true));
assertThat(SqlFunctions.geAny(1, 1.0D), is(true));
assertThat(SqlFunctions.geAny(2L, 1.0D), is(true));
assertThat(SqlFunctions.geAny(1L, 1.0D), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(2L), 1), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(1L), 1), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(2L), 1L), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(1L), 1L), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(2L), 1.0D), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(1L), 1.0D), is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is(true));
assertThat(SqlFunctions.geAny(new BigDecimal(1L), new BigDecimal(1.0D)),
is(true));
// Non-numeric different type but both implements Comparable
// "greater or equal" check
try {
assertThat(SqlFunctions.geAny("2", 2L), is(false));
fail("'ge' on non-numeric different type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for comparison: class java.lang.String >= "
+ "class java.lang.Long"));
}
}
@Test public void testPlusAny() {
// null parameters
assertNull(SqlFunctions.plusAny(null, null));
assertNull(SqlFunctions.plusAny(null, 1));
assertNull(SqlFunctions.plusAny(1, null));
// Numeric types
assertThat(SqlFunctions.plusAny(2, 1L), is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(2, 1.0D), is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(2L, 1.0D), is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(new BigDecimal(2L), 1),
is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(new BigDecimal(2L), 1L),
is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(new BigDecimal(2L), 1.0D),
is((Object) new BigDecimal(3)));
assertThat(SqlFunctions.plusAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is((Object) new BigDecimal(3)));
// Non-numeric type
try {
SqlFunctions.plusAny("2", 2L);
fail("'plus' on non-numeric type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for arithmetic: class java.lang.String + "
+ "class java.lang.Long"));
}
}
@Test public void testMinusAny() {
// null parameters
assertNull(SqlFunctions.minusAny(null, null));
assertNull(SqlFunctions.minusAny(null, 1));
assertNull(SqlFunctions.minusAny(1, null));
// Numeric types
assertThat(SqlFunctions.minusAny(2, 1L), is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(2, 1.0D), is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(2L, 1.0D), is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(new BigDecimal(2L), 1),
is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(new BigDecimal(2L), 1L),
is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(new BigDecimal(2L), 1.0D),
is((Object) new BigDecimal(1)));
assertThat(SqlFunctions.minusAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is((Object) new BigDecimal(1)));
// Non-numeric type
try {
SqlFunctions.minusAny("2", 2L);
fail("'minus' on non-numeric type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for arithmetic: class java.lang.String - "
+ "class java.lang.Long"));
}
}
@Test public void testMultiplyAny() {
// null parameters
assertNull(SqlFunctions.multiplyAny(null, null));
assertNull(SqlFunctions.multiplyAny(null, 1));
assertNull(SqlFunctions.multiplyAny(1, null));
// Numeric types
assertThat(SqlFunctions.multiplyAny(2, 1L), is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(2, 1.0D),
is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(2L, 1.0D),
is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(new BigDecimal(2L), 1),
is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(new BigDecimal(2L), 1L),
is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(new BigDecimal(2L), 1.0D),
is((Object) new BigDecimal(2)));
assertThat(SqlFunctions.multiplyAny(new BigDecimal(2L), new BigDecimal(1.0D)),
is((Object) new BigDecimal(2)));
// Non-numeric type
try {
SqlFunctions.multiplyAny("2", 2L);
fail("'multiply' on non-numeric type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for arithmetic: class java.lang.String * "
+ "class java.lang.Long"));
}
}
@Test public void testDivideAny() {
// null parameters
assertNull(SqlFunctions.divideAny(null, null));
assertNull(SqlFunctions.divideAny(null, 1));
assertNull(SqlFunctions.divideAny(1, null));
// Numeric types
assertThat(SqlFunctions.divideAny(5, 2L),
is((Object) new BigDecimal("2.5")));
assertThat(SqlFunctions.divideAny(5, 2.0D),
is((Object) new BigDecimal("2.5")));
assertThat(SqlFunctions.divideAny(5L, 2.0D),
is((Object) new BigDecimal("2.5")));
assertThat(SqlFunctions.divideAny(new BigDecimal(5L), 2),
is((Object) new BigDecimal(2.5)));
assertThat(SqlFunctions.divideAny(new BigDecimal(5L), 2L),
is((Object) new BigDecimal(2.5)));
assertThat(SqlFunctions.divideAny(new BigDecimal(5L), 2.0D),
is((Object) new BigDecimal(2.5)));
assertThat(SqlFunctions.divideAny(new BigDecimal(5L), new BigDecimal(2.0D)),
is((Object) new BigDecimal(2.5)));
// Non-numeric type
try {
SqlFunctions.divideAny("5", 2L);
fail("'divide' on non-numeric type is not possible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(),
is("Invalid types for arithmetic: class java.lang.String / "
+ "class java.lang.Long"));
}
}
}
// End SqlFunctionsTest.java
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.reteoo;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.drools.FactHandle;
import org.drools.QueryResults;
import org.drools.SessionConfiguration;
import org.drools.base.DroolsQuery;
import org.drools.base.InternalViewChangedEventListener;
import org.drools.base.NonCloningQueryViewListener;
import org.drools.base.QueryRowWithSubruleIndex;
import org.drools.base.StandardQueryViewChangedEventListener;
import org.drools.common.AbstractWorkingMemory;
import org.drools.common.BaseNode;
import org.drools.common.EventFactHandle;
import org.drools.common.InternalAgenda;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalKnowledgeRuntime;
import org.drools.common.InternalRuleBase;
import org.drools.common.InternalWorkingMemory;
import org.drools.common.PropagationContextImpl;
import org.drools.common.TupleStartEqualsConstraint;
import org.drools.common.TupleStartEqualsConstraint.TupleStartEqualsConstraintContextEntry;
import org.drools.common.WorkingMemoryAction;
import org.drools.core.util.FastIterator;
import org.drools.core.util.index.RightTupleList;
import org.drools.event.AgendaEventSupport;
import org.drools.event.WorkingMemoryEventSupport;
import org.drools.impl.EnvironmentFactory;
import org.drools.impl.StatefulKnowledgeSessionImpl;
import org.drools.marshalling.impl.MarshallerReaderContext;
import org.drools.marshalling.impl.MarshallerWriteContext;
import org.drools.marshalling.impl.PersisterHelper;
import org.drools.marshalling.impl.ProtobufMessages;
import org.drools.marshalling.impl.ProtobufMessages.ActionQueue.Action;
import org.drools.marshalling.impl.ProtobufMessages.ActionQueue.Assert;
import org.drools.reteoo.AccumulateNode.AccumulateContext;
import org.drools.reteoo.AccumulateNode.AccumulateMemory;
import org.drools.reteoo.AccumulateNode.ActivitySource;
import org.drools.rule.Declaration;
import org.drools.rule.EntryPoint;
import org.drools.rule.Package;
import org.drools.rule.Rule;
import org.drools.runtime.Environment;
import org.drools.runtime.ObjectFilter;
import org.drools.runtime.rule.LiveQuery;
import org.drools.runtime.rule.ViewChangedEventListener;
import org.drools.runtime.rule.impl.LiveQueryImpl;
import org.drools.runtime.rule.impl.OpenQueryViewChangedEventListenerAdapter;
import org.drools.spi.FactHandleFactory;
import org.drools.spi.PropagationContext;
/**
* Implementation of <code>WorkingMemory</code>.
*/
public class ReteooWorkingMemory extends AbstractWorkingMemory implements ReteooWorkingMemoryInterface {
/**
 * Default constructor, required for externalization / serialization support.
 */
public ReteooWorkingMemory() {
super();
}
/**
 * Creates a working memory for the given rule base using the default
 * session configuration and a freshly created environment.
 *
 * @param id       the session id
 * @param ruleBase the backing rule base
 */
public ReteooWorkingMemory(final int id,
final InternalRuleBase ruleBase) {
this( id,
ruleBase,
SessionConfiguration.getDefaultInstance(),
EnvironmentFactory.newEnvironment() );
}
/**
 * Creates a working memory for the given rule base with an explicit
 * session configuration and environment. The agenda is obtained from the
 * rule base's component factory and bound back to this working memory.
 *
 * @param id          the session id
 * @param ruleBase    the backing rule base
 * @param config      the session configuration to use
 * @param environment the environment for this session
 */
public ReteooWorkingMemory(final int id,
final InternalRuleBase ruleBase,
final SessionConfiguration config,
final Environment environment) {
super( id,
ruleBase,
ruleBase.newFactHandleFactory(),
config,
environment );
// the agenda and the working memory hold references to each other
this.agenda = ruleBase.getConfiguration().getComponentFactory().getAgendaFactory().createAgenda( ruleBase );
this.agenda.setWorkingMemory( this );
}
/**
 * Creates a working memory with externally supplied event-support objects,
 * allowing callers to share or pre-configure event listeners.
 *
 * @param id                        the session id
 * @param ruleBase                  the backing rule base
 * @param config                    the session configuration to use
 * @param environment               the environment for this session
 * @param workingMemoryEventSupport event support for working-memory events
 * @param agendaEventSupport        event support for agenda events
 */
public ReteooWorkingMemory(final int id,
final InternalRuleBase ruleBase,
final SessionConfiguration config,
final Environment environment,
final WorkingMemoryEventSupport workingMemoryEventSupport,
final AgendaEventSupport agendaEventSupport) {
super( id,
ruleBase,
ruleBase.newFactHandleFactory(),
config,
environment,
workingMemoryEventSupport,
agendaEventSupport );
// the agenda and the working memory hold references to each other
this.agenda = ruleBase.getConfiguration().getComponentFactory().getAgendaFactory().createAgenda( ruleBase );
this.agenda.setWorkingMemory( this );
}
/**
 * Creates a working memory from previously unmarshalled state: the fact
 * handle factory, the initial fact handle, the propagation id counter and
 * the agenda are supplied directly rather than created here.
 *
 * @param id                 the session id
 * @param ruleBase           the backing rule base
 * @param handleFactory      the restored fact handle factory
 * @param initialFactHandle  the restored InitialFact handle
 * @param propagationContext the restored propagation id counter value
 * @param config             the session configuration to use
 * @param agenda             the restored agenda
 * @param environment        the environment for this session
 */
public ReteooWorkingMemory(final int id,
final InternalRuleBase ruleBase,
final FactHandleFactory handleFactory,
final InternalFactHandle initialFactHandle,
final long propagationContext,
final SessionConfiguration config,
final InternalAgenda agenda,
final Environment environment) {
super( id,
ruleBase,
handleFactory,
initialFactHandle,
//ruleBase.newFactHandleFactory(context),
propagationContext,
config,
environment );
// bind the restored agenda back to this working memory
this.agenda = agenda;
this.agenda.setWorkingMemory( this );
// InputPersister.readFactHandles( context );
// super.read( context );
}
/**
 * Executes the named query with no arguments.
 *
 * @param query the name of the query to run
 * @return the query results
 */
public QueryResults getQueryResults(final String query) {
    return getQueryResults( query, null );
}
/**
 * Executes the named query with the given arguments and collects its
 * results. The query is run by asserting a transient {@link DroolsQuery}
 * fact into the network, letting propagation populate the result
 * collector, then destroying the handle again.
 *
 * @param queryName the name of the query to run
 * @param arguments the query arguments, may be null
 * @return the collected query results together with the declarations of
 *         each matching subrule
 */
@SuppressWarnings("unchecked")
public QueryResults getQueryResults(final String queryName,
final Object[] arguments) {
try {
startOperation();
// acquire rule-base read lock before the session lock; released in
// reverse order in the finally block
this.ruleBase.readLock();
this.lock.lock();
// flush any pending actions before evaluating the query
this.ruleBase.executeQueuedActions();
executeQueuedActions();
// 'false' => this is a one-shot (non-open) query
DroolsQuery queryObject = new DroolsQuery( queryName,
arguments,
getQueryListenerInstance(),
false );
InternalFactHandle handle = this.handleFactory.newFactHandle( queryObject,
null,
this,
this );
final PropagationContext propagationContext = new PropagationContextImpl( getNextPropagationIdCounter(),
PropagationContext.ASSERTION,
null,
null,
handle,
agenda.getActiveActivations(),
agenda.getDormantActivations(),
getEntryPoint() );
// push the query fact into the network; results are gathered by the
// query object's listener during propagation
getEntryPointNode().assertQuery( handle,
propagationContext,
this );
propagationContext.evaluateActionQueue( this );
// the query fact is transient: destroy its handle once propagation ends
this.handleFactory.destroyFactHandle( handle );
// collect the outer declarations of every terminal node (subrule) of
// the query, so results can be addressed by declaration name
BaseNode[] nodes = this.ruleBase.getReteooBuilder().getTerminalNodes( queryObject.getQuery() );
List<Map<String, Declaration>> decls = new ArrayList<Map<String, Declaration>>();
if ( nodes != null ) {
for ( BaseNode node : nodes ) {
decls.add( ((QueryTerminalNode) node).getSubrule().getOuterDeclarations() );
}
}
executeQueuedActions();
return new QueryResults( (List<QueryRowWithSubruleIndex>) queryObject.getQueryResultCollector().getResults(),
decls.toArray( new Map[decls.size()] ),
this,
( queryObject.getQuery() != null ) ? queryObject.getQuery().getParameters() : new Declaration[0] );
} finally {
this.lock.unlock();
this.ruleBase.readUnlock();
endOperation();
}
}
/**
 * Creates a fresh query-view listener matching the session configuration.
 *
 * @return a standard (cloning) or lightweight (non-cloning) listener, or
 *         null if the configured option is not recognised
 */
private InternalViewChangedEventListener getQueryListenerInstance() {
    switch ( this.config.getQueryListenerOption() ) {
        case STANDARD :
            return new StandardQueryViewChangedEventListener();
        case LIGHTWEIGHT :
            return new NonCloningQueryViewListener();
        default :
            return null;
    }
}
/**
 * Opens a live (continuous) query: the query fact stays asserted in the
 * network and the given listener is notified as the view changes. The
 * returned {@link LiveQuery} can be used to close it again (see
 * {@link #closeLiveQuery(InternalFactHandle)}).
 *
 * @param query     the name of the query to open
 * @param arguments the query arguments, may be null
 * @param listener  callback notified of view changes
 * @return a handle used to close the live query
 */
public LiveQuery openLiveQuery(final String query,
final Object[] arguments,
final ViewChangedEventListener listener) {
try {
startOperation();
// acquire rule-base read lock before the session lock; released in
// reverse order in the finally block
this.ruleBase.readLock();
this.lock.lock();
// flush any pending actions before asserting the query
this.ruleBase.executeQueuedActions();
executeQueuedActions();
// 'true' => open query; the fact handle is NOT destroyed here
DroolsQuery queryObject = new DroolsQuery( query,
arguments,
new OpenQueryViewChangedEventListenerAdapter( listener ),
true );
InternalFactHandle handle = this.handleFactory.newFactHandle( queryObject,
null,
this,
this );
final PropagationContext propagationContext = new PropagationContextImpl( getNextPropagationIdCounter(),
PropagationContext.ASSERTION,
null,
null,
handle,
agenda.getActiveActivations(),
agenda.getDormantActivations(),
getEntryPoint() );
getEntryPointNode().assertQuery( handle,
propagationContext,
this );
propagationContext.evaluateActionQueue( this );
executeQueuedActions();
// the handle remains live; LiveQueryImpl.close() retracts it later
return new LiveQueryImpl( this,
handle );
} finally {
this.lock.unlock();
this.ruleBase.readUnlock();
endOperation();
}
}
/**
 * Closes a previously opened live query by retracting its query fact from
 * the network and destroying the fact handle.
 *
 * @param factHandle the handle of the DroolsQuery fact asserted by
 *                   {@link #openLiveQuery}
 */
public void closeLiveQuery(final InternalFactHandle factHandle) {
try {
startOperation();
// same lock ordering as openLiveQuery: rule-base read lock first
this.ruleBase.readLock();
this.lock.lock();
final PropagationContext propagationContext = new PropagationContextImpl( getNextPropagationIdCounter(),
PropagationContext.ASSERTION,
null,
null,
factHandle,
agenda.getActiveActivations(),
agenda.getDormantActivations(),
getEntryPoint() );
getEntryPointNode().retractQuery( factHandle,
propagationContext,
this );
propagationContext.evaluateActionQueue( this );
// the query fact is no longer needed once retracted
getFactHandleFactory().destroyFactHandle( factHandle );
} finally {
this.lock.unlock();
this.ruleBase.readUnlock();
endOperation();
}
}
/**
 * Queued working-memory action that (re-)asserts a fact into the Rete
 * network when executed. Supports three marshalling forms: legacy stream
 * (write/read via MarshallerWriteContext), protobuf (serialize/ctor from
 * Action), and plain Externalizable (writeExternal/readExternal).
 */
public static class WorkingMemoryReteAssertAction
implements
WorkingMemoryAction {
// handle of the fact to assert
private InternalFactHandle factHandle;
// flags forwarded into the propagation (truth-maintenance related)
private boolean removeLogical;
private boolean updateEqualsMap;
// optional rule / tuple that originated this assertion
private Rule ruleOrigin;
private LeftTuple leftTuple;
public WorkingMemoryReteAssertAction(final InternalFactHandle factHandle,
final boolean removeLogical,
final boolean updateEqualsMap,
final Rule ruleOrigin,
final LeftTuple leftTuple) {
this.factHandle = factHandle;
this.removeLogical = removeLogical;
this.updateEqualsMap = updateEqualsMap;
this.ruleOrigin = ruleOrigin;
this.leftTuple = leftTuple;
}
/**
 * Reconstructs the action from a legacy marshalling stream; mirrors
 * the field order written by {@link #write(MarshallerWriteContext)}.
 */
public WorkingMemoryReteAssertAction(MarshallerReaderContext context) throws IOException {
this.factHandle = context.handles.get( context.readInt() );
this.removeLogical = context.readBoolean();
this.updateEqualsMap = context.readBoolean();
// presence flag for ruleOrigin
if ( context.readBoolean() ) {
String pkgName = context.readUTF();
String ruleName = context.readUTF();
Package pkg = context.ruleBase.getPackage( pkgName );
this.ruleOrigin = pkg.getRule( ruleName );
}
// presence flag for leftTuple (resolved via the terminal tuple map)
if ( context.readBoolean() ) {
this.leftTuple = context.terminalTupleMap.get( context.readInt() );
}
}
/**
 * Reconstructs the action from its protobuf form; mirrors
 * {@link #serialize(MarshallerWriteContext)}.
 */
public WorkingMemoryReteAssertAction(MarshallerReaderContext context,
Action _action) {
Assert _assert = _action.getAssert();
this.factHandle = context.handles.get( _assert.getHandleId() );
this.removeLogical = _assert.getRemoveLogical();
this.updateEqualsMap = _assert.getUpdateEqualsMap();
if ( _assert.hasTuple() ) {
String pkgName = _assert.getOriginPkgName();
String ruleName = _assert.getOriginRuleName();
Package pkg = context.ruleBase.getPackage( pkgName );
this.ruleOrigin = pkg.getRule( ruleName );
this.leftTuple = context.filter.getTuplesCache().get( PersisterHelper.createActivationKey( pkgName, ruleName, _assert.getTuple() ) );
}
}
// legacy stream marshalling; field order must match the reader ctor
public void write(MarshallerWriteContext context) throws IOException {
context.writeShort( WorkingMemoryAction.WorkingMemoryReteAssertAction );
context.writeInt( this.factHandle.getId() );
context.writeBoolean( this.removeLogical );
context.writeBoolean( this.updateEqualsMap );
if ( this.ruleOrigin != null ) {
context.writeBoolean( true );
context.writeUTF( ruleOrigin.getPackage() );
context.writeUTF( ruleOrigin.getName() );
} else {
context.writeBoolean( false );
}
if ( this.leftTuple != null ) {
context.writeBoolean( true );
context.writeInt( context.terminalTupleMap.get( this.leftTuple ) );
} else {
context.writeBoolean( false );
}
}
// protobuf marshalling
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
ProtobufMessages.ActionQueue.Assert.Builder _assert = ProtobufMessages.ActionQueue.Assert.newBuilder();
_assert.setHandleId( this.factHandle.getId() )
.setRemoveLogical( this.removeLogical )
.setUpdateEqualsMap( this.updateEqualsMap );
// NOTE(review): unlike write(), this assumes ruleOrigin is non-null
// whenever leftTuple is non-null (would NPE otherwise) — confirm that
// invariant holds for all producers of this action.
if ( this.leftTuple != null ) {
ProtobufMessages.Tuple.Builder _tuple = ProtobufMessages.Tuple.newBuilder();
for( LeftTuple entry = this.leftTuple; entry != null; entry = entry.getParent() ) {
_tuple.addHandleId( entry.getLastHandle().getId() );
}
_assert.setOriginPkgName( ruleOrigin.getPackageName() )
.setOriginRuleName( ruleOrigin.getName() )
.setTuple( _tuple.build() );
}
return ProtobufMessages.ActionQueue.Action.newBuilder()
.setType( ProtobufMessages.ActionQueue.ActionType.ASSERT )
.setAssert( _assert.build() )
.build();
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
factHandle = (InternalFactHandle) in.readObject();
removeLogical = in.readBoolean();
updateEqualsMap = in.readBoolean();
ruleOrigin = (Rule) in.readObject();
leftTuple = (LeftTuple) in.readObject();
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject( factHandle );
out.writeBoolean( removeLogical );
out.writeBoolean( updateEqualsMap );
out.writeObject( ruleOrigin );
out.writeObject( leftTuple );
}
/**
 * Asserts the fact into the rule base and then evaluates any follow-up
 * actions queued during propagation.
 */
public void execute(InternalWorkingMemory workingMemory) {
final PropagationContext context = new PropagationContextImpl( workingMemory.getNextPropagationIdCounter(),
PropagationContext.ASSERTION,
this.ruleOrigin,
this.leftTuple,
this.factHandle );
ReteooRuleBase ruleBase = (ReteooRuleBase) workingMemory.getRuleBase();
ruleBase.assertObject( this.factHandle,
this.factHandle.getObject(),
context,
workingMemory );
context.evaluateActionQueue( workingMemory );
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
}
/**
 * Queued working-memory action that expires an event fact: the event is
 * retracted from its object type node and, if it has no remaining
 * activations, removed from its entry point's object store.
 */
public static class WorkingMemoryReteExpireAction
implements
WorkingMemoryAction {
// handle of the event fact to expire
private InternalFactHandle factHandle;
// the object type node the event was propagated from
private ObjectTypeNode node;
public WorkingMemoryReteExpireAction(final InternalFactHandle factHandle,
final ObjectTypeNode node) {
this.factHandle = factHandle;
this.node = node;
}
public InternalFactHandle getFactHandle() {
return factHandle;
}
public void setFactHandle(InternalFactHandle factHandle) {
this.factHandle = factHandle;
}
public ObjectTypeNode getNode() {
return node;
}
public void setNode(ObjectTypeNode node) {
this.node = node;
}
/** Reconstructs the action from a legacy marshalling stream. */
public WorkingMemoryReteExpireAction(MarshallerReaderContext context) throws IOException {
this.factHandle = context.handles.get( context.readInt() );
final int nodeId = context.readInt();
this.node = (ObjectTypeNode) context.sinks.get( Integer.valueOf( nodeId ) );
}
/** Reconstructs the action from its protobuf form. */
public WorkingMemoryReteExpireAction(MarshallerReaderContext context,
Action _action) {
this.factHandle = context.handles.get( _action.getExpire().getHandleId() );
this.node = (ObjectTypeNode) context.sinks.get( Integer.valueOf( _action.getExpire().getNodeId() ) );
}
// legacy stream marshalling; mirrors the reader ctor above
public void write(MarshallerWriteContext context) throws IOException {
context.writeShort( WorkingMemoryAction.WorkingMemoryReteExpireAction );
context.writeInt( this.factHandle.getId() );
context.writeInt( this.node.getId() );
}
// protobuf marshalling
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
return ProtobufMessages.ActionQueue.Action.newBuilder()
.setType( ProtobufMessages.ActionQueue.ActionType.EXPIRE )
.setExpire( ProtobufMessages.ActionQueue.Expire.newBuilder()
.setHandleId( this.factHandle.getId() )
.setNodeId( this.node.getId() )
.build() )
.build();
}
public void execute(InternalWorkingMemory workingMemory) {
// only expire if the fact is still valid (it may have been retracted)
if ( this.factHandle.isValid() ) {
// if the fact is still in the working memory (since it may have been previously retracted already
final PropagationContext context = new PropagationContextImpl( workingMemory.getNextPropagationIdCounter(),
PropagationContext.EXPIRATION,
null,
null,
this.factHandle );
((EventFactHandle) factHandle).setExpired( true );
this.node.retractObject( factHandle,
context,
workingMemory );
context.evaluateActionQueue( workingMemory );
// if no activations for this expired event
if ( ((EventFactHandle) factHandle).getActivationsCount() == 0 ) {
// remove it from the object store and clean up resources
((EventFactHandle) factHandle).getEntryPoint().retract( factHandle );
}
// NOTE(review): the action queue is evaluated a second time here,
// presumably to process actions queued by the retract above —
// confirm this repeat is intentional and not a leftover.
context.evaluateActionQueue( workingMemory );
}
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
// no Externalizable state is written; marshalling uses write()/serialize()
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that defers evaluation of an accumulate node's result
 * constraints until the action queue is drained. Never marshalled: all
 * (de)serialization entry points throw UnsupportedOperationException.
 */
public static class EvaluateResultConstraints
implements
WorkingMemoryAction {
private ActivitySource source;
private LeftTuple leftTuple;
private PropagationContext context;
private InternalWorkingMemory workingMemory;
private AccumulateMemory memory;
// NOTE(review): this field is never read — execute() re-derives the
// context from leftTuple.getObject() instead; confirm it is redundant.
private AccumulateContext accctx;
private boolean useLeftMemory;
private AccumulateNode node;
public EvaluateResultConstraints(PropagationContext context) {
this.context = context;
}
public EvaluateResultConstraints(ActivitySource source,
LeftTuple leftTuple,
PropagationContext context,
InternalWorkingMemory workingMemory,
AccumulateMemory memory,
AccumulateContext accctx,
boolean useLeftMemory,
AccumulateNode node) {
this.source = source;
this.leftTuple = leftTuple;
this.context = context;
this.workingMemory = workingMemory;
this.memory = memory;
this.accctx = accctx;
this.useLeftMemory = useLeftMemory;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public EvaluateResultConstraints(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
// fetch the current accumulate context from the tuple (shadows the
// field of the same name) and clear its pending action marker
final AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();
accctx.setAction( null );
node.evaluateResultConstraints( source,
leftTuple,
context,
workingMemory,
memory,
accctx,
useLeftMemory );
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public ActivitySource getSource() {
return source;
}
public void setSource(ActivitySource source) {
this.source = source;
}
// NOTE(review): label says "ResumeInsertAction", not the class name —
// looks like a copy/paste leftover; confirm before relying on it in logs.
public String toString() {
return "[ResumeInsertAction leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that asserts a DroolsQuery fact into the entry point node.
 * Never marshalled: all (de)serialization entry points throw
 * UnsupportedOperationException.
 */
public static class QueryInsertAction
implements
WorkingMemoryAction {
private PropagationContext context;
// handle whose object is the DroolsQuery being inserted
private InternalFactHandle factHandle;
private LeftTuple leftTuple;
private QueryElementNode node;
public QueryInsertAction(PropagationContext context) {
this.context = context;
}
public QueryInsertAction(PropagationContext context,
InternalFactHandle factHandle,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.factHandle = factHandle;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryInsertAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
// we null this as it blocks this query being called, to avoid re-entrant issues. i.e. scheduling an insert and then an update, before the insert is executed
((DroolsQuery) this.factHandle.getObject()).setAction( null );
workingMemory.getEntryPointNode().assertQuery( factHandle,
context,
workingMemory );
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public String toString() {
return "[QueryInsertAction facthandle=" + factHandle + ",\n leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that propagates a modify of a DroolsQuery fact through the
 * entry point node. Never marshalled: all (de)serialization entry points
 * throw UnsupportedOperationException.
 */
public static class QueryUpdateAction
implements
WorkingMemoryAction {
private PropagationContext context;
private InternalFactHandle factHandle;
private LeftTuple leftTuple;
private QueryElementNode node;
public QueryUpdateAction(PropagationContext context) {
this.context = context;
}
public QueryUpdateAction(PropagationContext context,
InternalFactHandle factHandle,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.factHandle = factHandle;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryUpdateAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
workingMemory.getEntryPointNode().modifyQuery( factHandle,
context,
workingMemory );
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
// NOTE(review): label says "QueryInsertModifyAction", not the class name —
// confirm before relying on it in logs.
public String toString() {
return "[QueryInsertModifyAction facthandle=" + factHandle + ",\n leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that retracts a query's tuple: for open queries the
 * backing fact is retracted from the entry point; otherwise the child
 * tuples are retracted directly through the node's sink propagator.
 * Never marshalled.
 */
public static class QueryRetractAction
implements
WorkingMemoryAction {
private PropagationContext context;
private LeftTuple leftTuple;
private QueryElementNode node;
public QueryRetractAction(PropagationContext context) {
this.context = context;
}
public QueryRetractAction(PropagationContext context,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryRetractAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
// the DroolsQuery handle is stashed on the tuple's object slot
InternalFactHandle factHandle = (InternalFactHandle) leftTuple.getObject();
if ( node.isOpenQuery() ) {
// iterate to the query terminal node, as the child leftTuples will get picked up there
workingMemory.getEntryPointNode().retractObject( factHandle,
context,
workingMemory.getObjectTypeConfigurationRegistry().getObjectTypeConf( workingMemory.getEntryPoint(),
factHandle.getObject() ),
workingMemory );
//workingMemory.getFactHandleFactory().destroyFactHandle( factHandle );
} else {
// get child left tuples, as there is no open query
if ( leftTuple.getFirstChild() != null ) {
node.getSinkPropagator().propagateRetractLeftTuple( leftTuple,
context,
workingMemory );
}
}
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public String toString() {
return "[QueryRetractAction leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that drains a query's pending result-insert right tuples
 * and propagates each child tuple assertion to the node's sinks.
 * Never marshalled.
 */
public static class QueryResultInsertAction
implements
WorkingMemoryAction {
private PropagationContext context;
private LeftTuple leftTuple;
// handle whose object is the DroolsQuery owning the pending results
private InternalFactHandle factHandle;
private QueryElementNode node;
public QueryResultInsertAction(PropagationContext context) {
this.context = context;
}
public QueryResultInsertAction(PropagationContext context,
InternalFactHandle factHandle,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.factHandle = factHandle;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryResultInsertAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
DroolsQuery query = (DroolsQuery) factHandle.getObject();
RightTupleList rightTuples = query.getResultInsertRightTupleList();
query.setResultInsertRightTupleList( null ); // null so further operations happen on a new stack element
// drain the list: each right tuple is removed, then its child left
// tuples are asserted to the sinks
for ( RightTuple rightTuple = rightTuples.getFirst(); rightTuple != null; ) {
RightTuple tmp = (RightTuple) rightTuple.getNext();
rightTuples.remove( rightTuple );
for ( LeftTuple childLeftTuple = rightTuple.firstChild; childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getRightParentNext() ) {
node.getSinkPropagator().doPropagateAssertLeftTuple( context,
workingMemory,
childLeftTuple,
childLeftTuple.getLeftTupleSink() );
}
rightTuple = tmp;
}
// @FIXME, this should work, but it's closing needed fact handles
// actually an evaluation 34 appears on the stack twice....
// if ( !node.isOpenQuery() ) {
// workingMemory.getFactHandleFactory().destroyFactHandle( this.factHandle );
// }
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public LeftTuple getLeftTuple() {
return this.leftTuple;
}
public String toString() {
return "[QueryEvaluationAction leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that drains a query's pending result-retract right tuples
 * and propagates each retraction to the node's sinks. Never marshalled.
 */
public static class QueryResultRetractAction
implements
WorkingMemoryAction {
private PropagationContext context;
private LeftTuple leftTuple;
// handle whose object is the DroolsQuery owning the pending results
private InternalFactHandle factHandle;
private QueryElementNode node;
public QueryResultRetractAction(PropagationContext context,
InternalFactHandle factHandle,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.factHandle = factHandle;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryResultRetractAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
DroolsQuery query = (DroolsQuery) factHandle.getObject();
RightTupleList rightTuples = query.getResultRetractRightTupleList();
query.setResultRetractRightTupleList( null ); // null so further operations happen on a new stack element
// drain the list, retracting each right tuple through the sinks
for ( RightTuple rightTuple = rightTuples.getFirst(); rightTuple != null; ) {
RightTuple tmp = (RightTuple) rightTuple.getNext();
rightTuples.remove( rightTuple );
this.node.getSinkPropagator().propagateRetractRightTuple( rightTuple,
context,
workingMemory );
rightTuple = tmp;
}
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public LeftTuple getLeftTuple() {
return this.leftTuple;
}
public String toString() {
return "[QueryResultRetractAction leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that drains a query's pending result-update right tuples
 * and propagates a modify for each first child tuple. Never marshalled.
 */
public static class QueryResultUpdateAction
implements
WorkingMemoryAction {
private PropagationContext context;
private LeftTuple leftTuple;
// NOTE(review): package-private unlike the sibling classes' private
// fields — confirm whether this wider access is actually needed.
InternalFactHandle factHandle;
private QueryElementNode node;
public QueryResultUpdateAction(PropagationContext context,
InternalFactHandle factHandle,
LeftTuple leftTuple,
QueryElementNode node) {
this.context = context;
this.factHandle = factHandle;
this.leftTuple = leftTuple;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryResultUpdateAction(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
DroolsQuery query = (DroolsQuery) factHandle.getObject();
RightTupleList rightTuples = query.getResultUpdateRightTupleList();
query.setResultUpdateRightTupleList( null ); // null so further operations happen on a new stack element
// drain the list, propagating a modify for each entry's first child
for ( RightTuple rightTuple = rightTuples.getFirst(); rightTuple != null; ) {
RightTuple tmp = (RightTuple) rightTuple.getNext();
rightTuples.remove( rightTuple );
this.node.getSinkPropagator().propagateModifyChildLeftTuple( rightTuple.firstChild,
rightTuple.firstChild.getLeftParent(),
context,
workingMemory,
true );
rightTuple = tmp;
}
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public LeftTuple getLeftTuple() {
return leftTuple;
}
public String toString() {
return "[QueryResultUpdateAction leftTuple=" + leftTuple + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Queued action that re-binds a left tuple to its real beta node sink
 * (assert, retract or modify depending on state) and, for non-open
 * queries, purges matching right tuples from the node's right memory.
 * Never marshalled.
 */
public static class QueryRiaFixerNodeFixer
implements
WorkingMemoryAction {
private PropagationContext context;
private LeftTuple leftTuple;
private BetaNode node;
// true => propagate a retract instead of a modify when children exist
private boolean retract;
public QueryRiaFixerNodeFixer(PropagationContext context) {
this.context = context;
}
public QueryRiaFixerNodeFixer(PropagationContext context,
LeftTuple leftTuple,
boolean retract,
BetaNode node) {
this.context = context;
this.leftTuple = leftTuple;
this.retract = retract;
this.node = node;
}
// this action is transient: it must never appear in a marshalled network
public QueryRiaFixerNodeFixer(MarshallerReaderContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void write(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) throws IOException {
throw new UnsupportedOperationException( "Should not be present in network on serialisation" );
}
public void execute(InternalWorkingMemory workingMemory) {
// point the tuple at the real sink before propagating
leftTuple.setLeftTupleSink( this.node );
if ( leftTuple.getFirstChild() == null ) {
// no children yet: first-time assert
this.node.assertLeftTuple( leftTuple,
context,
workingMemory );
} else {
if ( retract ) {
this.node.getSinkPropagator().propagateRetractLeftTuple( leftTuple,
context,
workingMemory );
} else {
this.node.getSinkPropagator().propagateModifyChildLeftTuple( leftTuple,
context,
workingMemory,
true );
}
}
if ( leftTuple.getLeftParent() == null ) {
// It's not an open query, as we aren't recording parent chains, so we need to clear out right memory
Object node = workingMemory.getNodeMemory( this.node );
RightTupleMemory rightMemory = null;
if ( node instanceof BetaMemory ) {
rightMemory = ((BetaMemory) node).getRightTupleMemory();
} else if ( node instanceof AccumulateMemory ) {
rightMemory = ((AccumulateMemory) node).betaMemory.getRightTupleMemory();
}
// NOTE(review): if the node memory is neither BetaMemory nor
// AccumulateMemory, rightMemory stays null and fastIterator()
// below would NPE — confirm those are the only possible types here.
final TupleStartEqualsConstraint constraint = TupleStartEqualsConstraint.getInstance();
TupleStartEqualsConstraintContextEntry contextEntry = new TupleStartEqualsConstraintContextEntry();
contextEntry.updateFromTuple( workingMemory, leftTuple );
FastIterator rightIt = rightMemory.fastIterator();
RightTuple temp = null;
// remove every right tuple whose start matches this left tuple
for ( RightTuple rightTuple = rightMemory.getFirst( leftTuple, (InternalFactHandle) context.getFactHandle(), rightIt ); rightTuple != null; ) {
temp = (RightTuple) rightIt.next( rightTuple );
if ( constraint.isAllowedCachedLeft( contextEntry, rightTuple.getFactHandle() ) ) {
rightMemory.remove( rightTuple );
}
rightTuple = temp;
}
}
}
public void execute(InternalKnowledgeRuntime kruntime) {
execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
}
public String toString() {
return "[QueryRiaFixerNodeFixer leftTuple=" + leftTuple + ",\n retract=" + retract + "]\n";
}
public void writeExternal(ObjectOutput out) throws IOException {
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
}
}
/**
 * Returns the entry point definition backing this session's default
 * entry point.
 *
 * @return the default {@link EntryPoint}
 */
public EntryPoint getEntryPoint() {
    return defaultEntryPoint.getEntryPoint();
}
/**
 * Exposes this session as its own internal working memory.
 *
 * @return this instance
 */
public InternalWorkingMemory getInternalWorkingMemory() {
    return this;
}
/**
 * Returns a snapshot of all fact handles currently in this working memory.
 *
 * @param <T> the fact handle type expected by the caller
 * @return a new list containing every fact handle from the handle iterator
 */
public <T extends org.drools.runtime.rule.FactHandle> Collection<T> getFactHandles() {
    final List<T> list = new ArrayList<T>();
    for ( final Iterator<?> it = iterateFactHandles(); it.hasNext(); ) {
        // the iterator is untyped; the cast mirrors the method's own
        // unchecked contract with its caller
        @SuppressWarnings("unchecked")
        final T fh = (T) it.next();
        list.add( fh );
    }
    return list;
}
/**
 * Not supported at this level; the filtered variant is provided by
 * {@code StatefulKnowledgeSessionImpl}.
 *
 * @throws UnsupportedOperationException always
 */
public <T extends org.drools.runtime.rule.FactHandle> Collection<T> getFactHandles(ObjectFilter filter) {
    // message previously read "implementedby StatefulKnowledgeImpl" — fixed typo and class name
    throw new UnsupportedOperationException( "this is implemented by StatefulKnowledgeSessionImpl" );
}
/**
 * Not supported at this level; provided by
 * {@code StatefulKnowledgeSessionImpl}.
 *
 * @throws UnsupportedOperationException always
 */
public Collection<Object> getObjects() {
    // message previously read "implementedby StatefulKnowledgeImpl" — fixed typo and class name
    throw new UnsupportedOperationException( "this is implemented by StatefulKnowledgeSessionImpl" );
}
/**
 * Not supported at this level; the filtered variant is provided by
 * {@code StatefulKnowledgeSessionImpl}.
 *
 * @throws UnsupportedOperationException always
 */
public Collection<Object> getObjects(ObjectFilter filter) {
    // message previously read "implementedby StatefulKnowledgeImpl" — fixed typo and class name
    throw new UnsupportedOperationException( "this is implemented by StatefulKnowledgeSessionImpl" );
}
}
| |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.systemchannels;
import android.content.pm.ActivityInfo;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.plugin.common.JSONMethodCodec;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* System channel that receives requests for host platform behavior, e.g., haptic and sound effects,
* system chrome configurations, and clipboard interaction.
*/
public class PlatformChannel {
private static final String TAG = "PlatformChannel";
@NonNull public final MethodChannel channel;
@Nullable private PlatformMessageHandler platformMessageHandler;
// Parses incoming platform messages and routes them to the registered
// PlatformMessageHandler. @VisibleForTesting so tests can invoke it directly
// without a live platform channel.
@NonNull @VisibleForTesting
protected final MethodChannel.MethodCallHandler parsingMethodCallHandler =
    new MethodChannel.MethodCallHandler() {
      @Override
      public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) {
        if (platformMessageHandler == null) {
          // If no explicit PlatformMessageHandler has been registered then we don't
          // need to forward this call to an API. Return.
          return;
        }
        String method = call.method;
        Object arguments = call.arguments;
        Log.v(TAG, "Received '" + method + "' message.");
        try {
          switch (method) {
            case "SystemSound.play":
              try {
                SoundType soundType = SoundType.fromValue((String) arguments);
                platformMessageHandler.playSystemSound(soundType);
                result.success(null);
              } catch (NoSuchFieldException exception) {
                // The desired sound type does not exist.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "HapticFeedback.vibrate":
              try {
                HapticFeedbackType feedbackType =
                    HapticFeedbackType.fromValue((String) arguments);
                platformMessageHandler.vibrateHapticFeedback(feedbackType);
                result.success(null);
              } catch (NoSuchFieldException exception) {
                // The desired feedback type does not exist.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "SystemChrome.setPreferredOrientations":
              try {
                int androidOrientation = decodeOrientations((JSONArray) arguments);
                platformMessageHandler.setPreferredOrientations(androidOrientation);
                result.success(null);
              } catch (JSONException | NoSuchFieldException exception) {
                // JSONException: One or more expected fields were either omitted or referenced an
                // invalid type.
                // NoSuchFieldException: One or more expected fields were either omitted or
                // referenced an invalid type.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "SystemChrome.setApplicationSwitcherDescription":
              try {
                AppSwitcherDescription description =
                    decodeAppSwitcherDescription((JSONObject) arguments);
                platformMessageHandler.setApplicationSwitcherDescription(description);
                result.success(null);
              } catch (JSONException exception) {
                // One or more expected fields were either omitted or referenced an invalid type.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "SystemChrome.setEnabledSystemUIOverlays":
              try {
                List<SystemUiOverlay> overlays = decodeSystemUiOverlays((JSONArray) arguments);
                platformMessageHandler.showSystemOverlays(overlays);
                result.success(null);
              } catch (JSONException | NoSuchFieldException exception) {
                // JSONException: One or more expected fields were either omitted or referenced an
                // invalid type.
                // NoSuchFieldException: One or more of the overlay names are invalid.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "SystemChrome.restoreSystemUIOverlays":
              platformMessageHandler.restoreSystemUiOverlays();
              result.success(null);
              break;
            case "SystemChrome.setSystemUIOverlayStyle":
              try {
                SystemChromeStyle systemChromeStyle =
                    decodeSystemChromeStyle((JSONObject) arguments);
                platformMessageHandler.setSystemUiOverlayStyle(systemChromeStyle);
                result.success(null);
              } catch (JSONException | NoSuchFieldException exception) {
                // JSONException: One or more expected fields were either omitted or referenced an
                // invalid type.
                // NoSuchFieldException: One or more of the brightness names are invalid.
                result.error("error", exception.getMessage(), null);
              }
              break;
            case "SystemNavigator.pop":
              platformMessageHandler.popSystemNavigator();
              result.success(null);
              break;
            case "Clipboard.getData":
              {
                String contentFormatName = (String) arguments;
                ClipboardContentFormat clipboardFormat = null;
                if (contentFormatName != null) {
                  try {
                    clipboardFormat = ClipboardContentFormat.fromValue(contentFormatName);
                  } catch (NoSuchFieldException exception) {
                    // An unsupported content format was requested. Return failure and stop
                    // processing this call: previously execution fell through and ALSO
                    // invoked result.success(...) below, replying twice to one MethodCall.
                    result.error(
                        "error", "No such clipboard content format: " + contentFormatName, null);
                    break;
                  }
                }
                CharSequence clipboardContent =
                    platformMessageHandler.getClipboardData(clipboardFormat);
                if (clipboardContent != null) {
                  JSONObject response = new JSONObject();
                  response.put("text", clipboardContent);
                  result.success(response);
                } else {
                  result.success(null);
                }
                break;
              }
            case "Clipboard.setData":
              {
                String clipboardContent = ((JSONObject) arguments).getString("text");
                platformMessageHandler.setClipboardData(clipboardContent);
                result.success(null);
                break;
              }
            default:
              result.notImplemented();
              break;
          }
        } catch (JSONException e) {
          result.error("error", "JSON error: " + e.getMessage(), null);
        }
      }
    };
/**
 * Constructs a {@code PlatformChannel} that connects Android to the Dart code running in {@code
 * dartExecutor}.
 *
 * <p>The given {@code dartExecutor} is permitted to be idle or executing code.
 *
 * <p>See {@link DartExecutor}.
 */
public PlatformChannel(@NonNull DartExecutor dartExecutor) {
  // The "flutter/platform" channel speaks JSON on both sides.
  channel = new MethodChannel(dartExecutor, "flutter/platform", JSONMethodCodec.INSTANCE);
  channel.setMethodCallHandler(parsingMethodCallHandler);
}
/**
 * Sets the {@link PlatformMessageHandler} which receives all events and requests that are parsed
 * from the underlying platform channel.
 *
 * <p>Passing {@code null} detaches the current handler; incoming calls are then ignored by
 * {@code parsingMethodCallHandler}.
 */
public void setPlatformMessageHandler(@Nullable PlatformMessageHandler platformMessageHandler) {
  this.platformMessageHandler = platformMessageHandler;
}
// TODO(mattcarroll): add support for IntDef annotations, then add @ScreenOrientation
/**
 * Decodes a series of orientations to an aggregate desired orientation.
 *
 * <p>Each orientation contributes one bit to a mask: 0x01 = portraitUp,
 * 0x02 = landscapeLeft, 0x04 = portraitDown, 0x08 = landscapeRight. The combined
 * mask is then mapped onto the closest {@code ActivityInfo.SCREEN_ORIENTATION_*}
 * constant below.
 *
 * @throws JSONException if {@code encodedOrientations} does not contain expected keys and value
 *     types.
 * @throws NoSuchFieldException if any given encoded orientation is not a valid orientation name.
 */
private int decodeOrientations(@NonNull JSONArray encodedOrientations)
    throws JSONException, NoSuchFieldException {
  int requestedOrientation = 0x00;
  int firstRequestedOrientation = 0x00;
  for (int index = 0; index < encodedOrientations.length(); index += 1) {
    String encodedOrientation = encodedOrientations.getString(index);
    DeviceOrientation orientation = DeviceOrientation.fromValue(encodedOrientation);
    switch (orientation) {
      case PORTRAIT_UP:
        requestedOrientation |= 0x01;
        break;
      case PORTRAIT_DOWN:
        requestedOrientation |= 0x04;
        break;
      case LANDSCAPE_LEFT:
        requestedOrientation |= 0x02;
        break;
      case LANDSCAPE_RIGHT:
        requestedOrientation |= 0x08;
        break;
    }
    // Capture the mask after the first orientation is applied; it is used as a
    // tie-breaker below for combinations Android cannot express directly.
    if (firstRequestedOrientation == 0x00) {
      firstRequestedOrientation = requestedOrientation;
    }
  }
  switch (requestedOrientation) {
    case 0x00: // no orientations requested
      return ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED;
    case 0x01: // portraitUp only
      return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
    case 0x02: // landscapeLeft only
      return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
    case 0x04: // portraitDown only
      return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
    case 0x05: // portraitUp and portraitDown
      return ActivityInfo.SCREEN_ORIENTATION_USER_PORTRAIT;
    case 0x08: // landscapeRight only
      return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
    case 0x0a: // landscapeLeft and landscapeRight
      return ActivityInfo.SCREEN_ORIENTATION_USER_LANDSCAPE;
    case 0x0b: // portraitUp, landscapeLeft, and landscapeRight
      return ActivityInfo.SCREEN_ORIENTATION_USER;
    case 0x0f: // all four orientations
      return ActivityInfo.SCREEN_ORIENTATION_FULL_USER;
    case 0x03: // portraitUp and landscapeLeft
    case 0x06: // portraitDown and landscapeLeft
    case 0x07: // portraitUp, portraitDown, and landscapeLeft
    case 0x09: // portraitUp and landscapeRight
    case 0x0c: // portraitDown and landscapeRight
    case 0x0d: // portraitUp, portraitDown, and landscapeRight
    case 0x0e: // portraitDown, landscapeLeft, and landscapeRight
      // Android can't describe these cases, so just default to whatever the first
      // specified value was.
      switch (firstRequestedOrientation) {
        case 0x01:
          return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
        case 0x02:
          return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
        case 0x04:
          return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
        case 0x08:
          return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
      }
  }
  // Execution should never get this far, but if it does then we default
  // to a portrait orientation.
  return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
}
/**
 * Decodes a JSON payload of the form {@code {"primaryColor": int, "label": String}} into an
 * {@link AppSwitcherDescription}.
 *
 * <p>Fields are read in the same order as before (color, then label) so malformed payloads
 * surface the same {@link JSONException} first.
 */
@NonNull
private AppSwitcherDescription decodeAppSwitcherDescription(
    @NonNull JSONObject encodedDescription) throws JSONException {
  int primaryColor = encodedDescription.getInt("primaryColor");
  // A value of 0 means the color isn't set and the system default applies;
  // any other value is forced fully opaque.
  if (primaryColor != 0) {
    primaryColor |= 0xFF000000;
  }
  String label = encodedDescription.getString("label");
  return new AppSwitcherDescription(primaryColor, label);
}
/**
 * Decodes a list of JSON-encoded overlays to a list of {@link SystemUiOverlay}.
 *
 * @throws JSONException if {@code encodedSystemUiOverlay} does not contain expected keys and
 *     value types.
 * @throws NoSuchFieldException if any of the given encoded overlay names are invalid.
 */
@NonNull
private List<SystemUiOverlay> decodeSystemUiOverlays(@NonNull JSONArray encodedSystemUiOverlay)
    throws JSONException, NoSuchFieldException {
  List<SystemUiOverlay> decoded = new ArrayList<>();
  for (int index = 0; index < encodedSystemUiOverlay.length(); ++index) {
    // fromValue throws NoSuchFieldException for unknown names, so anything that
    // reaches the switch is one of the two known constants.
    SystemUiOverlay overlay =
        SystemUiOverlay.fromValue(encodedSystemUiOverlay.getString(index));
    switch (overlay) {
      case TOP_OVERLAYS:
        decoded.add(SystemUiOverlay.TOP_OVERLAYS);
        break;
      case BOTTOM_OVERLAYS:
        decoded.add(SystemUiOverlay.BOTTOM_OVERLAYS);
        break;
    }
  }
  return decoded;
}
/**
 * Decodes a JSON-encoded {@code encodedStyle} to a {@link SystemChromeStyle}.
 *
 * <p>Fields that are absent (JSON null) stay {@code null} in the resulting style.
 *
 * @throws JSONException if {@code encodedStyle} does not contain expected keys and value types.
 * @throws NoSuchFieldException if any provided brightness name is invalid.
 */
@NonNull
private SystemChromeStyle decodeSystemChromeStyle(@NonNull JSONObject encodedStyle)
    throws JSONException, NoSuchFieldException {
  // Keys are read in the original order so a malformed payload raises the
  // same exception first.
  Brightness navBarIconBrightness = null;
  // TODO(mattcarroll): add color annotation
  Integer navBarColor = null;
  // TODO(mattcarroll): add color annotation
  Integer navBarDividerColor = null;
  Brightness statusIconBrightness = null;
  // TODO(mattcarroll): add color annotation
  Integer statusColor = null;
  if (!encodedStyle.isNull("systemNavigationBarIconBrightness")) {
    navBarIconBrightness =
        Brightness.fromValue(encodedStyle.getString("systemNavigationBarIconBrightness"));
  }
  if (!encodedStyle.isNull("systemNavigationBarColor")) {
    navBarColor = encodedStyle.getInt("systemNavigationBarColor");
  }
  if (!encodedStyle.isNull("statusBarIconBrightness")) {
    statusIconBrightness =
        Brightness.fromValue(encodedStyle.getString("statusBarIconBrightness"));
  }
  if (!encodedStyle.isNull("statusBarColor")) {
    statusColor = encodedStyle.getInt("statusBarColor");
  }
  if (!encodedStyle.isNull("systemNavigationBarDividerColor")) {
    navBarDividerColor = encodedStyle.getInt("systemNavigationBarDividerColor");
  }
  return new SystemChromeStyle(
      statusColor,
      statusIconBrightness,
      navBarColor,
      navBarIconBrightness,
      navBarDividerColor);
}
/**
 * Handler that receives platform messages sent from Flutter to Android through a given {@link
 * PlatformChannel}.
 *
 * <p>To register a {@code PlatformMessageHandler} with a {@link PlatformChannel}, see {@link
 * PlatformChannel#setPlatformMessageHandler(PlatformMessageHandler)}.
 */
public interface PlatformMessageHandler {
  /** The Flutter application would like to play the given {@code soundType}. */
  void playSystemSound(@NonNull SoundType soundType);
  /** The Flutter application would like to play the given haptic {@code feedbackType}. */
  void vibrateHapticFeedback(@NonNull HapticFeedbackType feedbackType);
  /** The Flutter application would like to display in the given {@code androidOrientation}. */
  // TODO(mattcarroll): add @ScreenOrientation annotation
  void setPreferredOrientations(int androidOrientation);
  /**
   * The Flutter application would like to be displayed in Android's app switcher with the visual
   * representation described in the given {@code description}.
   *
   * <p>See the related Android documentation:
   * https://developer.android.com/guide/components/activities/recents
   */
  void setApplicationSwitcherDescription(@NonNull AppSwitcherDescription description);
  /**
   * The Flutter application would like the Android system to display the given {@code overlays}.
   *
   * <p>{@link SystemUiOverlay#TOP_OVERLAYS} refers to system overlays such as the status bar,
   * while {@link SystemUiOverlay#BOTTOM_OVERLAYS} refers to system overlays such as the
   * back/home/recents navigation on the bottom of the screen.
   *
   * <p>An empty list of {@code overlays} should hide all system overlays.
   */
  void showSystemOverlays(@NonNull List<SystemUiOverlay> overlays);
  /**
   * The Flutter application would like to restore the visibility of system overlays to the last
   * set of overlays sent via {@link #showSystemOverlays(List)}.
   *
   * <p>If {@link #showSystemOverlays(List)} has yet to be called, then a default system overlay
   * appearance is desired:
   *
   * <p>{@code View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN }
   */
  void restoreSystemUiOverlays();
  /**
   * The Flutter application would like the system chrome to present itself with the given {@code
   * systemUiOverlayStyle}, i.e., the given status bar and navigation bar colors and brightness.
   */
  void setSystemUiOverlayStyle(@NonNull SystemChromeStyle systemUiOverlayStyle);
  /**
   * The Flutter application would like to pop the top item off of the Android app's navigation
   * back stack.
   */
  void popSystemNavigator();
  /**
   * The Flutter application would like to receive the current data in the clipboard and have it
   * returned in the given {@code format}.
   *
   * <p>A {@code null} return is reported back to Flutter as success with no data (see the
   * "Clipboard.getData" branch of {@code parsingMethodCallHandler}).
   */
  @Nullable
  CharSequence getClipboardData(@Nullable ClipboardContentFormat format);
  /**
   * The Flutter application would like to set the current data in the clipboard to the given
   * {@code text}.
   */
  void setClipboardData(@NonNull String text);
}
/** Types of sounds the Android OS can play on behalf of an application. */
public enum SoundType {
  CLICK("SystemSoundType.click");

  // The Flutter-side encoded name for this sound type.
  @NonNull private final String encodedName;

  SoundType(@NonNull String encodedName) {
    this.encodedName = encodedName;
  }

  /**
   * Returns the {@code SoundType} whose encoded name equals {@code encodedName}.
   *
   * @throws NoSuchFieldException if no sound type uses the given name.
   */
  @NonNull
  static SoundType fromValue(@NonNull String encodedName) throws NoSuchFieldException {
    for (SoundType candidate : SoundType.values()) {
      if (candidate.encodedName.equals(encodedName)) {
        return candidate;
      }
    }
    throw new NoSuchFieldException("No such SoundType: " + encodedName);
  }
}
/** The types of haptic feedback that the Android OS can generate on behalf of an application. */
public enum HapticFeedbackType {
  STANDARD(null),
  LIGHT_IMPACT("HapticFeedbackType.lightImpact"),
  MEDIUM_IMPACT("HapticFeedbackType.mediumImpact"),
  HEAVY_IMPACT("HapticFeedbackType.heavyImpact"),
  SELECTION_CLICK("HapticFeedbackType.selectionClick");

  // Flutter-side encoded name; null identifies the STANDARD feedback type.
  @Nullable private final String encodedName;

  HapticFeedbackType(@Nullable String encodedName) {
    this.encodedName = encodedName;
  }

  /**
   * Returns the {@code HapticFeedbackType} whose encoded name matches {@code encodedName};
   * a {@code null} name matches {@link #STANDARD}.
   *
   * @throws NoSuchFieldException if no feedback type uses the given name.
   */
  @NonNull
  static HapticFeedbackType fromValue(@Nullable String encodedName) throws NoSuchFieldException {
    for (HapticFeedbackType candidate : HapticFeedbackType.values()) {
      String name = candidate.encodedName;
      if (name == null ? encodedName == null : name.equals(encodedName)) {
        return candidate;
      }
    }
    throw new NoSuchFieldException("No such HapticFeedbackType: " + encodedName);
  }
}
/** The possible desired orientations of a Flutter application. */
public enum DeviceOrientation {
  PORTRAIT_UP("DeviceOrientation.portraitUp"),
  PORTRAIT_DOWN("DeviceOrientation.portraitDown"),
  LANDSCAPE_LEFT("DeviceOrientation.landscapeLeft"),
  LANDSCAPE_RIGHT("DeviceOrientation.landscapeRight");

  /**
   * Returns the {@code DeviceOrientation} matching the given Flutter-side encoded name.
   *
   * @throws NoSuchFieldException if {@code encodedName} is not a known orientation name.
   */
  @NonNull
  static DeviceOrientation fromValue(@NonNull String encodedName) throws NoSuchFieldException {
    for (DeviceOrientation orientation : DeviceOrientation.values()) {
      if (orientation.encodedName.equals(encodedName)) {
        return orientation;
      }
    }
    throw new NoSuchFieldException("No such DeviceOrientation: " + encodedName);
  }

  // Now final: enum state is immutable, matching SoundType/HapticFeedbackType.
  @NonNull private final String encodedName;

  DeviceOrientation(@NonNull String encodedName) {
    this.encodedName = encodedName;
  }
}
/**
 * The set of Android system UI overlays as perceived by the Flutter application.
 *
 * <p>Android includes many more overlay options and flags than what is provided by {@code
 * SystemUiOverlay}. Flutter only requires control over a subset of the overlays and those
 * overlays are represented by {@code SystemUiOverlay} values.
 */
public enum SystemUiOverlay {
  TOP_OVERLAYS("SystemUiOverlay.top"),
  BOTTOM_OVERLAYS("SystemUiOverlay.bottom");

  /**
   * Returns the {@code SystemUiOverlay} matching the given Flutter-side encoded name.
   *
   * @throws NoSuchFieldException if {@code encodedName} is not a known overlay name.
   */
  @NonNull
  static SystemUiOverlay fromValue(@NonNull String encodedName) throws NoSuchFieldException {
    for (SystemUiOverlay overlay : SystemUiOverlay.values()) {
      if (overlay.encodedName.equals(encodedName)) {
        return overlay;
      }
    }
    throw new NoSuchFieldException("No such SystemUiOverlay: " + encodedName);
  }

  // Now final: enum state is immutable, matching SoundType/HapticFeedbackType.
  @NonNull private final String encodedName;

  SystemUiOverlay(@NonNull String encodedName) {
    this.encodedName = encodedName;
  }
}
/**
 * The color and label of an application that appears in Android's app switcher, AKA recents
 * screen.
 */
public static class AppSwitcherDescription {
  // TODO(mattcarroll): add color annotation
  // 0 means "not set"; non-zero values are forced opaque by the decoder.
  public final int color;
  @NonNull public final String label;

  public AppSwitcherDescription(int color, @NonNull String label) {
    this.color = color;
    this.label = label;
  }
}
/** The color and brightness of system chrome, e.g., status bar and system navigation bar. */
public static class SystemChromeStyle {
  // All fields are nullable: a null value means "leave that aspect unchanged"
  // (the decoder leaves fields null when absent from the JSON payload).
  // TODO(mattcarroll): add color annotation
  @Nullable public final Integer statusBarColor;
  @Nullable public final Brightness statusBarIconBrightness;
  // TODO(mattcarroll): add color annotation
  @Nullable public final Integer systemNavigationBarColor;
  @Nullable public final Brightness systemNavigationBarIconBrightness;
  // TODO(mattcarroll): add color annotation
  @Nullable public final Integer systemNavigationBarDividerColor;

  public SystemChromeStyle(
      @Nullable Integer statusBarColor,
      @Nullable Brightness statusBarIconBrightness,
      @Nullable Integer systemNavigationBarColor,
      @Nullable Brightness systemNavigationBarIconBrightness,
      @Nullable Integer systemNavigationBarDividerColor) {
    this.statusBarColor = statusBarColor;
    this.statusBarIconBrightness = statusBarIconBrightness;
    this.systemNavigationBarColor = systemNavigationBarColor;
    this.systemNavigationBarIconBrightness = systemNavigationBarIconBrightness;
    this.systemNavigationBarDividerColor = systemNavigationBarDividerColor;
  }
}
/** Screen brightness values understood by Flutter's system chrome styling. */
public enum Brightness {
  LIGHT("Brightness.light"),
  DARK("Brightness.dark");

  /**
   * Returns the {@code Brightness} matching the given Flutter-side encoded name.
   *
   * @throws NoSuchFieldException if {@code encodedName} is not a known brightness name.
   */
  @NonNull
  static Brightness fromValue(@NonNull String encodedName) throws NoSuchFieldException {
    for (Brightness brightness : Brightness.values()) {
      if (brightness.encodedName.equals(encodedName)) {
        return brightness;
      }
    }
    throw new NoSuchFieldException("No such Brightness: " + encodedName);
  }

  // Now final: enum state is immutable, matching SoundType/HapticFeedbackType.
  @NonNull private final String encodedName;

  Brightness(@NonNull String encodedName) {
    this.encodedName = encodedName;
  }
}
/** Data formats of clipboard content. */
public enum ClipboardContentFormat {
  PLAIN_TEXT("text/plain");

  /**
   * Returns the {@code ClipboardContentFormat} matching the given MIME-style encoded name.
   *
   * @throws NoSuchFieldException if {@code encodedName} is not a known format.
   */
  @NonNull
  static ClipboardContentFormat fromValue(@NonNull String encodedName)
      throws NoSuchFieldException {
    for (ClipboardContentFormat format : ClipboardContentFormat.values()) {
      if (format.encodedName.equals(encodedName)) {
        return format;
      }
    }
    throw new NoSuchFieldException("No such ClipboardContentFormat: " + encodedName);
  }

  // Now final: enum state is immutable, matching SoundType/HapticFeedbackType.
  @NonNull private final String encodedName;

  ClipboardContentFormat(@NonNull String encodedName) {
    this.encodedName = encodedName;
  }
}
}
| |
package at.ac.tuwien.iter.services.impl;
import java.io.File;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.xml.bind.JAXBException;
import org.apache.tapestry5.ioc.services.RegistryShutdownHub;
import org.apache.tapestry5.ioc.services.TypeCoercer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import at.ac.tuwien.iter.data.Test;
import at.ac.tuwien.iter.data.TestResult;
import at.ac.tuwien.iter.data.TestResultsCollector;
import at.ac.tuwien.iter.exceptions.TestExecutionException;
import at.ac.tuwien.iter.executors.BasicRunner;
import at.ac.tuwien.iter.executors.ConfigurationManager;
import at.ac.tuwien.iter.services.AssertionService;
import at.ac.tuwien.iter.services.DataCollectionService;
import at.ac.tuwien.iter.services.Iter;
import at.ac.tuwien.iter.services.LoadGenerator;
import at.ac.tuwien.iter.services.LoadGeneratorSource;
import at.ac.tuwien.iter.services.TestSuiteEvolver;
/**
* This is the main class that manages the test suite generation guided by the
* model. Each test suite generation process is (and must be independent!) from
* the others.
*
*
* @author alessiogambi
*
*/
public class IterImpl implements Iter {
// TODO To run the tests -> Check if Tapestry already provide one or we can
// just
// add to it
private ExecutorService executor;
private Logger logger;
// Inputs
private String customerName;
private String serviceName;
// This must be injected
private LoadGenerator loadGenerator;
// This is the result !
private Set<Test> testSuite;
private Map<Test, Integer> hitCount;
private TestResultsCollector testResultsCollector;
private List<Test> experimentAgenda;
// / VERY BAD !
private ConfigurationManager configurationManager;
private TypeCoercer typeCoercer;
private AssertionService assertionService;
private DataCollectionService dataCollectionService;
// Test Execution
private int nParallelTests;
private int nInitialTests;
private long experimentTimeout;
private boolean bootstrap;
private File bootstrapFile;
private File testResultFile;
private TestSuiteEvolver testSuiteEvolver;
private LoadGeneratorSource loadGeneratorSource;
/**
 * Wires up an {@code IterImpl}: stores the injected collaborators, creates the
 * (empty) test suite and result collector, builds the AUToCLES
 * {@code ConfigurationManager}, and registers two registry-shutdown hooks — one
 * that shuts down the experiment executor, and one that flushes collected test
 * results to {@code testResultFile}.
 *
 * <p>Parameter groups follow the inline comments below (resources, user inputs,
 * execution settings, I/O files, environment, and collaborating services).
 */
public IterImpl(
        // Resources
        Logger logger,
        // User inputs - Do we really need this here ? Those are used only
        // because AUToCLES need that, maybe we can move them there - TODO
        // Move this into Test execution framework
        String customerName, String serviceName,
        // Test Execution - TODO Move this into Test execution framework
        int nParallelTests,
        // Test Suite Initialization.
        int nInitialTests,
        // Input-output
        final File testResultFile, final File bootstrapFile,
        // Experimental Environment - TODO Move this into Test execution
        // framework
        URL autoclesURL,
        // Experiment setup - Pre/Post conditions ?
        long experimentTimeout, boolean bootstrap,
        // Other services - Why we need this here ?
        LoadGenerator loadGenerator,
        // This should be avoided, but @PostInjection does not work fine
        // apparently...
        RegistryShutdownHub registryShutdownHub, //
        TypeCoercer typeCoercer, // Infrastructure service
        AssertionService assertionService, // OK
        DataCollectionService dataCollectionService, // Not sure
        TestSuiteEvolver testSuiteEvolver, // OK
        LoadGeneratorSource loadGeneratorSource// Not sure
) {
    this.bootstrap = bootstrap;
    this.logger = logger;
    this.customerName = customerName;
    this.serviceName = serviceName;
    this.loadGenerator = loadGenerator;
    this.assertionService = assertionService;
    this.dataCollectionService = dataCollectionService;
    this.loadGeneratorSource = loadGeneratorSource;
    this.nParallelTests = nParallelTests;
    this.nInitialTests = nInitialTests;
    this.experimentTimeout = experimentTimeout;
    this.testResultsCollector = new TestResultsCollector();
    this.testSuite = new HashSet<Test>();
    this.hitCount = new Hashtable<Test, Integer>();
    this.testResultFile = testResultFile;
    this.bootstrapFile = bootstrapFile;
    // this.mathEngineDao = mathEngineDao;
    this.typeCoercer = typeCoercer;
    this.testSuiteEvolver = testSuiteEvolver;
    // TODO This must be removed, use a service instead and provide symbols
    // !!!
    this.configurationManager = new ConfigurationManager(
            autoclesURL.toString(), this.customerName, this.serviceName,
            this.loadGenerator);
    // TODO EXECUTOR SERVICE MUST BE ACCESSED/INJECTED AS DEPENDENCY
    // Register hook for shutdown
    // Use the @PostInjection annotation, see the ref manual
    registryShutdownHub.addRegistryShutdownListener(new Runnable() {
        public void run() {
            if (executor != null) {
                executor.shutdown();
            }
        }
    });
    // TODO IS IT REALLY WORKING ?!
    // Register hook for shutdown: STORE ALL THE RESULTS !
    // Use the @PostInjection annotation, see the ref manual
    registryShutdownHub.addRegistryShutdownListener(new Runnable() {
        public void run() {
            if (testResultsCollector != null) {
                try {
                    TestResultsCollector.saveToFile(
                            testResultFile.getAbsolutePath(),
                            testResultsCollector);
                } catch (JAXBException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    });
}
/**
 * Try to bootstrap the test suite from {@code bootstrapFile}: previously stored
 * test results are reloaded, their tests registered in the test suite, and
 * their assertions re-checked. Tests that fail to register are appended to
 * {@code experimentAgenda} to be run again; the agenda must therefore already
 * be initialized by the caller (as {@link #start()} does before invoking this).
 *
 * @throws TestExecutionException if the bootstrap file cannot be loaded or the
 *         bootstrap process cannot be completed.
 */
void bootstrap() throws TestExecutionException {
    // Stats for the summary printed at the end. Note: a rescheduled test is
    // also counted as failed, so "Ok" below is total - failed.
    int total = 0;
    int failed = 0;
    int rescheduled = 0;
    String bFile = bootstrapFile.getAbsolutePath();
    long startTime = System.currentTimeMillis();
    logger.info("Iter.bootstrapAndStart() BootStraping from "
            + bootstrapFile.getAbsolutePath());
    if (bootstrapFile.exists()) {
        // Try Load all the cached executions if the file exists
        try {
            testResultsCollector = TestResultsCollector
                    .loadFromFile(bootstrapFile.getAbsolutePath());
        } catch (Throwable e) {
            logger.error(
                    String.format(
                            "Error. Cannot load the boostrap file %s. Skip the bootstrap process.",
                            bootstrapFile.getAbsolutePath()), e);
            throw new TestExecutionException(
                    "Cannot load the bootstrap file");
        }
        // Collect some data
        total = testResultsCollector.getTestResults().size();
        try {
            // Re-run registration and assertions for every cached result.
            for (TestResult testResult : testResultsCollector) {
                logger.info("Processing testResult: " + testResult);
                Test newTest = loadGeneratorSource.getLoadGenerator(
                        testResult.getLoadGeneratorID()).generateTest(
                        testResult.getParametersAsNumbers());
                try {
                    logger.info("Registering test in testsuite: " + newTest);
                    testSuite.add(newTest);
                } catch (Throwable e) {
                    logger.warn(
                            "Cannot store the test in the test suite file. Run again test "
                                    + newTest, e);
                    // TODO Check if this will eventually override the one
                    // stored in the boostraped file/
                    experimentAgenda.add(newTest);
                    rescheduled++;
                    failed++;
                    continue;
                }
                try {
                    logger.info("Checking Assertions for test: " + newTest);
                    assertionService.check(testResult);
                } catch (Throwable e) {
                    logger.error(
                            "Cannot assert test results in bootstraping file. Skip !",
                            e);
                    failed++;
                    continue;
                }
            }
        } catch (Throwable e) {
            // TODO Not sure if really needed anymore
            logger.error("Cannot complete the bootstrap!", e);
            throw new TestExecutionException(
                    "Cannot complete the bootstrap");
        }
    } else {
        logger.warn(String
                .format("The specified boostraping file %s does not exists. Continue with no bootstrap.",
                        bootstrapFile.getAbsolutePath()));
    }
    long endTime = System.currentTimeMillis();
    /*
     * Print Bootstrap statistics
     */
    // StringBuilder instead of StringBuffer: the buffer is method-local, so no
    // synchronization is needed.
    StringBuilder sb = new StringBuilder();
    sb.append("\n\n").append("=======================================\n")
            .append("\tBootstrap summary\n")
            .append("=======================================\n");
    sb.append(" Elaborated ").append(total).append(" test results\n");
    sb.append(" Input file ").append(bFile).append("\n");
    sb.append(" Result:\n");
    sb.append(" - Ok: ").append((total - failed)).append("\n");
    sb.append(" - Failed: ").append(failed).append("\n");
    sb.append(" - Rescheduled: ").append(rescheduled).append("\n");
    sb.append(" Elaboration time was ")
            .append(String.format("%.2f",
                    // BUG FIX: was "(double) ((endTime - startTime) / 1000l)",
                    // which did integer division first and truncated the
                    // fractional seconds the "%.2f" format is meant to show.
                    (endTime - startTime) / 1000.0))
            .append(" secs\n");
    sb.append("=======================================\n");
    logger.info(sb.toString());
}
/** Builds the initial agenda: {@code nInitialTests} randomly generated tests. */
private List<Test> createRandomTests() {
    List<Test> randomTests = new ArrayList<Test>();
    for (int i = 0; i < nInitialTests; i++) {
        randomTests.add(loadGenerator.generateRandomCase());
    }
    return randomTests;
}
// Mainly for unit testing
// Mainly for unit testing: exposes the live collection held by the collector.
protected Collection<TestResult> getTestResults() {
    return testResultsCollector.getTestResults();
}
// Mainly for unit testing: exposes the live (mutable) test-suite set, not a copy.
protected Collection<Test> getTestSuite() {
    return testSuite;
}
/*
 * Schedule the experiments over the set of executors and then blocks until
 * all the experiments ran. Experiments that run fine are removed from the
 * list, if there is some failures in the execution the test must be
 * repeated
 *
 * @param experiments
 */
private void scheduleAndRunExperiments(final List<Test> experiments)
        throws InterruptedException {
    // Set by the uncaught-exception handler when a worker signals that the
    // whole suite must stop (a "STOP THE TEST" marker in the message).
    final AtomicBoolean stopTest = new AtomicBoolean(false);
    final UncaughtExceptionHandler uncaughtExceptionHandler = new UncaughtExceptionHandler() {
        public void uncaughtException(Thread thread, Throwable throwable) {
            logger.error(" uncaughtException " + throwable.getMessage()
                    + " from Thread " + thread);
            // NOTE(review): throwable.getMessage() can be null here, which
            // would NPE inside contains() — confirm and guard if needed.
            if (throwable.getMessage().contains("STOP THE TEST")) {
                stopTest.set(true);
            }
        }
    };
    // TODO Try to exploit the parallel executor provided by the framework
    // itself. Note that we need to capture any exception generated by the
    // worker threads !
    final ThreadFactory factory = new ThreadFactory() {
        public Thread newThread(Runnable runnable) {
            final Thread thread = new Thread(runnable);
            // Force our generated Handler here
            thread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
            return thread;
        }
    };
    // One pool per round; shut down and awaited at the bottom of this method.
    executor = Executors.newFixedThreadPool(nParallelTests, factory);
    for (final Test test : experiments) {
        // Should I USED FUTURE<?> here ?
        /*
         * Something like: ExecutorService executor =
         * Executors.newSingleThreadExecutor(); Runnable task = new
         * Runnable() { public void run() { throw new
         * RuntimeException("foo"); } };
         *
         * Future<?> future = executor.submit(task); try { future.get(); }
         * catch (ExecutionException e) { Exception rootException =
         * e.getCause(); }
         */
        executor.execute(new Runnable() {
            // Persists the shared result collector to the XML result file.
            private void saveResultsToFile() throws JAXBException,
                    IOException {
                // Store the result
                String fileName = testResultFile.getAbsolutePath();
                logger.debug("Basic Runner: Storing results to "
                        + fileName);
                TestResultsCollector.saveToFile(fileName,
                        testResultsCollector);
            }

            public void run() {
                // Per-test logger so concurrent runs are distinguishable.
                Logger _logger = LoggerFactory.getLogger(logger.getName()
                        + "-Test-" + test.getId());
                BasicRunner runner = new BasicRunner(_logger,
                        configurationManager, typeCoercer,
                        dataCollectionService, experimentTimeout);
                // NOTE(review): testResult stays null when executeTest throws a
                // TestExecutionException without the STOP marker; the null is
                // then passed to addTestResult/check below inside try/catch.
                TestResult testResult = null;
                try {
                    testResult = runner.executeTest(test);
                } catch (TestExecutionException e) {
                    e.printStackTrace();
                    if (e.getMessage().contains("STOP THE TEST")) {
                        logger.error("STOP THE TEST SUITE !");
                        // Propagates to the uncaught-exception handler above.
                        throw new RuntimeException(e);
                    }
                } catch (Exception e) {
                    logger.error("Error while executing the test");
                    throw new RuntimeException(e);
                }
                try {
                    // Store the execution in the TestResultCollectorFile -
                    // Only if there are not exceptions
                    testResultsCollector.addTestResult(testResult);
                } catch (Exception e) {
                    logger.warn("Error while add test result to collector",
                            e);
                }
                // TODO Not sure this belongs HERE... maybe it's part of the
                // "main" process.
                // Increase our testsuite
                synchronized (testSuite) {
                    testSuite.add(test);
                    // This is specific for our iter search !!
                    hitCount.put(test, 0);
                }
                logger.info("Added the test " + test.getId()
                        + "to the test suite file");
                // Run all the assertions: What if multiple threads run
                // this at the same time ? MathDao should be synch and
                // thread safe...
                try {
                    assertionService.check(testResult);
                } catch (Exception e) {
                    logger.warn(
                            "Failed to store assertion for " + test.getId()
                                    + "in the XML file !", e);
                }
                /*
                 * TODO: This fails always if there are no plastiicty check
                 * at all. For the moment we simply comment the code out !
                 * TODO Shall we capture this some how in a configurable way
                 * ? Maybe in the future we may implement somthing like: if
                 * fails the check then repeat
                 */
                // try {
                //
                // if (testResult.getTestReport("plasticity") != null
                // && !testResult.getTestReport("plasticity")
                // .isFailed()) {
                // If the experiment was fine remove form the agenda
                synchronized (experiments) {
                    logger.info("Removing " + test
                            + " from the list of experiments to run");
                    if (!experiments.remove(test)) {
                        logger.warn("IterImpl.scheduleExperiments() ERROR while removing "
                                + test);
                    }
                }
                //
                // } else {
                // logger.info("Plasticity check failed, so we keep the experiment for another round !");
                // }
                // } catch (Exception e) {
                // logger.warn("Error while checink test reports", e);
                // }
                try {
                    saveResultsToFile();
                } catch (Exception e) {
                    logger.warn(
                            "Failed to store result for " + test.getId()
                                    + "in the XML file !", e);
                    e.printStackTrace();
                }
            }
        });
    }
    // Follow the common pattern for temporary thread pools and wait
    // the end of all the experiments
    executor.shutdown();
    try {
        // Wait until this is over or the process gets interrupted
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        e.printStackTrace();
        logger.warn(" Timeout on Wait for termination. Stop the test");
        throw new RuntimeException("STOP THE TEST", e);
    }
    logger.info("Experiments ROUND finished !");
}
/**
 * Main search loop: bootstraps (optionally), seeds the agenda with random
 * tests, then repeatedly schedules a round of experiments and evolves the
 * test suite until no experiments remain or the thread is interrupted.
 * Results are persisted to {@code testResultFile} on exit.
 */
public void start() {
    // Search status flag; cleared when the agenda empties or we are interrupted.
    boolean running = true;
    // The agenda contains the list of experiments to run.
    // This is a "global" field of the class.
    experimentAgenda = new ArrayList<Test>();
    if (bootstrap) {
        // Shall we move exc inside the private method ?
        try {
            bootstrap();
        } catch (TestExecutionException e) {
            logger.warn("Problems during the bootstrap", e);
        }
    }
    // If the r option is zero or not specified, we will not create random
    // tests (createRandomTests() then returns an empty collection).
    experimentAgenda.addAll(createRandomTests());
    while (running) {
        try {
            logger.info("IterImpl.start() Scheduling : "
                    + experimentAgenda.size() + " experiments to run.");
            // Schedule all the experiments over the N executors.
            // This blocks until all the experiments of this round ran.
            // Experiments that failed or must be repeated stay in the agenda.
            scheduleAndRunExperiments(experimentAgenda);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
            logger.warn("Interrupted execution. Exit");
            running = false;
            break;
        }
        // Maybe the assertions should be here !?
        logger.info("IterImpl.start() Experiments that remains to run or must be repeated: "
                + experimentAgenda.size());
        /*
         * - Make this a configurable setting -
         */
        Collection<Test> newExperiments = null;
        try {
            // Evolve the test suite starting from the current one, plus the
            // results obtained from it.
            newExperiments = testSuiteEvolver.evolveTestSuite(testSuite,
                    testResultsCollector.getTestResults());
        } catch (Exception e) {
            logger.warn("Error during test suite evolution. Continue", e);
        }
        // BUG FIX: if the evolution above threw, newExperiments is still null
        // and addAll(null) would crash the whole search loop with an NPE.
        if (newExperiments != null) {
            experimentAgenda.addAll(newExperiments);
        }
        // Check for termination. No more experiments means we are done !
        if (experimentAgenda.isEmpty()) {
            logger.info("There are no more tests to run !");
            running = false;
        }
    }
    // Store the collected results to file.
    try {
        TestResultsCollector.saveToFile(testResultFile.getAbsolutePath(),
                testResultsCollector);
        logger.info("Results stored to " + testResultFile.getAbsolutePath());
    } catch (Exception e) {
        // Log the full stack trace through the logger instead of stderr.
        logger.error("Results cannot be stored to "
                + testResultFile.getAbsolutePath(), e);
    }
}
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.lint.checks;
import static com.android.SdkConstants.ATTR_NAME;
import static com.android.SdkConstants.ATTR_TYPE;
import static com.android.SdkConstants.TAG_ITEM;
import static com.android.utils.SdkUtils.getResourceFieldName;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.ide.common.resources.ResourceUrl;
import com.android.resources.ResourceFolderType;
import com.android.resources.ResourceType;
import com.android.tools.lint.detector.api.Category;
import com.android.tools.lint.detector.api.Context;
import com.android.tools.lint.detector.api.Implementation;
import com.android.tools.lint.detector.api.Issue;
import com.android.tools.lint.detector.api.LintUtils;
import com.android.tools.lint.detector.api.Location;
import com.android.tools.lint.detector.api.Location.Handle;
import com.android.tools.lint.detector.api.ResourceXmlDetector;
import com.android.tools.lint.detector.api.Scope;
import com.android.tools.lint.detector.api.Severity;
import com.android.tools.lint.detector.api.Speed;
import com.android.tools.lint.detector.api.TextFormat;
import com.android.tools.lint.detector.api.XmlContext;
import com.android.utils.Pair;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.w3c.dom.Attr;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This detector identifies cases where a resource is defined multiple times in the
* same resource folder
*/
public class DuplicateResourceDetector extends ResourceXmlDetector {

    /** The main issue discovered by this detector */
    @SuppressWarnings("unchecked")
    public static final Issue ISSUE = Issue.create(
            "DuplicateDefinition", //$NON-NLS-1$
            "Duplicate definitions of resources",

            "You can define a resource multiple times in different resource folders; that's how " +
            "string translations are done, for example. However, defining the same resource " +
            "more than once in the same resource folder is likely an error, for example " +
            "attempting to add a new resource without realizing that the name is already used, " +
            "and so on.",
            Category.CORRECTNESS,
            6,
            Severity.ERROR,
            new Implementation(
                    DuplicateResourceDetector.class,
                    // We should be able to do this incrementally!
                    Scope.ALL_RESOURCES_SCOPE,
                    Scope.RESOURCE_FILE_SCOPE));

    /** Wrong resource value type */
    public static final Issue TYPE_MISMATCH = Issue.create(
            "ReferenceType", //$NON-NLS-1$
            "Incorrect reference types",

            "When you generate a resource alias, the resource you are pointing to must be " +
            "of the same type as the alias",
            Category.CORRECTNESS,
            8,
            Severity.FATAL,
            new Implementation(
                    DuplicateResourceDetector.class,
                    Scope.RESOURCE_FILE_SCOPE));

    private static final String PRODUCT = "product"; //$NON-NLS-1$

    /** Names already seen in the current folder, keyed by resource type. */
    private Map<ResourceType, Set<String>> mTypeMap;
    /** Location handles of prior definitions, used for secondary locations. */
    private Map<ResourceType, List<Pair<String, Location.Handle>>> mLocations;
    /** Folder currently being analyzed; state above is reset when it changes. */
    private File mParent;

    /** Constructs a new {@link DuplicateResourceDetector} */
    public DuplicateResourceDetector() {
    }

    @NonNull
    @Override
    public Speed getSpeed() {
        return Speed.NORMAL;
    }

    @Override
    @Nullable
    public Collection<String> getApplicableAttributes() {
        return Collections.singletonList(ATTR_NAME);
    }

    @Override
    public boolean appliesTo(@NonNull ResourceFolderType folderType) {
        return folderType == ResourceFolderType.VALUES;
    }

    @Override
    public void beforeCheckFile(@NonNull Context context) {
        // Duplicates are only flagged within a single resource folder, so
        // reset the accumulated state whenever we move to a new folder.
        File parent = context.file.getParentFile();
        if (!parent.equals(mParent)) {
            mParent = parent;
            mTypeMap = Maps.newEnumMap(ResourceType.class);
            mLocations = Maps.newEnumMap(ResourceType.class);
        }
    }

    @Override
    public void visitAttribute(@NonNull XmlContext context, @NonNull Attr attribute) {
        Element element = attribute.getOwnerElement();

        if (element.hasAttribute(PRODUCT)) {
            // Product-specific variants are allowed to share a name.
            return;
        }

        String tag = element.getTagName();
        String typeString = tag;
        if (tag.equals(TAG_ITEM)) {
            typeString = element.getAttribute(ATTR_TYPE);
            if (typeString == null || typeString.isEmpty()) {
                // Untyped <item> inside a <style>: check the style's children
                // for duplicate names instead (only once, on the first child).
                if (element.getParentNode().getNodeName().equals(
                        ResourceType.STYLE.getName()) && isFirstElementChild(element)) {
                    checkUniqueNames(context, (Element) element.getParentNode());
                }
                return;
            }
        }
        ResourceType type = ResourceType.getEnum(typeString);
        if (type == null) {
            return;
        }

        if (type == ResourceType.ATTR
                && element.getParentNode().getNodeName().equals(
                        ResourceType.DECLARE_STYLEABLE.getName())) {
            // <attr> entries inside a declare-styleable are scoped to it.
            if (isFirstElementChild(element)) {
                checkUniqueNames(context, (Element) element.getParentNode());
            }
            return;
        }

        // If the element body is a resource reference (@type/name), make sure
        // the referenced type matches the alias type.
        NodeList children = element.getChildNodes();
        int childCount = children.getLength();
        for (int i = 0; i < childCount; i++) {
            Node child = children.item(i);
            if (child.getNodeType() == Node.TEXT_NODE) {
                String text = child.getNodeValue();
                for (int j = 0, length = text.length(); j < length; j++) {
                    char c = text.charAt(j);
                    if (c == '@') {
                        if (!text.regionMatches(false, j + 1, typeString, 0,
                                typeString.length()) && context.isEnabled(TYPE_MISMATCH)) {
                            ResourceUrl url = ResourceUrl.parse(text.trim());
                            if (url != null && url.type != type &&
                                    // colors and mipmaps can apparently be used as drawables
                                    !(type == ResourceType.DRAWABLE
                                        && (url.type == ResourceType.COLOR
                                            || url.type == ResourceType.MIPMAP))) {
                                String message = "Unexpected resource reference type; "
                                        + "expected value of type `@" + type + "/`";
                                context.report(TYPE_MISMATCH, element,
                                        context.getLocation(child),
                                        message);
                            }
                        }
                        break;
                    } else if (!Character.isWhitespace(c)) {
                        break;
                    }
                }
                break;
            }
        }

        Set<String> names = mTypeMap.get(type);
        if (names == null) {
            names = Sets.newHashSetWithExpectedSize(40);
            mTypeMap.put(type, names);
        }

        String name = attribute.getValue();
        String originalName = name;
        // AAPT will flatten the namespace, turning dots, dashes and colons into _
        name = getResourceFieldName(name);

        if (names.contains(name)) {
            String message = String.format("`%1$s` has already been defined in this folder", name);
            if (!name.equals(originalName)) {
                message += " (`" + name + "` is equivalent to `" + originalName + "`)";
            }

            Location location = context.getLocation(attribute);
            List<Pair<String, Handle>> list = mLocations.get(type);
            for (Pair<String, Handle> pair : list) {
                if (name.equals(pair.getFirst())) {
                    Location secondary = pair.getSecond().resolve();
                    secondary.setMessage("Previously defined here");
                    location.setSecondary(secondary);
                }
            }
            context.report(ISSUE, attribute, location, message);
        } else {
            names.add(name);
            List<Pair<String, Handle>> list = mLocations.get(type);
            if (list == null) {
                list = Lists.newArrayList();
                mLocations.put(type, list);
            }
            Location.Handle handle = context.createLocationHandle(attribute);
            list.add(Pair.of(name, handle));
        }
    }

    /**
     * Reports duplicate {@code name} attributes among the direct children of
     * {@code parent} (e.g. items in a style or attrs in a declare-styleable).
     */
    private static void checkUniqueNames(XmlContext context, Element parent) {
        List<Element> items = LintUtils.getChildren(parent);
        if (items.size() > 1) {
            Set<String> names = Sets.newHashSet();
            for (Element item : items) {
                Attr nameNode = item.getAttributeNode(ATTR_NAME);
                if (nameNode != null) {
                    String name = nameNode.getValue();
                    if (names.contains(name) && context.isEnabled(ISSUE)) {
                        Location location = context.getLocation(nameNode);
                        for (Element prevItem : items) {
                            // BUG FIX: look the name up on prevItem, not on the
                            // current item. The old code read item's own name
                            // attribute, which always matched, so the secondary
                            // location was always the FIRST child in the list
                            // regardless of whether its name actually clashed.
                            Attr attribute = prevItem.getAttributeNode(ATTR_NAME);
                            if (attribute != null && name.equals(attribute.getValue())) {
                                assert prevItem != item;
                                Location prev = context.getLocation(prevItem);
                                prev.setMessage("Previously defined here");
                                location.setSecondary(prev);
                                break;
                            }
                        }
                        String message = String.format(
                                "`%1$s` has already been defined in this `<%2$s>`",
                                name, parent.getTagName());
                        context.report(ISSUE, nameNode, location, message);
                    }
                    names.add(name);
                }
            }
        }
    }

    /** Returns true if {@code node} has no element siblings before it. */
    private static boolean isFirstElementChild(Node node) {
        node = node.getPreviousSibling();
        while (node != null) {
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                return false;
            }
            node = node.getPreviousSibling();
        }

        return true;
    }

    /**
     * Returns the resource type expected for a {@link #TYPE_MISMATCH} error reported by
     * this lint detector. Intended for IDE quickfix implementations.
     *
     * @param message the error message created by this lint detector
     * @param format the format of the error message
     * @return the expected resource type, or null if it could not be extracted
     */
    public static String getExpectedType(@NonNull String message, @NonNull TextFormat format) {
        return LintUtils.findSubstring(format.toText(message), "value of type @", "/");
    }
}
| |
/*
* ReportSpecificationsUTh_Ign.java
*
* Copyright 2006-2018 James F. Bowring, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.dataDictionaries.reportSpecifications;
import java.util.HashMap;
import java.util.Map;
import org.earthtime.dataDictionaries.RadDates;
import org.earthtime.dataDictionaries.UThAnalysisMeasures;
import org.earthtime.dataDictionaries.UThCompositionalMeasures;
import org.earthtime.dataDictionaries.UThFractionationCorrectedIsotopicRatios;
/**
*
* @author James F. Bowring
*/
/**
 * Static report-column specification for the U-series (UTh) "Ign" report
 * table, plus the footnote texts referenced by the rows. Pure data: no logic.
 */
public class ReportSpecificationsUTh_Ign extends ReportSpecificationsAbstract {

    // Report column order =
    //  displayName1, displayName2, displayName3, units, retrieveMethodName, retrieveParameterName, uncertaintyType,
    //     footnoteSpec, visible, useArbitrary? for value, digitcount value, unct visible (if required), description where needed,
    //     needsLead, needsUranium
    // details: https://docs.google.com/spreadsheets/d/1w15Nc4uD2WEg_e5KvAsNJrBaIImTPPTM8f33NaKkw34/edit#gid=0

    /**
     * Column specifications for the U-series report category; each inner
     * array is one report column following the 15-field layout above.
     */
    public static final String[][] ReportCategory_USeriesReportTable = new String[][]{
        // {"", "", "IGSN", "", "getCompositionalMeasureByName", UThCompositionalMeasures.conc238U.getName(), "ABS",
        //     "", "true", "true", "3", "", "238U concentration", "false", "false"
        // },
        //
        {"", "Th", "", "ppm", "getCompositionalMeasureByName", UThCompositionalMeasures.conc232Th.getName(), "ABS",
            "", "true", "false", "3", "true", "232Th concentration", "false", "false"
        },
        {"", "[232Th]", "", "*1e3 dpm/g", "getCompositionalMeasureByName", UThCompositionalMeasures.arConc232Th.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "232Th concentration activity", "false", "false"
        },
        {"", "U", "", "ppm", "getCompositionalMeasureByName", UThCompositionalMeasures.conc238U.getName(), "ABS",
            "", "true", "false", "3", "true", "238U concentration", "false", "false"
        },
        {"", "[238U]", "", "dpm/g", "getCompositionalMeasureByName", UThCompositionalMeasures.arConc238U.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "238U concentration activity", "false", "false"
        },
        //
        {"", "230Th", "", "ppt", "getCompositionalMeasureByName", UThCompositionalMeasures.conc230Th.getName(), "ABS",
            "", "false", "false", "3", "true", "230Th concentration", "false", "false"
        },
        //
        {"", "[238U/", " 232Th]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar238U_232Thfc.getName(), "ABS",
            "FN-1", "true", "false", "3", "true", "[238U/232Th]", "false", "false"
        },
        {"", "238U/", "232Th", "", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r238U_232Thfc.getName(), "ABS",
            "", "false", "false", "3", "true", "238U/232Th", "false", "false"
        },
        //
        {"", "[230Th/", " 232Th]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar230Th_232Thfc.getName(), "ABS",
            "FN-1", "true", "false", "3", "true", "[230Th/232Th]", "false", "false"
        },
        {"", "230Th/", "232Th", "", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r230Th_232Thfc.getName(), "ABS",
            "", "false", "false", "3", "true", "230Th/232Th", "false", "false"
        },
        //
        {"", "[230Th/", " 238U]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar230Th_238Ufc.getName(), "ABS",
            "FN-1&FN-2", "false", "false", "3", "true", "[230Th/238U] (not detrital Th-corr.)", "false", "false"
        },
        {"", "230Th/", "238U", "*1e5", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r230Th_238Ufc.getName(), "ABS",
            "FN-2", "false", "false", "3", "true", "230Th/238U (not detrital Th-corr.)", "false", "false"
        },
        //
        {"[232Th/", " 238U]", "", "*1e5", "getAnalysisMeasure", UThAnalysisMeasures.ar232Th_238Ufc.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "[232Th/238U]", "false", "false"
        },
        {"232Th/", "238U", "", "*1e5", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r232Th_238Ufc.getName(), "ABS",
            "", "false", "false", "3", "true", "232Th/238U", "false", "false"
        },
        //
        {"", "", "delta234U", "", "getAnalysisMeasure", UThAnalysisMeasures.delta234U.getName(), "ABS",
            "FN-3", "false", "false", "3", "true", "delta234U (not detrital Th-corr.)", "false", "false"
        },
        {"", "[234U/", " 238U]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar234U_238Ufc.getName(), "ABS",
            "FN-1&FN-3", "true", "false", "3", "true", "[234U/238U] (not detrital Th-corr.)", "false", "false"
        },
        {"", "234U/", "238U", "*1e5", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r234U_238Ufc.getName(), "ABS",
            "FN-3", "false", "false", "3", "true", "234U/238U (not detrital Th-corr.)", "false", "false"
        },
        //
        {"", "[230Th]", "", "dpm/g", "getCompositionalMeasureByName", UThCompositionalMeasures.arConc230Th.getName(), "ABS",
            "FN-1", "true", "false", "3", "true", "230Th concentration activity", "false", "false"
        },
        {"", "[226Ra]", "", "dpm/g", "getCompositionalMeasureByName", UThCompositionalMeasures.arConc226Ra.getName(), "ABS",
            "FN-1", "true", "false", "3", "true", "226Ra concentration activity", "false", "false"
        },
        //
        {"", "[226Ra/", " 230Th]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar226Ra_230Thfc.getName(), "ABS",
            "FN-1&FN-2", "true", "false", "3", "true", "[226Ra/230Th]", "false", "false"
        },
        {"", "226Ra/", "230Th", "", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r226Ra_230Thfc.getName(), "ABS",
            "FN-2", "false", "false", "3", "true", "226Ra/230Th", "false", "false"
        },
        {"", "Ba", "", "ppm", "getCompositionalMeasureByName", UThCompositionalMeasures.concBa.getName(), "ABS",
            "FN-4", "true", "true", "4", "false", "Ba concentration", "false", "false"
        },
        //
        {"", "[230Th/", " 238U]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar230Th_238Udc.getName(), "ABS",
            "FN-1&FN-2", "false", "false", "3", "true", "[230Th/238U] (detrital Th-corr.)", "false", "false"
        },
        {"", "230Th/", "238U", "*1e5", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r230Th_238Udc.getName(), "ABS",
            "FN-2", "false", "false", "3", "true", "230Th/238U (detrital Th-corr.)", "false", "false"
        },
        //
        {"", "", "delta234U", "", "getAnalysisMeasure", UThAnalysisMeasures.delta234Udc.getName(), "ABS",
            "", "false", "false", "3", "true", "delta234U (detrital Th-corr.)", "false", "false"
        },
        {"", "[234U/", " 238U]", "", "getAnalysisMeasure", UThAnalysisMeasures.ar234U_238Udc.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "[234U/238U] (detrital Th-corr.)", "false", "false"
        },
        {"", "234U/", "238U", "*1e5", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r234U_238Udc.getName(), "ABS",
            "", "false", "false", "3", "true", "234U/238U (detrital Th-corr.)", "false", "false"
        },
        //
        {"", "", "Date", "ka", "getRadiogenicIsotopeDateByName", RadDates.date.getName(), "ABS",
            "FN-6", "false", "false", "2", "true", "Date (not detrital Th-corr.)", "false", "false"
        },
        {" BP", "", "Date", "ka", "getRadiogenicIsotopeDateByName", RadDates.dateBP.getName(), "ABS",
            "FN-7", "false", "false", "2", "true", "Date BP (not detrital Th-corr.)", "false", "false"
        },
        {"", "", "Date", "ka", "getRadiogenicIsotopeDateByName", RadDates.dateCorr.getName(), "ABS",
            "", "false", "false", "2", "true", "Date (detrital Th-corr.)", "false", "false"
        },
        {" BP", "", "Date", "ka", "getRadiogenicIsotopeDateByName", RadDates.dateCorrBP.getName(), "ABS",
            "", "false", "false", "2", "true", "Date BP (detrital Th-corr.)", "false", "false"
        },
        //
        {"", "delta234U", "initial", "", "getAnalysisMeasure", UThAnalysisMeasures.delta234Ui.getName(), "ABS",
            "", "false", "false", "3", "true", "initial delta234U (not detrital Th-corr.)", "false", "false"
        },
        {"[234U/", " 238U]", "initial", "", "getAnalysisMeasure", UThAnalysisMeasures.ar234U_238Ui.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "initial [234U/238U] (not detrital Th-corr.)", "false", "false"
        },
        {"234U/", "238U", "initial", "", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r234U_238Ui.getName(), "ABS",
            "", "false", "false", "3", "true", "initial 234U/238U (not detrital Th-corr.)", "false", "false"
        },
        //
        {"", "delta234U", "initial", "", "getAnalysisMeasure", UThAnalysisMeasures.delta234Uidc.getName(), "ABS",
            "", "false", "false", "3", "true", "initial delta234U (detrital Th-corr.)", "false", "false"
        },
        {"[234U/", " 238U]", "initial", "", "getAnalysisMeasure", UThAnalysisMeasures.ar234U_238Uidc.getName(), "ABS",
            "FN-1", "false", "false", "3", "true", "initial [234U/238U] (detrital Th-corr.)", "false", "false"
        },
        {"234U/", "238U", "initial", "", "getRadiogenicIsotopeRatioByName", UThFractionationCorrectedIsotopicRatios.r234U_238Uidc.getName(), "ABS",
            "", "false", "false", "3", "true", "initial 234U/238U (detrital Th-corr.)", "false", "false"
        },
        //
        {"rho", "date", "delta234U", "", "getAnalysisMeasure", UThAnalysisMeasures.rhoDate__delta234Ui.getName(), "",
            "", "false", "true", "3", "", "Corr. Coef. date - delta234Ui", "false", "false"
        }
    };

    /**
     * Footnote texts keyed by the "FN-n" codes referenced in the table above.
     * NOTE(review): this is a public mutable static map — callers could alter
     * it at runtime; confirm whether it should be wrapped unmodifiable.
     */
    public final static Map<String, String> reportTableFootnotes = new HashMap<String, String>();

    static {

        reportTableFootnotes.put(//
                "FN-1", //
                "Square brackets indicate activity ratios calculated from atomic abundances using the decay constants\n"
                + "<lambda238>, <lambda230>, and <lambda226>.");
        reportTableFootnotes.put(//
                "FN-2", //
                "Ratio measured with a tracer calibrated against gravimetric U and Th solutions.");
        reportTableFootnotes.put(//
                "FN-3", //
                "Ratio calibrated to a gravimetric U solution.");
        reportTableFootnotes.put(
                "FN-4",
                "Ba concentrations have an uncertainty of 0.1%.");
        reportTableFootnotes.put(//
                "FN-6", //
                "Uncorrected, closed-system date calculated using the decay constants <lambda230>,\n<lambda234>, and <lambda238>.\nDates reported relative to"
                + " the date of analysis, <dateOfAnalysis>,\nand do not include uncertainties associated with decay constants.");
        reportTableFootnotes.put(//
                "FN-7", //
                "Uncorrected, closed-system date calculated using the decay constants <lambda230>,\n<lambda234>, and <lambda238>.\nDates reported as Before Present (BP),"
                + " where Present is the year 1950 CE.,\nand do not include uncertainties associated with decay constants.");
    }
}
| |
/*
* Copyright 2005 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.rhino.Node;
import java.util.*;
/**
* Tests for {@link RenameVars}.
*/
public class RenameVarsTest extends CompilerTestCase {
private static final String DEFAULT_PREFIX = "";
private String prefix = DEFAULT_PREFIX;
private VariableMap previouslyUsedMap =
new VariableMap(new HashMap<String, String>());
private RenameVars renameVars;
private boolean withClosurePass = false;
private boolean localRenamingOnly = false;
private boolean preserveFunctionExpressionNames = false;
private boolean useGoogleCodingConvention = true;
private boolean generatePseudoNames = false;
@Override
protected CodingConvention getCodingConvention() {
    // Tests use the Google convention by default; individual tests may
    // clear useGoogleCodingConvention to fall back to the default one.
    return useGoogleCodingConvention
            ? new GoogleCodingConvention()
            : new DefaultCodingConvention();
}
@Override
protected CompilerPass getProcessor(Compiler compiler) {
    // When requested, run the Closure pass and renaming together.
    if (withClosurePass) {
        return new ClosurePassAndRenameVars(compiler);
    }
    // Keep a reference to the pass so tests can read back its variable map.
    renameVars = new RenameVars(compiler, prefix, localRenamingOnly,
            preserveFunctionExpressionNames, generatePseudoNames,
            previouslyUsedMap, null, null);
    return renameVars;
}
@Override
protected int getNumRepetitions() {
    // Renaming is not idempotent, so run each test pass exactly once.
    return 1;
}
@Override
protected void setUp() throws Exception {
    // BUG FIX: the original called "callSuper.setUp()", which is not a valid
    // identifier and does not compile; the parent fixture must be initialized
    // with super.setUp().
    super.setUp();
    // Reset all per-test configuration to its defaults.
    previouslyUsedMap = new VariableMap(new HashMap<String, String>());
    prefix = DEFAULT_PREFIX;
    withClosurePass = false;
    localRenamingOnly = false;
    preserveFunctionExpressionNames = false;
    generatePseudoNames = false;

    // TODO(johnlenz): Enable Normalize during these tests.
}
// Function and parameter names are renamed in declaration order.
public void testRenameSimple() {
    test("function Foo(v1, v2) {return v1;} Foo();",
         "function a(b, c) {return b;} a();");
}

// Global variables and functions all share the global rename namespace.
public void testRenameGlobals() {
    test("var Foo; var Bar, y; function x() { Bar++; }",
         "var a; var b, c; function d() { b++; }");
}

// Locals in independent scopes may reuse the same short names.
public void testRenameLocals() {
    test("(function (v1, v2) {}); (function (v3, v4) {});",
         "(function (a, b) {}); (function (a, b) {});");
    test("function f1(v1, v2) {}; function f2(v3, v4) {};",
         "function c(a, b) {}; function d(a, b) {};");
}

// Redeclared globals map to a single renamed name; with localRenamingOnly
// the global name is preserved.
public void testRenameRedeclaredGlobals() {
    test("function f1(v1, v2) {f1()};" +
         "/** @suppress {duplicate} */" +
         "function f1(v3, v4) {f1()};",
         "function a(b, c) {a()};" +
         "function a(b, c) {a()};");

    localRenamingOnly = true;

    test("function f1(v1, v2) {f1()};" +
         "/** @suppress {duplicate} */" +
         "function f1(v3, v4) {f1()};",
         "function f1(a, b) {f1()};" +
         "function f1(a, b) {f1()};");
}
// Named function expressions: the inner (self-referential) name is renamed
// independently of the var it is assigned to.
public void testRecursiveFunctions1() {
    test("var walk = function walk(node, aFunction) {" +
         "  walk(node, aFunction);" +
         "};",
         "var d = function a(b, c) {" +
         "  a(b, c);" +
         "};");

    localRenamingOnly = true;

    test("var walk = function walk(node, aFunction) {" +
         "  walk(node, aFunction);" +
         "};",
         "var walk = function a(b, c) {" +
         "  a(b, c);" +
         "};");
}

// With preserveFunctionExpressionNames, the inner name survives renaming.
public void testRecursiveFunctions2() {
    preserveFunctionExpressionNames = true;

    test("var walk = function walk(node, aFunction) {" +
         "  walk(node, aFunction);" +
         "};",
         "var c = function walk(a, b) {" +
         "  walk(a, b);" +
         "};");

    localRenamingOnly = true;

    test("var walk = function walk(node, aFunction) {" +
         "  walk(node, aFunction);" +
         "};",
         "var walk = function walk(a, b) {" +
         "  walk(a, b);" +
         "};");
}

// A local may receive a name already used by a global it does not shadow.
public void testRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
         "function a(b, c) {return b;} a();");
}

// Nested scopes continue the name sequence of the enclosing scope.
public void testRenameNested() {
    test("function f1(v1, v2) { (function(v3, v4) {}) }",
         "function a(b, c) { (function(d, e) {}) }");
    test("function f1(v1, v2) { function f2(v3, v4) {} }",
         "function a(b, c) { function d(e, f) {} }");
}
// Extern names are never renamed; other vars avoid colliding with them.
public void testRenameWithExterns1() {
    String externs = "var foo;";
    test(externs, "var bar; foo(bar);", "var a; foo(a);", null, null);
}

// A source var matching an extern name keeps its name.
public void testRenameWithExterns2() {
    String externs = "var a;";
    test(externs, "var b = 5", "var b = 5", null, null);
}

// Names starting with '_' are treated as exported and left alone.
public void testDoNotRenameExportedName() {
    test("_foo()", "_foo()");
}

// Already-minimal names are stable when renaming would gain nothing.
public void testRenameWithNameOverlap() {
    test("var a = 1; var b = 2; b + b;",
         "var a = 1; var b = 2; b + b;");
}

// The prefix is applied to global names only.
public void testRenameWithPrefix1() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {return v1} Foo();",
         "function PRE_(a, b) {return a} PRE_();");
    prefix = DEFAULT_PREFIX;

}

public void testRenameWithPrefix2() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {var v3 = v1 + v2; return v3;} Foo();",
         "function PRE_(a, b) {var c = a + b; return c;} PRE_();");
    prefix = DEFAULT_PREFIX;
}

// A single-char prefix consumes part of the short-name space, forcing
// two-character names ("aa", "ab", ...) for globals.
public void testRenameWithPrefix3() {
    prefix = "a";
    test("function Foo() {return 1;}" +
         "function Bar() {" +
         "  var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
         "      A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
         "  Foo();" +
         "} Bar();",
         "function a() {return 1;}" +
         "function aa() {" +
         "  var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
         "      B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
         "  a();" +
         "} aa();");
    prefix = DEFAULT_PREFIX;
}

// Short names are handed out in first-occurrence order.
public void testNamingBasedOnOrderOfOccurrence() {
    test("var q,p,m,n,l,k; " +
             "(function (r) {}); try { } catch(s) {}; var t = q + q;",
         "var a,b,c,d,e,f; " +
             "(function(g) {}); try { } catch(h) {}; var i = a + a;"
         );
    test("function(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z," +
             "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,$){};" +
             "var a4,a3,a2,a1,b4,b3,b2,b1,ab,ac,ad,fg;function foo(){};",
         "function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
             "A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$){};" +
             "var aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la;function ma(){};");
}
// Stable renaming: a previously-used variable map keeps old assignments and
// only new names ("L n" = local slot n) receive fresh short names.
public void testStableRenameSimple() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
         "function a(b, c) {return b;} a();", expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c", "L 2", "d");
    testRenameMapUsingOldMap("function Foo(v1, v2, v3) {return v1;} Foo();",
         "function a(b, c, d) {return b;} a();", expectedVariableMap);
}

public void testStableRenameGlobals() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d");
    testRenameMap("var Foo; var Bar, y; function x() { Bar++; }",
         "var a; var b, c; function d() { b++; }",
         expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d", "Baz", "f", "L 0" , "e");
    testRenameMapUsingOldMap(
         "var Foo, Baz; var Bar, y; function x(R) { return R + Bar++; }",
         "var a, f; var b, c; function d(e) { return e + b++; }",
         expectedVariableMap);
}

public void testStableRenameWithPointlesslyAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b");
    testRenameMap("function (v1, v2) {}; function (v3, v4) {};",
         "function (a, b) {}; function (a, b) {};",
         expectedVariableMap);

    expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b", "L 2", "c");
    testRenameMapUsingOldMap("function (v0, v1, v2) {}; function (v3, v4) {};",
         "function (a, b, c) {}; function (a, b) {};",
         expectedVariableMap);
}

// Reusing the map from a prior run keeps earlier assignments stable even
// when new globals are introduced.
public void testStableRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
         "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    test("function bar(){return;}function a(v1, v2) {return v1;} a();",
         "function d(){return;}function a(b, c) {return b;} a();");
}

public void testStableRenameNested() {
    VariableMap expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e");
    testRenameMap("function f1(v1, v2) { (function(v3, v4) {}) }",
         "function a(b, c) { (function(d, e) {}) }",
         expectedVariableMap);

    expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e", "L 4", "f");
    testRenameMapUsingOldMap("function f1(v1, v2) { (function(v3, v4, v5) {}) }",
         "function a(b, c) { (function(d, e, f) {}) }",
         expectedVariableMap);
}
public void testStableRenameWithExterns1() {
String externs = "var foo;";
test(externs, "var bar; foo(bar);", "var a; foo(a);", null, null);
previouslyUsedMap = renameVars.getVariableMap();
test(externs, "var bar, baz; foo(bar, baz);",
"var a, b; foo(a, b);", null, null);
}
public void testStableRenameWithExterns2() {
String externs = "var a;";
test(externs, "var b = 5", "var b = 5", null, null);
previouslyUsedMap = renameVars.getVariableMap();
test(externs, "var b = 5, catty = 9;", "var b = 5, c=9;", null, null);
}
/** Vars already holding short names keep them; stable renaming does not shuffle them. */
public void testStableRenameWithNameOverlap() {
test("var a = 1; var b = 2; b + b;",
"var a = 1; var b = 2; b + b;");
previouslyUsedMap = renameVars.getVariableMap();
// The newly introduced "c" does not displace the previously mapped "a"/"b".
test("var a = 1; var c, b = 2; b + b;",
"var a = 1; var c, b = 2; b + b;");
}
/** Named and anonymous function locals mix: named ones map by name, anonymous by "L n". */
public void testStableRenameWithAnonymousFunctions() {
VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "foo", "b");
testRenameMap("function foo(bar){return bar;}foo(function(h){return h;});",
"function b(a){return a}b(function(a){return a;})",
expectedVariableMap);
// Re-run with old map: existing entries stay, the extra anonymous local becomes "L 1".
expectedVariableMap = makeVariableMap("foo", "b", "L 0", "a", "L 1", "c");
testRenameMapUsingOldMap(
"function foo(bar) {return bar;}foo(function(g,h) {return g+h;});",
"function b(a){return a}b(function(a,c){return a+c;})",
expectedVariableMap);
}
/** When a previously renamed global becomes an extern, its old short name is freed for reuse. */
public void testStableRenameSimpleExternsChanges() {
VariableMap expectedVariableMap = makeVariableMap(
"Foo", "a", "L 0", "b", "L 1", "c");
testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
"function a(b, c) {return b;} a();", expectedVariableMap);
// "Foo" is now extern (not renamed); its freed "a" goes to the new local "L 2".
expectedVariableMap = makeVariableMap("L 0", "b", "L 1", "c", "L 2", "a");
String externs = "var Foo;";
testRenameMapUsingOldMap(externs,
"function Foo(v1, v2, v0) {return v1;} Foo();",
"function Foo(b, c, a) {return b;} Foo();",
expectedVariableMap);
}
/** If a previously assigned local name ("b") later appears as an extern, the local is re-renamed. */
public void testStableRenameSimpleLocalNameExterned() {
test("function Foo(v1, v2) {return v1;} Foo();",
"function a(b, c) {return b;} a();");
previouslyUsedMap = renameVars.getVariableMap();
String externs = "var b;";
// v1 cannot keep "b" (now an extern) and gets the next free name "d".
test(externs, "function Foo(v1, v2) {return v1;} Foo(b);",
"function a(d, c) {return d;} a(b);", null, null);
}
/** Same as the externs-change rename-map test, but verifying output only (no map assertion). */
public void testStableRenameSimpleGlobalNameExterned() {
test("function Foo(v1, v2) {return v1;} Foo();",
"function a(b, c) {return b;} a();");
previouslyUsedMap = renameVars.getVariableMap();
String externs = "var Foo;";
// "Foo" is now extern and keeps its name; its old "a" is reused for the new param.
test(externs, "function Foo(v1, v2, v0) {return v1;} Foo();",
"function Foo(b, c, a) {return b;} Foo();", null, null);
}
/** With a global-name prefix, globals stay prefixed across runs; locals may reshuffle. */
public void testStableRenameWithPrefix1AndUnstableLocalNames() {
prefix = "PRE_";
test("function Foo(v1, v2) {return v1} Foo();",
"function PRE_(a, b) {return a} PRE_();");
previouslyUsedMap = renameVars.getVariableMap();
// prefix is re-set because test() state is reconfigured per invocation.
prefix = "PRE_";
test("function Foo(v0, v1, v2) {return v1} Foo();",
"function PRE_(a, b, c) {return b} PRE_();");
}
/**
 * With prefix "a", globals consume the "a"-prefixed namespace ("a", "aa", "ab", ...)
 * while the many locals exhaust single letters and spill into "$", "ba", "ca".
 */
public void testStableRenameWithPrefix2() {
prefix = "a";
test("function Foo() {return 1;}" +
"function Bar() {" +
" var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
" A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
" Foo();" +
"} Bar();",
"function a() {return 1;}" +
"function aa() {" +
" var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
" B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
" a();" +
"} aa();");
previouslyUsedMap = renameVars.getVariableMap();
prefix = "a";
// New global "Baz" takes the next prefixed name "ab"; prior assignments stay stable.
test("function Foo() {return 1;}" +
"function Baz() {return 1;}" +
"function Bar() {" +
" var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
" A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
" Foo();" +
"} Bar();",
"function a() {return 1;}" +
"function ab() {return 1;}" +
"function aa() {" +
" var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
" B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
" a();" +
"} aa();");
}
/**
 * Stability beats brevity: a deliberately long previous name ("LongString") is kept
 * on re-run even though a shorter name would otherwise be chosen.
 */
public void testContrivedExampleWhereConsistentRenamingIsWorse() {
previouslyUsedMap = makeVariableMap(
"Foo", "LongString", "L 0", "b", "L 1", "c");
test("function Foo(v1, v2) {return v1;} Foo();",
"function LongString(b, c) {return b;} LongString();");
previouslyUsedMap = renameVars.getVariableMap();
// The resulting map is byte-for-byte the seeded map — nothing was "improved".
VariableMap expectedVariableMap = makeVariableMap(
"Foo", "LongString", "L 0", "b", "L 1", "c");
assertVariableMapsEqual(expectedVariableMap, previouslyUsedMap);
}
/** With the Closure pass enabled, goog.exportSymbol('a', ...) reserves "a" from renaming. */
public void testExportSimpleSymbolReservesName() {
test("var goog, x; goog.exportSymbol('a', x);",
"var a, b; a.exportSymbol('a', b);");
withClosurePass = true;
// Now 'a' is an exported name, so renaming starts at "b".
test("var goog, x; goog.exportSymbol('a', x);",
"var b, c; b.exportSymbol('a', c);");
}
/** Only the first component of a dotted export ('a' of 'a.b') is reserved. */
public void testExportComplexSymbolReservesName() {
test("var goog, x; goog.exportSymbol('a.b', x);",
"var a, b; a.exportSymbol('a.b', b);");
withClosurePass = true;
test("var goog, x; goog.exportSymbol('a.b', x);",
"var b, c; b.exportSymbol('a.b', c);");
}
/** A non-string-literal export argument must not crash the pass; nothing is reserved. */
public void testExportToNonStringDoesntExplode() {
withClosurePass = true;
test("var goog, a, b; goog.exportSymbol(a, b);",
"var a, b, c; a.exportSymbol(b, c);");
}
/** "$super" is treated as exported (Prototype.js convention) and is not locally renamed. */
public void testDollarSignSuperExport1() {
useGoogleCodingConvention = false;
// See http://code.google.com/p/closure-compiler/issues/detail?id=32
test("var x = function($super,duper,$fantastic){}",
"var c = function($super, a, b){}");
// With full (non-local-only) renaming, a plain global "$super" IS renamed.
localRenamingOnly = false;
test("var $super = 1", "var a = 1");
useGoogleCodingConvention = true;
test("var x = function($super,duper,$fantastic){}",
"var c = function($super,a,b){}");
}
/**
 * Same as {@link #testDollarSignSuperExport1}, but with AST normalization disabled
 * so the pass must handle the unnormalized tree.
 * See http://code.google.com/p/closure-compiler/issues/detail?id=32
 */
public void testDollarSignSuperExport2() {
// Removed dead local "normalizedExpectedJs" — it was assigned and never read.
callSuper.enableNormalize(false);
useGoogleCodingConvention = false;
test("var x = function($super,duper,$fantastic){};" +
"var y = function($super,duper){};",
"var c = function($super, a, b){};" +
"var d = function($super, a){};");
// With full (non-local-only) renaming, a plain global "$super" IS renamed.
localRenamingOnly = false;
test("var $super = 1", "var a = 1");
useGoogleCodingConvention = true;
test("var x = function($super,duper,$fantastic){};" +
"var y = function($super,duper){};",
"var c = function($super, a, b ){};" +
"var d = function($super,a){};");
// Restore normalization for subsequent tests.
callSuper.disableNormalize();
}
/** Pseudo-name mode wraps each original name in '$...$$' for debuggable output. */
public void testPseudoNames() {
generatePseudoNames = false;
// See http://code.google.com/p/closure-compiler/issues/detail?id=32
test("var foo = function(a, b, c){}",
"var d = function(a, b, c){}");
generatePseudoNames = true;
test("var foo = function(a, b, c){}",
"var $foo$$ = function($a$$, $b$$, $c$$){}");
// Even a name colliding with its own pseudo form is handled.
test("var a = function(a, b, c){}",
"var $a$$ = function($a$$, $b$$, $c$$){}");
}
/**
 * Snapshots the current run's rename map as the "previously used" map, then renames
 * {@code input} (with no externs) and checks the output and resulting rename map.
 */
private void testRenameMapUsingOldMap(String input, String expected,
VariableMap expectedMap) {
previouslyUsedMap = renameVars.getVariableMap();
testRenameMap("", input, expected, expectedMap);
}
/**
 * Snapshots the current run's rename map as the "previously used" map, then renames
 * {@code input} against {@code externs} and checks the output and resulting map.
 */
private void testRenameMapUsingOldMap(String externs, String input,
String expected,
VariableMap expectedMap) {
previouslyUsedMap = renameVars.getVariableMap();
testRenameMap(externs, input, expected, expectedMap);
}
/** Convenience overload: renames {@code input} with no externs and checks the map. */
private void testRenameMap(String input, String expected,
VariableMap expectedRenameMap) {
testRenameMap("", input, expected, expectedRenameMap);
}
/**
 * Compiles {@code input} against {@code externs}, asserts the renamed output equals
 * {@code expected}, and asserts the rename map produced by the pass matches
 * {@code expectedRenameMap}.
 */
private void testRenameMap(String externs, String input, String expected,
VariableMap expectedRenameMap) {
test(externs, input, expected, null, null);
assertVariableMapsEqual(expectedRenameMap, renameVars.getVariableMap());
}
/**
 * Builds a {@link VariableMap} from alternating original-name / new-name pairs.
 *
 * @param keyValPairs even-length varargs: original name followed by its new name
 * @throws IllegalArgumentException (via Preconditions) if the length is odd
 */
private VariableMap makeVariableMap(String... keyValPairs) {
Preconditions.checkArgument(keyValPairs.length % 2 == 0);
Map<String, String> mappings = new HashMap<String, String>();
int idx = 0;
while (idx < keyValPairs.length) {
mappings.put(keyValPairs[idx], keyValPairs[idx + 1]);
idx += 2;
}
return new VariableMap(mappings);
}
/** Asserts that two rename maps contain exactly the same original-to-new-name entries. */
private static void assertVariableMapsEqual(VariableMap expected, VariableMap actual) {
assertEquals("VariableMaps not equal",
expected.getOriginalNameToNewNameMap(),
actual.getOriginalNameToNewNameMap());
}
/**
 * Compiler pass that runs {@link ProcessClosurePrimitives} first (so exported
 * variable names are collected) and then runs {@link RenameVars} on the same AST,
 * storing the pass in the test's {@code renameVars} field for later inspection.
 */
private class ClosurePassAndRenameVars implements CompilerPass {
private final Compiler compiler;
private ClosurePassAndRenameVars(Compiler compiler) {
this.compiler = compiler;
}
public void process(Node externs, Node root) {
ProcessClosurePrimitives closurePass =
new ProcessClosurePrimitives(compiler, CheckLevel.WARNING, true);
closurePass.process(externs, root);
// NOTE(review): the three false flags appear to disable pseudo-names,
// local-only renaming, and shadowing — confirm against the RenameVars
// constructor signature before relying on this reading.
renameVars = new RenameVars(compiler, prefix,
false, false, false, previouslyUsedMap, null,
closurePass.getExportedVariableNames());
renameVars.process(externs, root);
}
}
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.w3._2001.smil20.impl;
import java.math.BigDecimal;
import net.opengis.citygml.CitygmlPackage;
import net.opengis.citygml.appearance.AppearancePackage;
import net.opengis.citygml.appearance.impl.AppearancePackageImpl;
import net.opengis.citygml.building.BuildingPackage;
import net.opengis.citygml.building.impl.BuildingPackageImpl;
import net.opengis.citygml.cityfurniture.CityfurniturePackage;
import net.opengis.citygml.cityfurniture.impl.CityfurniturePackageImpl;
import net.opengis.citygml.cityobjectgroup.CityobjectgroupPackage;
import net.opengis.citygml.cityobjectgroup.impl.CityobjectgroupPackageImpl;
import net.opengis.citygml.generics.GenericsPackage;
import net.opengis.citygml.generics.impl.GenericsPackageImpl;
import net.opengis.citygml.impl.CitygmlPackageImpl;
import net.opengis.citygml.landuse.LandusePackage;
import net.opengis.citygml.landuse.impl.LandusePackageImpl;
import net.opengis.citygml.relief.ReliefPackage;
import net.opengis.citygml.relief.impl.ReliefPackageImpl;
import net.opengis.citygml.texturedsurface.TexturedsurfacePackage;
import net.opengis.citygml.texturedsurface.impl.TexturedsurfacePackageImpl;
import net.opengis.citygml.transportation.TransportationPackage;
import net.opengis.citygml.transportation.impl.TransportationPackageImpl;
import net.opengis.citygml.vegetation.VegetationPackage;
import net.opengis.citygml.vegetation.impl.VegetationPackageImpl;
import net.opengis.citygml.waterbody.WaterbodyPackage;
import net.opengis.citygml.waterbody.impl.WaterbodyPackageImpl;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.impl.GmlPackageImpl;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EValidator;
import org.eclipse.emf.ecore.impl.EPackageImpl;
import org.eclipse.emf.ecore.xml.type.XMLTypePackage;
import org.oasis.xAL.XALPackage;
import org.oasis.xAL.impl.XALPackageImpl;
import org.w3._1999.xlink.XlinkPackage;
import org.w3._1999.xlink.impl.XlinkPackageImpl;
import org.w3._2001.smil20.AccumulateType;
import org.w3._2001.smil20.AdditiveType;
import org.w3._2001.smil20.AnimateColorPrototype;
import org.w3._2001.smil20.AnimateMotionPrototype;
import org.w3._2001.smil20.AnimatePrototype;
import org.w3._2001.smil20.AttributeTypeType;
import org.w3._2001.smil20.CalcModeType;
import org.w3._2001.smil20.DocumentRoot;
import org.w3._2001.smil20.FillDefaultType;
import org.w3._2001.smil20.FillTimingAttrsType;
import org.w3._2001.smil20.RestartDefaultType;
import org.w3._2001.smil20.RestartTimingType;
import org.w3._2001.smil20.SetPrototype;
import org.w3._2001.smil20.Smil20Factory;
import org.w3._2001.smil20.Smil20Package;
import org.w3._2001.smil20.SyncBehaviorDefaultType;
import org.w3._2001.smil20.SyncBehaviorType;
import org.w3._2001.smil20.language.LanguagePackage;
import org.w3._2001.smil20.language.impl.LanguagePackageImpl;
import org.w3._2001.smil20.util.Smil20Validator;
import org.w3.xml._1998.namespace.NamespacePackage;
import org.w3.xml._1998.namespace.impl.NamespacePackageImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Package</b>.
* <!-- end-user-doc -->
* @generated
*/
public class Smil20PackageImpl extends EPackageImpl implements Smil20Package {
/**
 * Cached meta objects for the SMIL 2.0 model.  All fields below start as
 * {@code null} and are populated when the package contents are built
 * (see {@link #createPackageContents()}); presumably the enum and data-type
 * fields are filled the same way — confirm in the generated remainder.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
private EClass animateColorPrototypeEClass = null;
/** @generated */
private EClass animateMotionPrototypeEClass = null;
/** @generated */
private EClass animatePrototypeEClass = null;
/** @generated */
private EClass documentRootEClass = null;
/** @generated */
private EClass setPrototypeEClass = null;
/** @generated */
private EEnum accumulateTypeEEnum = null;
/** @generated */
private EEnum additiveTypeEEnum = null;
/** @generated */
private EEnum attributeTypeTypeEEnum = null;
/** @generated */
private EEnum calcModeTypeEEnum = null;
/** @generated */
private EEnum fillDefaultTypeEEnum = null;
/** @generated */
private EEnum fillTimingAttrsTypeEEnum = null;
/** @generated */
private EEnum restartDefaultTypeEEnum = null;
/** @generated */
private EEnum restartTimingTypeEEnum = null;
/** @generated */
private EEnum syncBehaviorDefaultTypeEEnum = null;
/** @generated */
private EEnum syncBehaviorTypeEEnum = null;
/** @generated */
private EDataType accumulateTypeObjectEDataType = null;
/** @generated */
private EDataType additiveTypeObjectEDataType = null;
/** @generated */
private EDataType attributeTypeTypeObjectEDataType = null;
/** @generated */
private EDataType calcModeTypeObjectEDataType = null;
/** @generated */
private EDataType fillDefaultTypeObjectEDataType = null;
/** @generated */
private EDataType fillTimingAttrsTypeObjectEDataType = null;
/** @generated */
private EDataType nonNegativeDecimalTypeEDataType = null;
/** @generated */
private EDataType restartDefaultTypeObjectEDataType = null;
/** @generated */
private EDataType restartTimingTypeObjectEDataType = null;
/** @generated */
private EDataType syncBehaviorDefaultTypeObjectEDataType = null;
/** @generated */
private EDataType syncBehaviorTypeObjectEDataType = null;
/**
 * Creates an instance of the model <b>Package</b>, registered with
 * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the
 * package's namespace URI value.
 * <p>Note: the correct way to create the package is via the static
 * factory method {@link #init init()}, which also performs
 * initialization of the package, or returns the registered package,
 * if one already exists.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see org.eclipse.emf.ecore.EPackage.Registry
 * @see org.w3._2001.smil20.Smil20Package#eNS_URI
 * @see #init()
 * @generated
 */
private Smil20PackageImpl() {
super(eNS_URI, Smil20Factory.eINSTANCE);
}
/**
 * Guards {@link #init()} so the package graph is wired up only once.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
private static boolean isInited = false;
/**
 * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
 *
 * <p>This method is used to initialize {@link Smil20Package#eINSTANCE} when that field is accessed.
 * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #eNS_URI
 * @see #createPackageContents()
 * @see #initializePackageContents()
 * @generated
 */
public static Smil20Package init() {
if (isInited) return (Smil20Package)EPackage.Registry.INSTANCE.getEPackage(Smil20Package.eNS_URI);
// Obtain or create and register package
Smil20PackageImpl theSmil20Package = (Smil20PackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof Smil20PackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new Smil20PackageImpl());
// Set the guard before touching interdependent packages so a re-entrant call
// takes the early-return path above instead of recursing.
isInited = true;
// Initialize simple dependencies
XMLTypePackage.eINSTANCE.eClass();
// Obtain or create and register interdependencies
BuildingPackageImpl theBuildingPackage = (BuildingPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(BuildingPackage.eNS_URI) instanceof BuildingPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(BuildingPackage.eNS_URI) : BuildingPackage.eINSTANCE);
CitygmlPackageImpl theCitygmlPackage = (CitygmlPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(CitygmlPackage.eNS_URI) instanceof CitygmlPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(CitygmlPackage.eNS_URI) : CitygmlPackage.eINSTANCE);
GmlPackageImpl theGmlPackage = (GmlPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(GmlPackage.eNS_URI) instanceof GmlPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(GmlPackage.eNS_URI) : GmlPackage.eINSTANCE);
XlinkPackageImpl theXlinkPackage = (XlinkPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(XlinkPackage.eNS_URI) instanceof XlinkPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(XlinkPackage.eNS_URI) : XlinkPackage.eINSTANCE);
XALPackageImpl theXALPackage = (XALPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(XALPackage.eNS_URI) instanceof XALPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(XALPackage.eNS_URI) : XALPackage.eINSTANCE);
TexturedsurfacePackageImpl theTexturedsurfacePackage = (TexturedsurfacePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(TexturedsurfacePackage.eNS_URI) instanceof TexturedsurfacePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(TexturedsurfacePackage.eNS_URI) : TexturedsurfacePackage.eINSTANCE);
AppearancePackageImpl theAppearancePackage = (AppearancePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(AppearancePackage.eNS_URI) instanceof AppearancePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(AppearancePackage.eNS_URI) : AppearancePackage.eINSTANCE);
TransportationPackageImpl theTransportationPackage = (TransportationPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(TransportationPackage.eNS_URI) instanceof TransportationPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(TransportationPackage.eNS_URI) : TransportationPackage.eINSTANCE);
ReliefPackageImpl theReliefPackage = (ReliefPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(ReliefPackage.eNS_URI) instanceof ReliefPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(ReliefPackage.eNS_URI) : ReliefPackage.eINSTANCE);
CityfurniturePackageImpl theCityfurniturePackage = (CityfurniturePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(CityfurniturePackage.eNS_URI) instanceof CityfurniturePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(CityfurniturePackage.eNS_URI) : CityfurniturePackage.eINSTANCE);
CityobjectgroupPackageImpl theCityobjectgroupPackage = (CityobjectgroupPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(CityobjectgroupPackage.eNS_URI) instanceof CityobjectgroupPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(CityobjectgroupPackage.eNS_URI) : CityobjectgroupPackage.eINSTANCE);
LandusePackageImpl theLandusePackage = (LandusePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(LandusePackage.eNS_URI) instanceof LandusePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(LandusePackage.eNS_URI) : LandusePackage.eINSTANCE);
VegetationPackageImpl theVegetationPackage = (VegetationPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(VegetationPackage.eNS_URI) instanceof VegetationPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(VegetationPackage.eNS_URI) : VegetationPackage.eINSTANCE);
WaterbodyPackageImpl theWaterbodyPackage = (WaterbodyPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(WaterbodyPackage.eNS_URI) instanceof WaterbodyPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(WaterbodyPackage.eNS_URI) : WaterbodyPackage.eINSTANCE);
GenericsPackageImpl theGenericsPackage = (GenericsPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(GenericsPackage.eNS_URI) instanceof GenericsPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(GenericsPackage.eNS_URI) : GenericsPackage.eINSTANCE);
LanguagePackageImpl theLanguagePackage = (LanguagePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(LanguagePackage.eNS_URI) instanceof LanguagePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(LanguagePackage.eNS_URI) : LanguagePackage.eINSTANCE);
NamespacePackageImpl theNamespacePackage = (NamespacePackageImpl)(EPackage.Registry.INSTANCE.getEPackage(NamespacePackage.eNS_URI) instanceof NamespacePackageImpl ? EPackage.Registry.INSTANCE.getEPackage(NamespacePackage.eNS_URI) : NamespacePackage.eINSTANCE);
// Load packages
theGmlPackage.loadPackage();
theXALPackage.loadPackage();
// Create package meta-data objects
theSmil20Package.createPackageContents();
theBuildingPackage.createPackageContents();
theCitygmlPackage.createPackageContents();
theXlinkPackage.createPackageContents();
theTexturedsurfacePackage.createPackageContents();
theAppearancePackage.createPackageContents();
theTransportationPackage.createPackageContents();
theReliefPackage.createPackageContents();
theCityfurniturePackage.createPackageContents();
theCityobjectgroupPackage.createPackageContents();
theLandusePackage.createPackageContents();
theVegetationPackage.createPackageContents();
theWaterbodyPackage.createPackageContents();
theGenericsPackage.createPackageContents();
theLanguagePackage.createPackageContents();
theNamespacePackage.createPackageContents();
// Initialize created meta-data
theSmil20Package.initializePackageContents();
theBuildingPackage.initializePackageContents();
theCitygmlPackage.initializePackageContents();
theXlinkPackage.initializePackageContents();
theTexturedsurfacePackage.initializePackageContents();
theAppearancePackage.initializePackageContents();
theTransportationPackage.initializePackageContents();
theReliefPackage.initializePackageContents();
theCityfurniturePackage.initializePackageContents();
theCityobjectgroupPackage.initializePackageContents();
theLandusePackage.initializePackageContents();
theVegetationPackage.initializePackageContents();
theWaterbodyPackage.initializePackageContents();
theGenericsPackage.initializePackageContents();
theLanguagePackage.initializePackageContents();
theNamespacePackage.initializePackageContents();
// Fix loaded packages
theGmlPackage.fixPackageContents();
theXALPackage.fixPackageContents();
// Register package validator
EValidator.Registry.INSTANCE.put
(theSmil20Package,
new EValidator.Descriptor() {
public EValidator getEValidator() {
return Smil20Validator.INSTANCE;
}
});
// Mark meta-data to indicate it can't be changed
theSmil20Package.freeze();
// Update the registry and return the package
EPackage.Registry.INSTANCE.put(Smil20Package.eNS_URI, theSmil20Package);
return theSmil20Package;
}
/**
 * Returns the meta object for class '<em>Animate Color Prototype</em>'.
 * @generated
 */
public EClass getAnimateColorPrototype() {
return animateColorPrototypeEClass;
}
/** Meta object for the '<em>Accumulate</em>' attribute (feature index 0). @generated */
public EAttribute getAnimateColorPrototype_Accumulate() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(0);
}
/** Meta object for the '<em>Additive</em>' attribute (feature index 1). @generated */
public EAttribute getAnimateColorPrototype_Additive() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(1);
}
/** Meta object for the '<em>Attribute Name</em>' attribute (feature index 2). @generated */
public EAttribute getAnimateColorPrototype_AttributeName() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(2);
}
/** Meta object for the '<em>Attribute Type</em>' attribute (feature index 3). @generated */
public EAttribute getAnimateColorPrototype_AttributeType() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(3);
}
/** Meta object for the '<em>By</em>' attribute (feature index 4). @generated */
public EAttribute getAnimateColorPrototype_By() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(4);
}
/** Meta object for the '<em>From</em>' attribute (feature index 5). @generated */
public EAttribute getAnimateColorPrototype_From() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(5);
}
/** Meta object for the '<em>To</em>' attribute (feature index 6). @generated */
public EAttribute getAnimateColorPrototype_To() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(6);
}
/** Meta object for the '<em>Values</em>' attribute (feature index 7). @generated */
public EAttribute getAnimateColorPrototype_Values() {
return (EAttribute)animateColorPrototypeEClass.getEStructuralFeatures().get(7);
}
/**
 * Returns the meta object for class '<em>Animate Motion Prototype</em>'.
 * @generated
 */
public EClass getAnimateMotionPrototype() {
return animateMotionPrototypeEClass;
}
/** Meta object for the '<em>Accumulate</em>' attribute (feature index 0). @generated */
public EAttribute getAnimateMotionPrototype_Accumulate() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(0);
}
/** Meta object for the '<em>Additive</em>' attribute (feature index 1). @generated */
public EAttribute getAnimateMotionPrototype_Additive() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(1);
}
/** Meta object for the '<em>By</em>' attribute (feature index 2). @generated */
public EAttribute getAnimateMotionPrototype_By() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(2);
}
/** Meta object for the '<em>From</em>' attribute (feature index 3). @generated */
public EAttribute getAnimateMotionPrototype_From() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(3);
}
/** Meta object for the '<em>Origin</em>' attribute (feature index 4). @generated */
public EAttribute getAnimateMotionPrototype_Origin() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(4);
}
/** Meta object for the '<em>To</em>' attribute (feature index 5). @generated */
public EAttribute getAnimateMotionPrototype_To() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(5);
}
/** Meta object for the '<em>Values</em>' attribute (feature index 6). @generated */
public EAttribute getAnimateMotionPrototype_Values() {
return (EAttribute)animateMotionPrototypeEClass.getEStructuralFeatures().get(6);
}
/**
 * Returns the meta object for class '<em>Animate Prototype</em>'.
 * @generated
 */
public EClass getAnimatePrototype() {
return animatePrototypeEClass;
}
/** Meta object for the '<em>Accumulate</em>' attribute (feature index 0). @generated */
public EAttribute getAnimatePrototype_Accumulate() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(0);
}
/** Meta object for the '<em>Additive</em>' attribute (feature index 1). @generated */
public EAttribute getAnimatePrototype_Additive() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(1);
}
/** Meta object for the '<em>Attribute Name</em>' attribute (feature index 2). @generated */
public EAttribute getAnimatePrototype_AttributeName() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(2);
}
/** Meta object for the '<em>Attribute Type</em>' attribute (feature index 3). @generated */
public EAttribute getAnimatePrototype_AttributeType() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(3);
}
/** Meta object for the '<em>By</em>' attribute (feature index 4). @generated */
public EAttribute getAnimatePrototype_By() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(4);
}
/** Meta object for the '<em>From</em>' attribute (feature index 5). @generated */
public EAttribute getAnimatePrototype_From() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(5);
}
/** Meta object for the '<em>To</em>' attribute (feature index 6). @generated */
public EAttribute getAnimatePrototype_To() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(6);
}
/** Meta object for the '<em>Values</em>' attribute (feature index 7). @generated */
public EAttribute getAnimatePrototype_Values() {
return (EAttribute)animatePrototypeEClass.getEStructuralFeatures().get(7);
}
/**
 * Returns the meta object for class '<em>Document Root</em>'.
 * @generated
 */
public EClass getDocumentRoot() {
return documentRootEClass;
}
/** Meta object for the '<em>Mixed</em>' attribute (feature index 0). @generated */
public EAttribute getDocumentRoot_Mixed() {
return (EAttribute)documentRootEClass.getEStructuralFeatures().get(0);
}
/** Meta object for the '<em>XMLNS Prefix Map</em>' reference (feature index 1). @generated */
public EReference getDocumentRoot_XMLNSPrefixMap() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(1);
}
/** Meta object for the '<em>XSI Schema Location</em>' reference (feature index 2). @generated */
public EReference getDocumentRoot_XSISchemaLocation() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(2);
}
/** Meta object for the '<em>Animate</em>' reference (feature index 3). @generated */
public EReference getDocumentRoot_Animate() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(3);
}
/** Meta object for the '<em>Animate Color</em>' reference (feature index 4). @generated */
public EReference getDocumentRoot_AnimateColor() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(4);
}
/** Meta object for the '<em>Animate Motion</em>' reference (feature index 5). @generated */
public EReference getDocumentRoot_AnimateMotion() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(5);
}
/** Meta object for the '<em>Set</em>' reference (feature index 6). @generated */
public EReference getDocumentRoot_Set() {
return (EReference)documentRootEClass.getEStructuralFeatures().get(6);
}
/**
 * Returns the meta object for class '<em>Set Prototype</em>'.
 * @generated
 */
public EClass getSetPrototype() {
return setPrototypeEClass;
}
/** Meta object for the '<em>Attribute Name</em>' attribute (feature index 0). @generated */
public EAttribute getSetPrototype_AttributeName() {
return (EAttribute)setPrototypeEClass.getEStructuralFeatures().get(0);
}
/** Meta object for the '<em>Attribute Type</em>' attribute (feature index 1). @generated */
public EAttribute getSetPrototype_AttributeType() {
return (EAttribute)setPrototypeEClass.getEStructuralFeatures().get(1);
}
/** Meta object for the '<em>To</em>' attribute (feature index 2). @generated */
public EAttribute getSetPrototype_To() {
return (EAttribute)setPrototypeEClass.getEStructuralFeatures().get(2);
}
/**
 * Accessors for the model's enumeration meta objects.
 * @generated
 */
public EEnum getAccumulateType() {
return accumulateTypeEEnum;
}
/** Meta object for enum '<em>Additive Type</em>'. @generated */
public EEnum getAdditiveType() {
return additiveTypeEEnum;
}
/** Meta object for enum '<em>Attribute Type Type</em>'. @generated */
public EEnum getAttributeTypeType() {
return attributeTypeTypeEEnum;
}
/** Meta object for enum '<em>Calc Mode Type</em>'. @generated */
public EEnum getCalcModeType() {
return calcModeTypeEEnum;
}
/** Meta object for enum '<em>Fill Default Type</em>'. @generated */
public EEnum getFillDefaultType() {
return fillDefaultTypeEEnum;
}
/** Meta object for enum '<em>Fill Timing Attrs Type</em>'. @generated */
public EEnum getFillTimingAttrsType() {
return fillTimingAttrsTypeEEnum;
}
/** Meta object for enum '<em>Restart Default Type</em>'. @generated */
public EEnum getRestartDefaultType() {
return restartDefaultTypeEEnum;
}
/** Meta object for enum '<em>Restart Timing Type</em>'. @generated */
public EEnum getRestartTimingType() {
return restartTimingTypeEEnum;
}
/** Meta object for enum '<em>Sync Behavior Default Type</em>'. @generated */
public EEnum getSyncBehaviorDefaultType() {
return syncBehaviorDefaultTypeEEnum;
}
/** Meta object for enum '<em>Sync Behavior Type</em>'. @generated */
public EEnum getSyncBehaviorType() {
return syncBehaviorTypeEEnum;
}
/**
 * Accessors for the model's data-type meta objects (the "Object" wrapper types
 * for the enumerations, plus the non-negative decimal type).
 * @generated
 */
public EDataType getAccumulateTypeObject() {
return accumulateTypeObjectEDataType;
}
/** Meta object for data type '<em>Additive Type Object</em>'. @generated */
public EDataType getAdditiveTypeObject() {
return additiveTypeObjectEDataType;
}
/** Meta object for data type '<em>Attribute Type Type Object</em>'. @generated */
public EDataType getAttributeTypeTypeObject() {
return attributeTypeTypeObjectEDataType;
}
/** Meta object for data type '<em>Calc Mode Type Object</em>'. @generated */
public EDataType getCalcModeTypeObject() {
return calcModeTypeObjectEDataType;
}
/** Meta object for data type '<em>Fill Default Type Object</em>'. @generated */
public EDataType getFillDefaultTypeObject() {
return fillDefaultTypeObjectEDataType;
}
/** Meta object for data type '<em>Fill Timing Attrs Type Object</em>'. @generated */
public EDataType getFillTimingAttrsTypeObject() {
return fillTimingAttrsTypeObjectEDataType;
}
/** Meta object for data type '<em>Non Negative Decimal Type</em>'. @generated */
public EDataType getNonNegativeDecimalType() {
return nonNegativeDecimalTypeEDataType;
}
/** Meta object for data type '<em>Restart Default Type Object</em>'. @generated */
public EDataType getRestartDefaultTypeObject() {
return restartDefaultTypeObjectEDataType;
}
/** Meta object for data type '<em>Restart Timing Type Object</em>'. @generated */
public EDataType getRestartTimingTypeObject() {
return restartTimingTypeObjectEDataType;
}
/** Meta object for data type '<em>Sync Behavior Default Type Object</em>'. @generated */
public EDataType getSyncBehaviorDefaultTypeObject() {
return syncBehaviorDefaultTypeObjectEDataType;
}
/** Meta object for data type '<em>Sync Behavior Type Object</em>'. @generated */
public EDataType getSyncBehaviorTypeObject() {
return syncBehaviorTypeObjectEDataType;
}
/**
 * <!-- begin-user-doc -->
 * Returns the factory associated with this package, cast to the SMIL 2.0 factory type.
 * <!-- end-user-doc -->
 * @generated
 */
public Smil20Factory getSmil20Factory() {
return (Smil20Factory)getEFactoryInstance();
}
/**
 * <!-- begin-user-doc -->
 * Guard flag: set to {@code true} once {@link #createPackageContents()} has run.
 * <!-- end-user-doc -->
 * @generated
 */
private boolean isCreated = false;
/**
 * Creates the meta-model objects for the package. This method is
 * guarded to have no affect on any invocation but its first.
 * <!-- begin-user-doc -->
 * Only creates the EClass/EEnum/EDataType skeletons and their structural
 * features; all typing, literals and annotations are filled in later by
 * {@code initializePackageContents()}. Creation order fixes the feature IDs,
 * so the order below must not be changed.
 * <!-- end-user-doc -->
 * @generated
 */
public void createPackageContents() {
if (isCreated) return;
isCreated = true;
// Create classes and their features
animateColorPrototypeEClass = createEClass(ANIMATE_COLOR_PROTOTYPE);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__ACCUMULATE);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__ADDITIVE);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__ATTRIBUTE_NAME);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__ATTRIBUTE_TYPE);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__BY);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__FROM);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__TO);
createEAttribute(animateColorPrototypeEClass, ANIMATE_COLOR_PROTOTYPE__VALUES);
animateMotionPrototypeEClass = createEClass(ANIMATE_MOTION_PROTOTYPE);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__ACCUMULATE);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__ADDITIVE);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__BY);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__FROM);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__ORIGIN);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__TO);
createEAttribute(animateMotionPrototypeEClass, ANIMATE_MOTION_PROTOTYPE__VALUES);
animatePrototypeEClass = createEClass(ANIMATE_PROTOTYPE);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__ACCUMULATE);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__ADDITIVE);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__ATTRIBUTE_NAME);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__ATTRIBUTE_TYPE);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__BY);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__FROM);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__TO);
createEAttribute(animatePrototypeEClass, ANIMATE_PROTOTYPE__VALUES);
documentRootEClass = createEClass(DOCUMENT_ROOT);
createEAttribute(documentRootEClass, DOCUMENT_ROOT__MIXED);
createEReference(documentRootEClass, DOCUMENT_ROOT__XMLNS_PREFIX_MAP);
createEReference(documentRootEClass, DOCUMENT_ROOT__XSI_SCHEMA_LOCATION);
createEReference(documentRootEClass, DOCUMENT_ROOT__ANIMATE);
createEReference(documentRootEClass, DOCUMENT_ROOT__ANIMATE_COLOR);
createEReference(documentRootEClass, DOCUMENT_ROOT__ANIMATE_MOTION);
createEReference(documentRootEClass, DOCUMENT_ROOT__SET);
setPrototypeEClass = createEClass(SET_PROTOTYPE);
createEAttribute(setPrototypeEClass, SET_PROTOTYPE__ATTRIBUTE_NAME);
createEAttribute(setPrototypeEClass, SET_PROTOTYPE__ATTRIBUTE_TYPE);
createEAttribute(setPrototypeEClass, SET_PROTOTYPE__TO);
// Create enums
accumulateTypeEEnum = createEEnum(ACCUMULATE_TYPE);
additiveTypeEEnum = createEEnum(ADDITIVE_TYPE);
attributeTypeTypeEEnum = createEEnum(ATTRIBUTE_TYPE_TYPE);
calcModeTypeEEnum = createEEnum(CALC_MODE_TYPE);
fillDefaultTypeEEnum = createEEnum(FILL_DEFAULT_TYPE);
fillTimingAttrsTypeEEnum = createEEnum(FILL_TIMING_ATTRS_TYPE);
restartDefaultTypeEEnum = createEEnum(RESTART_DEFAULT_TYPE);
restartTimingTypeEEnum = createEEnum(RESTART_TIMING_TYPE);
syncBehaviorDefaultTypeEEnum = createEEnum(SYNC_BEHAVIOR_DEFAULT_TYPE);
syncBehaviorTypeEEnum = createEEnum(SYNC_BEHAVIOR_TYPE);
// Create data types
accumulateTypeObjectEDataType = createEDataType(ACCUMULATE_TYPE_OBJECT);
additiveTypeObjectEDataType = createEDataType(ADDITIVE_TYPE_OBJECT);
attributeTypeTypeObjectEDataType = createEDataType(ATTRIBUTE_TYPE_TYPE_OBJECT);
calcModeTypeObjectEDataType = createEDataType(CALC_MODE_TYPE_OBJECT);
fillDefaultTypeObjectEDataType = createEDataType(FILL_DEFAULT_TYPE_OBJECT);
fillTimingAttrsTypeObjectEDataType = createEDataType(FILL_TIMING_ATTRS_TYPE_OBJECT);
nonNegativeDecimalTypeEDataType = createEDataType(NON_NEGATIVE_DECIMAL_TYPE);
restartDefaultTypeObjectEDataType = createEDataType(RESTART_DEFAULT_TYPE_OBJECT);
restartTimingTypeObjectEDataType = createEDataType(RESTART_TIMING_TYPE_OBJECT);
syncBehaviorDefaultTypeObjectEDataType = createEDataType(SYNC_BEHAVIOR_DEFAULT_TYPE_OBJECT);
syncBehaviorTypeObjectEDataType = createEDataType(SYNC_BEHAVIOR_TYPE_OBJECT);
}
/**
 * <!-- begin-user-doc -->
 * Guard flag: set to {@code true} once {@link #initializePackageContents()} has run.
 * <!-- end-user-doc -->
 * @generated
 */
private boolean isInitialized = false;
/**
 * Complete the initialization of the package and its meta-model. This
 * method is guarded to have no affect on any invocation but its first.
 * <!-- begin-user-doc -->
 * Wires up the skeletons built by {@code createPackageContents()}: sets
 * package name/URI, resolves dependencies on the XMLType and Language
 * packages, types each feature, adds enum literals, registers data types,
 * and finally installs the ExtendedMetaData annotations. Initialization
 * order is significant and must match the generated feature IDs.
 * <!-- end-user-doc -->
 * @generated
 */
public void initializePackageContents() {
if (isInitialized) return;
isInitialized = true;
// Initialize package
setName(eNAME);
setNsPrefix(eNS_PREFIX);
setNsURI(eNS_URI);
// Obtain other dependent packages
XMLTypePackage theXMLTypePackage = (XMLTypePackage)EPackage.Registry.INSTANCE.getEPackage(XMLTypePackage.eNS_URI);
LanguagePackage theLanguagePackage = (LanguagePackage)EPackage.Registry.INSTANCE.getEPackage(LanguagePackage.eNS_URI);
// Create type parameters
// Set bounds for type parameters
// Add supertypes to classes
// Initialize classes and features; add operations and parameters
initEClass(animateColorPrototypeEClass, AnimateColorPrototype.class, "AnimateColorPrototype", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getAnimateColorPrototype_Accumulate(), this.getAccumulateType(), "accumulate", "none", 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_Additive(), this.getAdditiveType(), "additive", "replace", 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_AttributeName(), theXMLTypePackage.getString(), "attributeName", null, 1, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_AttributeType(), this.getAttributeTypeType(), "attributeType", "auto", 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_By(), theXMLTypePackage.getString(), "by", null, 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_From(), theXMLTypePackage.getString(), "from", null, 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_To(), theXMLTypePackage.getString(), "to", null, 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateColorPrototype_Values(), theXMLTypePackage.getString(), "values", null, 0, 1, AnimateColorPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(animateMotionPrototypeEClass, AnimateMotionPrototype.class, "AnimateMotionPrototype", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getAnimateMotionPrototype_Accumulate(), this.getAccumulateType(), "accumulate", "none", 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_Additive(), this.getAdditiveType(), "additive", "replace", 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_By(), theXMLTypePackage.getString(), "by", null, 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_From(), theXMLTypePackage.getString(), "from", null, 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_Origin(), theXMLTypePackage.getString(), "origin", null, 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_To(), theXMLTypePackage.getString(), "to", null, 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimateMotionPrototype_Values(), theXMLTypePackage.getString(), "values", null, 0, 1, AnimateMotionPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(animatePrototypeEClass, AnimatePrototype.class, "AnimatePrototype", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getAnimatePrototype_Accumulate(), this.getAccumulateType(), "accumulate", "none", 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_Additive(), this.getAdditiveType(), "additive", "replace", 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_AttributeName(), theXMLTypePackage.getString(), "attributeName", null, 1, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_AttributeType(), this.getAttributeTypeType(), "attributeType", "auto", 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_By(), theXMLTypePackage.getString(), "by", null, 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_From(), theXMLTypePackage.getString(), "from", null, 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_To(), theXMLTypePackage.getString(), "to", null, 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getAnimatePrototype_Values(), theXMLTypePackage.getString(), "values", null, 0, 1, AnimatePrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(documentRootEClass, DocumentRoot.class, "DocumentRoot", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getDocumentRoot_Mixed(), ecorePackage.getEFeatureMapEntry(), "mixed", null, 0, -1, null, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, !IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_XMLNSPrefixMap(), ecorePackage.getEStringToStringMapEntry(), null, "xMLNSPrefixMap", null, 0, -1, null, IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_XSISchemaLocation(), ecorePackage.getEStringToStringMapEntry(), null, "xSISchemaLocation", null, 0, -1, null, IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_Animate(), theLanguagePackage.getAnimateType(), null, "animate", null, 0, -2, null, IS_TRANSIENT, IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_AnimateColor(), theLanguagePackage.getAnimateColorType(), null, "animateColor", null, 0, -2, null, IS_TRANSIENT, IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_AnimateMotion(), theLanguagePackage.getAnimateMotionType(), null, "animateMotion", null, 0, -2, null, IS_TRANSIENT, IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, IS_DERIVED, IS_ORDERED);
initEReference(getDocumentRoot_Set(), theLanguagePackage.getSetType(), null, "set", null, 0, -2, null, IS_TRANSIENT, IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, IS_DERIVED, IS_ORDERED);
initEClass(setPrototypeEClass, SetPrototype.class, "SetPrototype", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getSetPrototype_AttributeName(), theXMLTypePackage.getString(), "attributeName", null, 1, 1, SetPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getSetPrototype_AttributeType(), this.getAttributeTypeType(), "attributeType", "auto", 0, 1, SetPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getSetPrototype_To(), theXMLTypePackage.getString(), "to", null, 0, 1, SetPrototype.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
// Initialize enums and add enum literals
initEEnum(accumulateTypeEEnum, AccumulateType.class, "AccumulateType");
addEEnumLiteral(accumulateTypeEEnum, AccumulateType.NONE);
addEEnumLiteral(accumulateTypeEEnum, AccumulateType.SUM);
initEEnum(additiveTypeEEnum, AdditiveType.class, "AdditiveType");
addEEnumLiteral(additiveTypeEEnum, AdditiveType.REPLACE);
addEEnumLiteral(additiveTypeEEnum, AdditiveType.SUM);
initEEnum(attributeTypeTypeEEnum, AttributeTypeType.class, "AttributeTypeType");
addEEnumLiteral(attributeTypeTypeEEnum, AttributeTypeType.XML);
addEEnumLiteral(attributeTypeTypeEEnum, AttributeTypeType.CSS);
addEEnumLiteral(attributeTypeTypeEEnum, AttributeTypeType.AUTO);
initEEnum(calcModeTypeEEnum, CalcModeType.class, "CalcModeType");
addEEnumLiteral(calcModeTypeEEnum, CalcModeType.DISCRETE);
addEEnumLiteral(calcModeTypeEEnum, CalcModeType.LINEAR);
addEEnumLiteral(calcModeTypeEEnum, CalcModeType.PACED);
initEEnum(fillDefaultTypeEEnum, FillDefaultType.class, "FillDefaultType");
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.REMOVE);
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.FREEZE);
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.HOLD);
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.AUTO);
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.INHERIT);
addEEnumLiteral(fillDefaultTypeEEnum, FillDefaultType.TRANSITION);
initEEnum(fillTimingAttrsTypeEEnum, FillTimingAttrsType.class, "FillTimingAttrsType");
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.REMOVE);
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.FREEZE);
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.HOLD);
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.AUTO);
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.DEFAULT);
addEEnumLiteral(fillTimingAttrsTypeEEnum, FillTimingAttrsType.TRANSITION);
initEEnum(restartDefaultTypeEEnum, RestartDefaultType.class, "RestartDefaultType");
addEEnumLiteral(restartDefaultTypeEEnum, RestartDefaultType.NEVER);
addEEnumLiteral(restartDefaultTypeEEnum, RestartDefaultType.ALWAYS);
addEEnumLiteral(restartDefaultTypeEEnum, RestartDefaultType.WHEN_NOT_ACTIVE);
addEEnumLiteral(restartDefaultTypeEEnum, RestartDefaultType.INHERIT);
initEEnum(restartTimingTypeEEnum, RestartTimingType.class, "RestartTimingType");
addEEnumLiteral(restartTimingTypeEEnum, RestartTimingType.NEVER);
addEEnumLiteral(restartTimingTypeEEnum, RestartTimingType.ALWAYS);
addEEnumLiteral(restartTimingTypeEEnum, RestartTimingType.WHEN_NOT_ACTIVE);
addEEnumLiteral(restartTimingTypeEEnum, RestartTimingType.DEFAULT);
initEEnum(syncBehaviorDefaultTypeEEnum, SyncBehaviorDefaultType.class, "SyncBehaviorDefaultType");
addEEnumLiteral(syncBehaviorDefaultTypeEEnum, SyncBehaviorDefaultType.CAN_SLIP);
addEEnumLiteral(syncBehaviorDefaultTypeEEnum, SyncBehaviorDefaultType.LOCKED);
addEEnumLiteral(syncBehaviorDefaultTypeEEnum, SyncBehaviorDefaultType.INDEPENDENT);
addEEnumLiteral(syncBehaviorDefaultTypeEEnum, SyncBehaviorDefaultType.INHERIT);
initEEnum(syncBehaviorTypeEEnum, SyncBehaviorType.class, "SyncBehaviorType");
addEEnumLiteral(syncBehaviorTypeEEnum, SyncBehaviorType.CAN_SLIP);
addEEnumLiteral(syncBehaviorTypeEEnum, SyncBehaviorType.LOCKED);
addEEnumLiteral(syncBehaviorTypeEEnum, SyncBehaviorType.INDEPENDENT);
addEEnumLiteral(syncBehaviorTypeEEnum, SyncBehaviorType.DEFAULT);
// Initialize data types
initEDataType(accumulateTypeObjectEDataType, AccumulateType.class, "AccumulateTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(additiveTypeObjectEDataType, AdditiveType.class, "AdditiveTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(attributeTypeTypeObjectEDataType, AttributeTypeType.class, "AttributeTypeTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(calcModeTypeObjectEDataType, CalcModeType.class, "CalcModeTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(fillDefaultTypeObjectEDataType, FillDefaultType.class, "FillDefaultTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(fillTimingAttrsTypeObjectEDataType, FillTimingAttrsType.class, "FillTimingAttrsTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(nonNegativeDecimalTypeEDataType, BigDecimal.class, "NonNegativeDecimalType", IS_SERIALIZABLE, !IS_GENERATED_INSTANCE_CLASS);
initEDataType(restartDefaultTypeObjectEDataType, RestartDefaultType.class, "RestartDefaultTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(restartTimingTypeObjectEDataType, RestartTimingType.class, "RestartTimingTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(syncBehaviorDefaultTypeObjectEDataType, SyncBehaviorDefaultType.class, "SyncBehaviorDefaultTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
initEDataType(syncBehaviorTypeObjectEDataType, SyncBehaviorType.class, "SyncBehaviorTypeObject", IS_SERIALIZABLE, IS_GENERATED_INSTANCE_CLASS);
// Create resource
createResource(eNS_URI);
// Create annotations
// http:///org/eclipse/emf/ecore/util/ExtendedMetaData
createExtendedMetaDataAnnotations();
}
/**
 * Initializes the annotations for <b>http:///org/eclipse/emf/ecore/util/ExtendedMetaData</b>.
 * <!-- begin-user-doc -->
 * Maps each generated classifier/feature back to its XML schema form:
 * each String[] is a flat key/value list ("name", "kind", "baseType",
 * "namespace", "affiliation", ...) consumed by EMF's ExtendedMetaData
 * during XML (de)serialization. Generated from the SMIL 2.0 XSD;
 * payloads must not be edited by hand.
 * <!-- end-user-doc -->
 * @generated
 */
protected void createExtendedMetaDataAnnotations() {
String source = "http:///org/eclipse/emf/ecore/util/ExtendedMetaData";
addAnnotation
(accumulateTypeEEnum,
source,
new String[] {
"name", "accumulate_._type"
});
addAnnotation
(accumulateTypeObjectEDataType,
source,
new String[] {
"name", "accumulate_._type:Object",
"baseType", "accumulate_._type"
});
addAnnotation
(additiveTypeEEnum,
source,
new String[] {
"name", "additive_._type"
});
addAnnotation
(additiveTypeObjectEDataType,
source,
new String[] {
"name", "additive_._type:Object",
"baseType", "additive_._type"
});
addAnnotation
(animateColorPrototypeEClass,
source,
new String[] {
"name", "animateColorPrototype",
"kind", "empty"
});
addAnnotation
(getAnimateColorPrototype_Accumulate(),
source,
new String[] {
"kind", "attribute",
"name", "accumulate"
});
addAnnotation
(getAnimateColorPrototype_Additive(),
source,
new String[] {
"kind", "attribute",
"name", "additive"
});
addAnnotation
(getAnimateColorPrototype_AttributeName(),
source,
new String[] {
"kind", "attribute",
"name", "attributeName"
});
addAnnotation
(getAnimateColorPrototype_AttributeType(),
source,
new String[] {
"kind", "attribute",
"name", "attributeType"
});
addAnnotation
(getAnimateColorPrototype_By(),
source,
new String[] {
"kind", "attribute",
"name", "by"
});
addAnnotation
(getAnimateColorPrototype_From(),
source,
new String[] {
"kind", "attribute",
"name", "from"
});
addAnnotation
(getAnimateColorPrototype_To(),
source,
new String[] {
"kind", "attribute",
"name", "to"
});
addAnnotation
(getAnimateColorPrototype_Values(),
source,
new String[] {
"kind", "attribute",
"name", "values"
});
addAnnotation
(animateMotionPrototypeEClass,
source,
new String[] {
"name", "animateMotionPrototype",
"kind", "empty"
});
addAnnotation
(getAnimateMotionPrototype_Accumulate(),
source,
new String[] {
"kind", "attribute",
"name", "accumulate"
});
addAnnotation
(getAnimateMotionPrototype_Additive(),
source,
new String[] {
"kind", "attribute",
"name", "additive"
});
addAnnotation
(getAnimateMotionPrototype_By(),
source,
new String[] {
"kind", "attribute",
"name", "by"
});
addAnnotation
(getAnimateMotionPrototype_From(),
source,
new String[] {
"kind", "attribute",
"name", "from"
});
addAnnotation
(getAnimateMotionPrototype_Origin(),
source,
new String[] {
"kind", "attribute",
"name", "origin"
});
addAnnotation
(getAnimateMotionPrototype_To(),
source,
new String[] {
"kind", "attribute",
"name", "to"
});
addAnnotation
(getAnimateMotionPrototype_Values(),
source,
new String[] {
"kind", "attribute",
"name", "values"
});
addAnnotation
(animatePrototypeEClass,
source,
new String[] {
"name", "animatePrototype",
"kind", "empty"
});
addAnnotation
(getAnimatePrototype_Accumulate(),
source,
new String[] {
"kind", "attribute",
"name", "accumulate"
});
addAnnotation
(getAnimatePrototype_Additive(),
source,
new String[] {
"kind", "attribute",
"name", "additive"
});
addAnnotation
(getAnimatePrototype_AttributeName(),
source,
new String[] {
"kind", "attribute",
"name", "attributeName"
});
addAnnotation
(getAnimatePrototype_AttributeType(),
source,
new String[] {
"kind", "attribute",
"name", "attributeType"
});
addAnnotation
(getAnimatePrototype_By(),
source,
new String[] {
"kind", "attribute",
"name", "by"
});
addAnnotation
(getAnimatePrototype_From(),
source,
new String[] {
"kind", "attribute",
"name", "from"
});
addAnnotation
(getAnimatePrototype_To(),
source,
new String[] {
"kind", "attribute",
"name", "to"
});
addAnnotation
(getAnimatePrototype_Values(),
source,
new String[] {
"kind", "attribute",
"name", "values"
});
addAnnotation
(attributeTypeTypeEEnum,
source,
new String[] {
"name", "attributeType_._type"
});
addAnnotation
(attributeTypeTypeObjectEDataType,
source,
new String[] {
"name", "attributeType_._type:Object",
"baseType", "attributeType_._type"
});
addAnnotation
(calcModeTypeEEnum,
source,
new String[] {
"name", "calcMode_._type"
});
addAnnotation
(calcModeTypeObjectEDataType,
source,
new String[] {
"name", "calcMode_._type:Object",
"baseType", "calcMode_._type"
});
addAnnotation
(documentRootEClass,
source,
new String[] {
"name", "",
"kind", "mixed"
});
addAnnotation
(getDocumentRoot_Mixed(),
source,
new String[] {
"kind", "elementWildcard",
"name", ":mixed"
});
addAnnotation
(getDocumentRoot_XMLNSPrefixMap(),
source,
new String[] {
"kind", "attribute",
"name", "xmlns:prefix"
});
addAnnotation
(getDocumentRoot_XSISchemaLocation(),
source,
new String[] {
"kind", "attribute",
"name", "xsi:schemaLocation"
});
addAnnotation
(getDocumentRoot_Animate(),
source,
new String[] {
"kind", "element",
"name", "animate",
"namespace", "##targetNamespace",
"affiliation", "http://www.w3.org/2001/SMIL20/Language#animate"
});
addAnnotation
(getDocumentRoot_AnimateColor(),
source,
new String[] {
"kind", "element",
"name", "animateColor",
"namespace", "##targetNamespace",
"affiliation", "http://www.w3.org/2001/SMIL20/Language#animateColor"
});
addAnnotation
(getDocumentRoot_AnimateMotion(),
source,
new String[] {
"kind", "element",
"name", "animateMotion",
"namespace", "##targetNamespace",
"affiliation", "http://www.w3.org/2001/SMIL20/Language#animateMotion"
});
addAnnotation
(getDocumentRoot_Set(),
source,
new String[] {
"kind", "element",
"name", "set",
"namespace", "##targetNamespace",
"affiliation", "http://www.w3.org/2001/SMIL20/Language#set"
});
addAnnotation
(fillDefaultTypeEEnum,
source,
new String[] {
"name", "fillDefaultType"
});
addAnnotation
(fillDefaultTypeObjectEDataType,
source,
new String[] {
"name", "fillDefaultType:Object",
"baseType", "fillDefaultType"
});
addAnnotation
(fillTimingAttrsTypeEEnum,
source,
new String[] {
"name", "fillTimingAttrsType"
});
addAnnotation
(fillTimingAttrsTypeObjectEDataType,
source,
new String[] {
"name", "fillTimingAttrsType:Object",
"baseType", "fillTimingAttrsType"
});
addAnnotation
(nonNegativeDecimalTypeEDataType,
source,
new String[] {
"name", "nonNegativeDecimalType",
"baseType", "http://www.eclipse.org/emf/2003/XMLType#decimal",
"minInclusive", "0.0"
});
addAnnotation
(restartDefaultTypeEEnum,
source,
new String[] {
"name", "restartDefaultType"
});
addAnnotation
(restartDefaultTypeObjectEDataType,
source,
new String[] {
"name", "restartDefaultType:Object",
"baseType", "restartDefaultType"
});
addAnnotation
(restartTimingTypeEEnum,
source,
new String[] {
"name", "restartTimingType"
});
addAnnotation
(restartTimingTypeObjectEDataType,
source,
new String[] {
"name", "restartTimingType:Object",
"baseType", "restartTimingType"
});
addAnnotation
(setPrototypeEClass,
source,
new String[] {
"name", "setPrototype",
"kind", "empty"
});
addAnnotation
(getSetPrototype_AttributeName(),
source,
new String[] {
"kind", "attribute",
"name", "attributeName"
});
addAnnotation
(getSetPrototype_AttributeType(),
source,
new String[] {
"kind", "attribute",
"name", "attributeType"
});
addAnnotation
(getSetPrototype_To(),
source,
new String[] {
"kind", "attribute",
"name", "to"
});
addAnnotation
(syncBehaviorDefaultTypeEEnum,
source,
new String[] {
"name", "syncBehaviorDefaultType"
});
addAnnotation
(syncBehaviorDefaultTypeObjectEDataType,
source,
new String[] {
"name", "syncBehaviorDefaultType:Object",
"baseType", "syncBehaviorDefaultType"
});
addAnnotation
(syncBehaviorTypeEEnum,
source,
new String[] {
"name", "syncBehaviorType"
});
addAnnotation
(syncBehaviorTypeObjectEDataType,
source,
new String[] {
"name", "syncBehaviorType:Object",
"baseType", "syncBehaviorType"
});
}
} //Smil20PackageImpl
| |
package com.zjut.material_wecenter.models;
import java.util.ArrayList;
/**
* Created by Administrator on 2016/1/27.
*/
public class QuestionDetail {
private QuestionInfo question_info;
private ArrayList<TopicInfo> question_topics;
private ArrayList<AnswerInfo> answers;
/** @return the question header/body details, or {@code null} if not yet set */
public QuestionInfo getQuestion_info() {
return this.question_info;
}
/** @param question_info the question header/body details to store */
public void setQuestion_info(QuestionInfo question_info) {
this.question_info = question_info;
}
/** @return the topics tagged on this question, or {@code null} if not yet set */
public ArrayList<TopicInfo> getQuestion_topics() {
return this.question_topics;
}
/** @param question_topics the topic list to store */
public void setQuestion_topics(ArrayList<TopicInfo> question_topics) {
this.question_topics = question_topics;
}
/** @return the answers posted to this question, or {@code null} if not yet set */
public ArrayList<AnswerInfo> getAnswers() {
return this.answers;
}
/** @param answers the answer list to store */
public void setAnswers(ArrayList<AnswerInfo> answers) {
this.answers = answers;
}
/**
 * Data holder for a question's header fields as delivered by the API.
 * Plain mutable bean: every field has a matching getter/setter pair and
 * no validation is performed.
 */
public static class QuestionInfo {
private int question_id;
private String question_content;
private String question_detail;
private int answer_count;
private int view_count;
private int focus_count;
private int comment_count;
private int thanks_count;
private long add_time;
private long update_time;
private int agree_count;
private int against_count;
private int user_answered;
private int user_thanks;
private int user_follow_check;
private int user_question_focus;
private UserInfoEntity user_info;
public int getQuestion_id() {
return this.question_id;
}
public void setQuestion_id(int question_id) {
this.question_id = question_id;
}
public String getQuestion_content() {
return this.question_content;
}
public void setQuestion_content(String question_content) {
this.question_content = question_content;
}
public String getQuestion_detail() {
return this.question_detail;
}
public void setQuestion_detail(String question_detail) {
this.question_detail = question_detail;
}
public int getAnswer_count() {
return this.answer_count;
}
public void setAnswer_count(int answer_count) {
this.answer_count = answer_count;
}
public int getView_count() {
return this.view_count;
}
public void setView_count(int view_count) {
this.view_count = view_count;
}
public int getFocus_count() {
return this.focus_count;
}
public void setFocus_count(int focus_count) {
this.focus_count = focus_count;
}
public int getComment_count() {
return this.comment_count;
}
public void setComment_count(int comment_count) {
this.comment_count = comment_count;
}
public int getThanks_count() {
return this.thanks_count;
}
public void setThanks_count(int thanks_count) {
this.thanks_count = thanks_count;
}
public long getAdd_time() {
return this.add_time;
}
public void setAdd_time(long add_time) {
this.add_time = add_time;
}
public long getUpdate_time() {
return this.update_time;
}
public void setUpdate_time(long update_time) {
this.update_time = update_time;
}
public int getAgree_count() {
return this.agree_count;
}
public void setAgree_count(int agree_count) {
this.agree_count = agree_count;
}
public int getAgainst_count() {
return this.against_count;
}
public void setAgainst_count(int against_count) {
this.against_count = against_count;
}
public int getUser_answered() {
return this.user_answered;
}
public void setUser_answered(int user_answered) {
this.user_answered = user_answered;
}
public int getUser_thanks() {
return this.user_thanks;
}
public void setUser_thanks(int user_thanks) {
this.user_thanks = user_thanks;
}
public int getUser_follow_check() {
return this.user_follow_check;
}
public void setUser_follow_check(int user_follow_check) {
this.user_follow_check = user_follow_check;
}
public int getUser_question_focus() {
return this.user_question_focus;
}
public void setUser_question_focus(int user_question_focus) {
this.user_question_focus = user_question_focus;
}
public UserInfoEntity getUser_info() {
return this.user_info;
}
public void setUser_info(UserInfoEntity user_info) {
this.user_info = user_info;
}
/**
 * Data holder for the author of a question. Plain mutable bean with
 * getter/setter pairs and no validation.
 */
public static class UserInfoEntity {
private long uid;
private String signature;
private String user_name;
private String avatar_file;
public long getUid() {
return this.uid;
}
public void setUid(long uid) {
this.uid = uid;
}
public String getSignature() {
return this.signature;
}
public void setSignature(String signature) {
this.signature = signature;
}
public String getUser_name() {
return this.user_name;
}
public void setUser_name(String user_name) {
this.user_name = user_name;
}
public String getAvatar_file() {
return this.avatar_file;
}
public void setAvatar_file(String avatar_file) {
this.avatar_file = avatar_file;
}
}
}
/** Plain data holder for a topic reference (id + title). */
public static class TopicInfo {
    private int topic_id;       // numeric topic id
    private String topic_title; // human-readable topic title

    public int getTopic_id() {
        return topic_id;
    }

    public void setTopic_id(int topic_id) {
        this.topic_id = topic_id;
    }

    public String getTopic_title() {
        return topic_title;
    }

    public void setTopic_title(String topic_title) {
        this.topic_title = topic_title;
    }
}
// Plain data holder for a single answer to a question. Snake_case field names
// mirror the JSON wire format this entity is bound to.
public static class AnswerInfo {
// Identity and content of the answer and its parent question.
private int answer_id;
private String answer_content;
private int question_id;
private String question_content;
// Creation timestamp (epoch value; seconds vs. milliseconds not visible here — confirm).
private long add_time;
// Aggregate counters.
private int against_count;
private int agree_count;
private int thanks_count;
private int comment_count;
// Client/source string the answer was published from.
private String publish_source;
// Current viewer's state toward this answer (int flags; exact encoding not visible here).
private int user_thanks_status;
private int user_vote_status;
// Author details of the answer (see nested UserInfoEntity).
private UserInfoEntity user_info;
public int getAnswer_id() {
return answer_id;
}
public void setAnswer_id(int answer_id) {
this.answer_id = answer_id;
}
public String getAnswer_content() {
return answer_content;
}
public void setAnswer_content(String answer_content) {
this.answer_content = answer_content;
}
public int getQuestion_id() {
return question_id;
}
public void setQuestion_id(int question_id) {
this.question_id = question_id;
}
public String getQuestion_content() {
return question_content;
}
public void setQuestion_content(String question_content) {
this.question_content = question_content;
}
public long getAdd_time() {
return add_time;
}
public void setAdd_time(long add_time) {
this.add_time = add_time;
}
public int getAgainst_count() {
return against_count;
}
public void setAgainst_count(int against_count) {
this.against_count = against_count;
}
public int getAgree_count() {
return agree_count;
}
public void setAgree_count(int agree_count) {
this.agree_count = agree_count;
}
public int getThanks_count() {
return thanks_count;
}
public void setThanks_count(int thanks_count) {
this.thanks_count = thanks_count;
}
public int getComment_count() {
return comment_count;
}
public void setComment_count(int comment_count) {
this.comment_count = comment_count;
}
public int getUser_thanks_status() {
return user_thanks_status;
}
public void setUser_thanks_status(int user_thanks_status) {
this.user_thanks_status = user_thanks_status;
}
public int getUser_vote_status() {
return user_vote_status;
}
public void setUser_vote_status(int user_vote_status) {
this.user_vote_status = user_vote_status;
}
public String getPublish_source() {
return publish_source;
}
public void setPublish_source(String publish_source) {
this.publish_source = publish_source;
}
public UserInfoEntity getUser_info() {
return user_info;
}
public void setUser_info(UserInfoEntity user_info) {
this.user_info = user_info;
}
// Plain data holder for the answer author's details (duplicates the question-level
// UserInfoEntity shape; kept separate to match the nested JSON structure).
public static class UserInfoEntity {
private long uid;
private String signature;
private String user_name;
private String avatar_file;
public void setUid(long uid) {
this.uid = uid;
}
public String getSignature() {
return signature;
}
public void setSignature(String signature) {
this.signature = signature;
}
public void setUser_name(String user_name) {
this.user_name = user_name;
}
public void setAvatar_file(String avatar_file) {
this.avatar_file = avatar_file;
}
public long getUid() {
return uid;
}
public String getUser_name() {
return user_name;
}
public String getAvatar_file() {
return avatar_file;
}
}
}
}
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.client;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Set;
import javax.servlet.GenericServlet;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import org.springframework.android.test.Assert;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import android.test.AndroidTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.util.Log;
/**
* @author Arjen Poutsma
* @author Roy Clarkson
*/
public abstract class AbstractRestTemplateIntegrationTests extends AndroidTestCase {

    private static final String TAG = getTag();

    protected static String getTag() {
        return AbstractRestTemplateIntegrationTests.class.getSimpleName();
    }

    private RestTemplate restTemplate;

    // Jetty server and derived configuration are static so a single server
    // instance is shared across all test methods in the suite.
    private static Server jettyServer;

    private static String helloWorld = "H\u00e9llo W\u00f6rld";

    private static String baseUrl;

    private static MediaType contentType;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        setUpJetty();
        this.restTemplate = getRestTemplate();
    }

    @Override
    protected void tearDown() throws Exception {
        // The shared Jetty server is deliberately left running between tests;
        // only the per-test template reference is released here.
        this.restTemplate = null;
    }

    /** Starts the embedded Jetty server (once) and registers all test servlets. */
    private void setUpJetty() throws Exception {
        if (jettyServer == null) {
            int port = 8181;
            jettyServer = new Server(port);
            baseUrl = "http://localhost:" + port;
            Context jettyContext = new Context(jettyServer, "/");
            byte[] bytes = helloWorld.getBytes("UTF-8");
            contentType = new MediaType("text", "plain", Collections.singletonMap("charset", "utf-8"));
            jettyContext.addServlet(new ServletHolder(new GetServlet(bytes, contentType)), "/get");
            jettyContext.addServlet(new ServletHolder(new GetServlet(new byte[0], contentType)), "/get/nothing");
            jettyContext.addServlet(new ServletHolder(new GetServlet(bytes, null)), "/get/nocontenttype");
            jettyContext.addServlet(new ServletHolder(new ErrorServlet(401)), "/get/notauthorized");
            jettyContext.addServlet(new ServletHolder(new PostServlet(helloWorld, baseUrl + "/post/1", bytes,
                    contentType)), "/post");
            jettyContext.addServlet(new ServletHolder(new StatusCodeServlet(204)), "/status/nocontent");
            jettyContext.addServlet(new ServletHolder(new StatusCodeServlet(304)), "/status/notmodified");
            // BUGFIX: a stray ErrorServlet(401) was previously also mapped to
            // "/status/notfound" (copy-paste from "/get/notauthorized"),
            // conflicting with the 404 mapping below that testNotFound() relies on.
            jettyContext.addServlet(new ServletHolder(new ErrorServlet(404)), "/status/notfound");
            jettyContext.addServlet(new ServletHolder(new ErrorServlet(500)), "/status/server");
            jettyContext.addServlet(new ServletHolder(new UriServlet()), "/uri/*");
            jettyContext.addServlet(new ServletHolder(new MultipartServlet()), "/multipart");
            jettyServer.start();
        }
    }

    /**
     * Stops the shared Jetty server and blocks until it is fully stopped.
     * Not wired into tearDown() because the server is reused across tests;
     * intended for an explicit suite-level teardown.
     */
    private void tearDownJetty() throws Exception {
        if (jettyServer != null && jettyServer.isRunning()) {
            jettyServer.stop();
            while (jettyServer.isStopping()) {
                Log.d(TAG, "Stopping Jetty...");
            }
            if (jettyServer.isStopped()) {
                Log.d(TAG, "Jetty is stopped");
                jettyServer = null;
            }
        }
    }

    /** Subclasses supply the concrete RestTemplate configuration under test. */
    protected abstract RestTemplate getRestTemplate();

    @MediumTest
    public void testGetString() {
        String s = restTemplate.getForObject(baseUrl + "/{method}", String.class, "get");
        assertEquals("Invalid content", helloWorld, s);
    }

    @MediumTest
    public void testGetEntity() {
        ResponseEntity<String> entity = restTemplate.getForEntity(baseUrl + "/{method}", String.class, "get");
        assertEquals("Invalid content", helloWorld, entity.getBody());
        assertFalse("No headers", entity.getHeaders().isEmpty());
        assertEquals("Invalid content-type", contentType, entity.getHeaders().getContentType());
        assertEquals("Invalid status code", HttpStatus.OK, entity.getStatusCode());
    }

    @MediumTest
    public void testGetEntityNotAuthorized() {
        try {
            restTemplate.getForEntity(baseUrl + "/get/notauthorized", String.class);
            fail("HttpClientErrorException expected");
        } catch (HttpClientErrorException ex) {
            assertEquals(HttpStatus.UNAUTHORIZED, ex.getStatusCode());
            assertEquals(HttpStatus.UNAUTHORIZED.getReasonPhrase(), ex.getStatusText());
            assertNotNull(ex.getResponseBodyAsString());
        }
    }

    @MediumTest
    public void testGetNoResponse() {
        String s = restTemplate.getForObject(baseUrl + "/get/nothing", String.class);
        assertNull("Invalid content", s);
    }

    @MediumTest
    public void testGetNoContentTypeHeader() throws UnsupportedEncodingException {
        byte[] bytes = restTemplate.getForObject(baseUrl + "/get/nocontenttype", byte[].class);
        Assert.assertArrayEquals("Invalid content", helloWorld.getBytes("UTF-8"), bytes);
    }

    @MediumTest
    public void testGetNoContent() {
        String s = restTemplate.getForObject(baseUrl + "/status/nocontent", String.class);
        assertNull("Invalid content", s);
        ResponseEntity<String> entity = restTemplate.getForEntity(baseUrl + "/status/nocontent", String.class);
        assertEquals("Invalid response code", HttpStatus.NO_CONTENT, entity.getStatusCode());
        assertNull("Invalid content", entity.getBody());
    }

    @MediumTest
    public void testGetNotModified() {
        String s = restTemplate.getForObject(baseUrl + "/status/notmodified", String.class);
        assertNull("Invalid content", s);
        ResponseEntity<String> entity = restTemplate.getForEntity(baseUrl + "/status/notmodified", String.class);
        assertEquals("Invalid response code", HttpStatus.NOT_MODIFIED, entity.getStatusCode());
        assertNull("Invalid content", entity.getBody());
    }

    @MediumTest
    public void testPostForLocation() throws URISyntaxException {
        URI location = restTemplate.postForLocation(baseUrl + "/{method}", helloWorld, "post");
        assertEquals("Invalid location", new URI(baseUrl + "/post/1"), location);
    }

    @MediumTest
    public void testPostForLocationEntity() throws URISyntaxException {
        HttpHeaders entityHeaders = new HttpHeaders();
        entityHeaders.setContentType(new MediaType("text", "plain", Charset.forName("ISO-8859-15")));
        HttpEntity<String> entity = new HttpEntity<String>(helloWorld, entityHeaders);
        URI location = restTemplate.postForLocation(baseUrl + "/{method}", entity, "post");
        assertEquals("Invalid location", new URI(baseUrl + "/post/1"), location);
    }

    @MediumTest
    public void testPostForObject() throws URISyntaxException {
        String s = restTemplate.postForObject(baseUrl + "/{method}", helloWorld, String.class, "post");
        assertEquals("Invalid content", helloWorld, s);
    }

    @MediumTest
    public void testNotFound() {
        try {
            restTemplate.execute(baseUrl + "/status/notfound", HttpMethod.GET, null, null);
            fail("HttpClientErrorException expected");
        } catch (HttpClientErrorException ex) {
            assertEquals(HttpStatus.NOT_FOUND, ex.getStatusCode());
            assertNotNull(ex.getStatusText());
            assertNotNull(ex.getResponseBodyAsString());
        }
    }

    @MediumTest
    public void testServerError() {
        try {
            restTemplate.execute(baseUrl + "/status/server", HttpMethod.GET, null, null);
            fail("HttpServerErrorException expected");
        } catch (HttpServerErrorException ex) {
            assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, ex.getStatusCode());
            assertNotNull(ex.getStatusText());
            assertNotNull(ex.getResponseBodyAsString());
        }
    }

    @MediumTest
    public void testOptionsForAllow() throws URISyntaxException {
        Set<HttpMethod> allowed = restTemplate.optionsForAllow(new URI(baseUrl + "/get"));
        assertEquals("Invalid response",
                EnumSet.of(HttpMethod.GET, HttpMethod.OPTIONS, HttpMethod.HEAD, HttpMethod.TRACE), allowed);
    }

    @MediumTest
    public void testUri() throws InterruptedException, URISyntaxException {
        // UriServlet echoes the raw request URI, so these assertions verify the
        // template's URI encoding of non-ASCII and reserved characters.
        String result = restTemplate.getForObject(baseUrl + "/uri/{query}", String.class, "Z\u00fcrich");
        assertEquals("Invalid request URI", "/uri/Z%C3%BCrich", result);
        result = restTemplate.getForObject(baseUrl + "/uri/query={query}", String.class, "foo@bar");
        assertEquals("Invalid request URI", "/uri/query=foo@bar", result);
        result = restTemplate.getForObject(baseUrl + "/uri/query={query}", String.class, "T\u014dky\u014d");
        assertEquals("Invalid request URI", "/uri/query=T%C5%8Dky%C5%8D", result);
    }

    @MediumTest
    public void testMultipart() throws UnsupportedEncodingException {
        MultiValueMap<String, Object> parts = new LinkedMultiValueMap<String, Object>();
        parts.add("name 1", "value 1");
        parts.add("name 2", "value 2+1");
        parts.add("name 2", "value 2+2");
        Resource logo = new ClassPathResource("res/drawable/icon.png");
        parts.add("logo", logo);
        restTemplate.postForLocation(baseUrl + "/multipart", parts);
    }

    @MediumTest
    public void testExchangeGet() throws Exception {
        HttpHeaders requestHeaders = new HttpHeaders();
        requestHeaders.set("MyHeader", "MyValue");
        // Parameterized type instead of the raw HttpEntity the original used.
        HttpEntity<Object> requestEntity = new HttpEntity<Object>(requestHeaders);
        ResponseEntity<String> response = restTemplate.exchange(baseUrl + "/{method}", HttpMethod.GET, requestEntity,
                String.class, "get");
        assertEquals("Invalid content", helloWorld, response.getBody());
    }

    @MediumTest
    public void testExchangePost() throws Exception {
        HttpHeaders requestHeaders = new HttpHeaders();
        requestHeaders.set("MyHeader", "MyValue");
        requestHeaders.setContentType(MediaType.TEXT_PLAIN);
        HttpEntity<String> requestEntity = new HttpEntity<String>(helloWorld, requestHeaders);
        HttpEntity<Void> result = restTemplate.exchange(baseUrl + "/{method}", HttpMethod.POST, requestEntity,
                Void.class, "post");
        assertEquals("Invalid location", new URI(baseUrl + "/post/1"), result.getHeaders().getLocation());
        assertFalse(result.hasBody());
    }

    /** Servlet that sets the given status code without a body. */
    private static class StatusCodeServlet extends GenericServlet {

        private static final long serialVersionUID = 1L;

        private final int sc;

        private StatusCodeServlet(int sc) {
            this.sc = sc;
        }

        @Override
        public void service(ServletRequest request, ServletResponse response) throws ServletException, IOException {
            ((HttpServletResponse) response).setStatus(sc);
        }
    }

    /** Servlet that sends a container-generated error page for the given status code. */
    private static class ErrorServlet extends GenericServlet {

        private static final long serialVersionUID = 1L;

        private final int sc;

        private ErrorServlet(int sc) {
            this.sc = sc;
        }

        @Override
        public void service(ServletRequest request, ServletResponse response) throws ServletException, IOException {
            ((HttpServletResponse) response).sendError(sc);
        }
    }

    /** Servlet that serves a fixed byte payload, optionally with a Content-Type header. */
    private static class GetServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        private final byte[] buf;

        private final MediaType contentType;

        private GetServlet(byte[] buf, MediaType contentType) {
            this.buf = buf;
            this.contentType = contentType;
        }

        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException,
                IOException {
            // A null contentType deliberately omits the header (see /get/nocontenttype).
            if (contentType != null) {
                response.setContentType(contentType.toString());
            }
            response.setContentLength(buf.length);
            FileCopyUtils.copy(buf, response.getOutputStream());
        }
    }

    /** Servlet that asserts the posted body and replies 201 Created with a Location header. */
    private static class PostServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        private final String s;

        private final String location;

        private final byte[] buf;

        private final MediaType contentType;

        private PostServlet(String s, String location, byte[] buf, MediaType contentType) {
            this.s = s;
            this.location = location;
            this.buf = buf;
            this.contentType = contentType;
        }

        @Override
        protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
                IOException {
            assertTrue("Invalid request content-length", request.getContentLength() > 0);
            assertNotNull("No content-type", request.getContentType());
            String body = FileCopyUtils.copyToString(request.getReader());
            assertEquals("Invalid request body", s, body);
            response.setStatus(HttpServletResponse.SC_CREATED);
            response.setHeader("Location", location);
            response.setContentLength(buf.length);
            response.setContentType(contentType.toString());
            FileCopyUtils.copy(buf, response.getOutputStream());
        }
    }

    /** Servlet that echoes the raw request URI back as text/plain (used by testUri). */
    private static class UriServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
            resp.setContentType("text/plain");
            resp.setCharacterEncoding("UTF-8");
            resp.getWriter().write(req.getRequestURI());
        }
    }

    /** Multipart sink; accepts the upload without verifying it. */
    private static class MultipartServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
            // TODO: verify the multipart payload (part count, field names/values,
            // and the uploaded file's name and content type) once a multipart
            // parsing dependency (e.g. commons-fileupload) is available here.
        }
    }
}
| |
package com.google.ratel.deps.jackson.databind.ser.std;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import com.google.ratel.deps.jackson.core.*;
import com.google.ratel.deps.jackson.databind.*;
import com.google.ratel.deps.jackson.databind.annotation.JacksonStdImpl;
import com.google.ratel.deps.jackson.databind.jsonFormatVisitors.JsonFormatVisitable;
import com.google.ratel.deps.jackson.databind.jsonFormatVisitors.JsonFormatVisitorWrapper;
import com.google.ratel.deps.jackson.databind.jsonschema.SchemaAware;
import com.google.ratel.deps.jackson.databind.node.JsonNodeFactory;
import com.google.ratel.deps.jackson.databind.node.ObjectNode;
import com.google.ratel.deps.jackson.databind.util.Converter;
/**
* Base class used by all standard serializers, and can also
* be used for custom serializers (in fact, this is the recommended
* base class to use).
* Provides convenience methods for implementing {@link SchemaAware}
*/
public abstract class StdSerializer<T>
    extends JsonSerializer<T>
    implements JsonFormatVisitable, SchemaAware
{
    /**
     * Nominal type supported, usually declared type of
     * property for which serializer is used.
     */
    protected final Class<T> _handledType;

    /*
    /**********************************************************
    /* Life-cycle
    /**********************************************************
     */

    protected StdSerializer(Class<T> t) {
        _handledType = t;
    }

    @SuppressWarnings("unchecked")
    protected StdSerializer(JavaType type) {
        _handledType = (Class<T>) type.getRawClass();
    }

    /**
     * Alternate constructor that is (alas!) needed to work
     * around kinks of generic type handling
     */
    @SuppressWarnings("unchecked")
    protected StdSerializer(Class<?> t, boolean dummy) {
        _handledType = (Class<T>) t;
    }

    /*
    /**********************************************************
    /* Accessors
    /**********************************************************
     */

    @Override
    public Class<T> handledType() { return _handledType; }

    /*
    /**********************************************************
    /* Serialization
    /**********************************************************
     */

    @Override
    public abstract void serialize(T value, JsonGenerator jgen, SerializerProvider provider)
        throws IOException, JsonGenerationException;

    /*
    /**********************************************************
    /* Helper methods for JSON Schema generation
    /**********************************************************
     */

    /**
     * Default implementation simply claims type is "string"; usually
     * overriden by custom serializers.
     */
    @Override
    public JsonNode getSchema(SerializerProvider provider, Type typeHint)
        throws JsonMappingException
    {
        return createSchemaNode("string");
    }

    /**
     * Default implementation simply claims type is "string"; usually
     * overriden by custom serializers.
     */
    @Override
    public JsonNode getSchema(SerializerProvider provider, Type typeHint, boolean isOptional)
        throws JsonMappingException
    {
        ObjectNode schema = (ObjectNode) getSchema(provider, typeHint);
        // 'required' defaults to false, so it is only written when true
        if (!isOptional) {
            schema.put("required", !isOptional);
        }
        return schema;
    }

    protected ObjectNode createObjectNode() {
        return JsonNodeFactory.instance.objectNode();
    }

    /** Creates a schema object node with the given "type" property. */
    protected ObjectNode createSchemaNode(String type)
    {
        ObjectNode schema = createObjectNode();
        schema.put("type", type);
        return schema;
    }

    /** Creates a schema node with "type", marking it "required" when not optional. */
    protected ObjectNode createSchemaNode(String type, boolean isOptional)
    {
        ObjectNode schema = createSchemaNode(type);
        // as per [JACKSON-563]. Note that 'required' defaults to false
        if (!isOptional) {
            schema.put("required", !isOptional);
        }
        return schema;
    }

    /**
     * Default implementation specifies no format. This behavior is usually
     * overriden by custom serializers.
     */
    @Override
    public void acceptJsonFormatVisitor(JsonFormatVisitorWrapper visitor, JavaType typeHint)
        throws JsonMappingException
    {
        visitor.expectAnyFormat(typeHint);
    }

    /*
    /**********************************************************
    /* Helper methods for exception handling
    /**********************************************************
     */

    /**
     * Method that will modify caught exception (passed in as argument)
     * as necessary to include reference information, and to ensure it
     * is a subtype of {@link IOException}, or an unchecked exception.
     *<p>
     * Rules for wrapping and unwrapping are bit complicated; essentially:
     *<ul>
     * <li>Errors are to be passed as is (if uncovered via unwrapping)
     * <li>"Plain" IOExceptions (ones that are not of type
     *   {@link JsonMappingException} are to be passed as is
     *</ul>
     */
    public void wrapAndThrow(SerializerProvider provider,
            Throwable t, Object bean, String fieldName)
        throws IOException
    {
        // Unwrap/rethrow rules shared with the index-based overload below.
        throw JsonMappingException.wrapWithPath(_rethrowNonWrappable(provider, t), bean, fieldName);
    }

    /**
     * Variant of {@link #wrapAndThrow(SerializerProvider, Throwable, Object, String)}
     * that uses an element index (instead of a property name) for path information.
     */
    public void wrapAndThrow(SerializerProvider provider,
            Throwable t, Object bean, int index)
        throws IOException
    {
        throw JsonMappingException.wrapWithPath(_rethrowNonWrappable(provider, t), bean, index);
    }

    /**
     * Shared implementation of the unwrap/rethrow rules previously duplicated
     * in both {@code wrapAndThrow} overloads: unwraps
     * {@link InvocationTargetException}s, rethrows {@link Error}s and "plain"
     * {@link IOException}s as-is (and, when exception wrapping is disabled,
     * unchecked exceptions too), and otherwise returns the (unwrapped)
     * throwable for the caller to wrap with path information.
     */
    private Throwable _rethrowNonWrappable(SerializerProvider provider, Throwable t)
        throws IOException
    {
        /* 05-Mar-2009, tatu: But one nasty edge is when we get
         * StackOverflow: usually due to infinite loop. But that
         * usually gets hidden within an InvocationTargetException...
         */
        while (t instanceof InvocationTargetException && t.getCause() != null) {
            t = t.getCause();
        }
        // Errors and "plain" IOExceptions to be passed as is
        if (t instanceof Error) {
            throw (Error) t;
        }
        // Ditto for IOExceptions... except for mapping exceptions!
        boolean wrap = (provider == null) || provider.isEnabled(SerializationFeature.WRAP_EXCEPTIONS);
        if (t instanceof IOException) {
            if (!wrap || !(t instanceof JsonMappingException)) {
                throw (IOException) t;
            }
        } else if (!wrap) { // [JACKSON-407] -- allow disabling wrapping for unchecked exceptions
            if (t instanceof RuntimeException) {
                throw (RuntimeException) t;
            }
        }
        // [JACKSON-55] Caller adds reference information via wrapWithPath
        return t;
    }

    /*
    /**********************************************************
    /* Helper methods, other
    /**********************************************************
     */

    /**
     * Method that can be called to determine if given serializer is the default
     * serializer Jackson uses; as opposed to a custom serializer installed by
     * a module or calling application. Determination is done using
     * {@link JacksonStdImpl} annotation on serializer class.
     */
    protected boolean isDefaultSerializer(JsonSerializer<?> serializer) {
        return (serializer != null && serializer.getClass().getAnnotation(JacksonStdImpl.class) != null);
    }

    /**
     * Helper method that can be used to see if specified property has annotation
     * indicating that a converter is to be used for contained values (contents
     * of structured types; array/List/Map values)
     *
     * @param existingSerializer (optional) configured content
     *    serializer if one already exists.
     *
     * @since 2.2
     */
    protected JsonSerializer<?> findConvertingContentSerializer(SerializerProvider provider,
            BeanProperty prop, JsonSerializer<?> existingSerializer)
        throws JsonMappingException
    {
        final AnnotationIntrospector intr = provider.getAnnotationIntrospector();
        if (intr != null && prop != null) {
            Object convDef = intr.findSerializationContentConverter(prop.getMember());
            if (convDef != null) {
                Converter<Object,Object> conv = provider.converterInstance(prop.getMember(), convDef);
                JavaType delegateType = conv.getOutputType(provider.getTypeFactory());
                if (existingSerializer == null) {
                    existingSerializer = provider.findValueSerializer(delegateType, prop);
                }
                return new StdDelegatingSerializer(conv, delegateType, existingSerializer);
            }
        }
        return existingSerializer;
    }
}
| |
package com.digiarea.closure.preferences.model.controller;
import java.io.File;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Button;
import javafx.scene.control.RadioButton;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.scene.text.FontPosture;
import javafx.util.Callback;
import com.digiarea.closure.model.controller.UIUtils;
import com.digiarea.closure.preferences.model.Editor;
import com.digiarea.closure.preferences.model.bind.ModelFacade;
import com.digiarea.closurefx.IConstants;
import com.digiarea.closurefx.editors.EditorLoader;
/**
* FXML Controller class
*
* @author daginno
*/
public class PreferenceEditorsController extends ClosurePreferencesController implements Initializable {
public PreferenceEditorsController(ModelFacade modelFacade, ResourceBundle bundle) {
super(modelFacade, bundle);
}
@FXML
private TableView<Editor> controlEditors;
@FXML
private TableColumn<Editor, Boolean> controlDefault;
@FXML
private TableColumn<Editor, String> controlName;
@FXML
private TableColumn<Editor, String> controlEditorPath;
@FXML
private TableColumn<Editor, String> controlButton;
/**
* Initializes the controller class.
*/
@Override
public void initialize(URL url, ResourceBundle rb) {
controlEditors.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);
final ToggleGroup toggleGroup = new ToggleGroup();
controlEditors.setEditable(true);
controlEditors.widthProperty().addListener(new ChangeListener<Number>() {
@Override
public void changed(ObservableValue<? extends Number> ov, Number t, Number t1) {
Pane header = (Pane) controlEditors.lookup("TableHeaderRow");
if (header != null && header.isVisible()) {
header.setMaxHeight(0);
header.setMinHeight(0);
header.setPrefHeight(0);
header.setVisible(false);
header.setManaged(false);
}
}
});
controlDefault.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<Editor, Boolean>, ObservableValue<Boolean>>() {
@Override
public ObservableValue<Boolean> call(TableColumn.CellDataFeatures<Editor, Boolean> p) {
if (p.getValue() != null) {
return new SimpleBooleanProperty(p.getValue().isDefault());
} else {
return new SimpleBooleanProperty(false);
}
}
});
controlDefault.setCellFactory(new Callback<TableColumn<Editor, Boolean>, TableCell<Editor, Boolean>>() {
@Override
public TableCell<Editor, Boolean> call(TableColumn<Editor, Boolean> param) {
return new PreferenceEditorsController.EditorRectCell(toggleGroup);
}
});
controlButton.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<Editor, String>, ObservableValue<String>>() {
@Override
public ObservableValue<String> call(TableColumn.CellDataFeatures<Editor, String> p) {
if (p.getValue() != null) {
return p.getValue().nameProperty();
} else {
return new SimpleStringProperty();
}
}
});
controlButton.setCellFactory(new Callback<TableColumn<Editor, String>, TableCell<Editor, String>>() {
@Override
public TableCell<Editor, String> call(TableColumn<Editor, String> param) {
return new PreferenceEditorsController.ButtonCell();
}
});
controlEditorPath.setCellFactory(new Callback<TableColumn<Editor, String>, TableCell<Editor, String>>() {
@Override
public TableCell<Editor, String> call(TableColumn<Editor, String> param) {
return new PreferenceEditorsController.PathCell();
}
});
controlEditorPath.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<Editor, String>, ObservableValue<String>>() {
@Override
public ObservableValue<String> call(TableColumn.CellDataFeatures<Editor, String> p) {
return p.getValue().pathProperty();
}
});
controlName.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<Editor, String>, ObservableValue<String>>() {
@Override
public ObservableValue<String> call(TableColumn.CellDataFeatures<Editor, String> p) {
if (p.getValue() != null) {
return p.getValue().nameProperty();
} else {
return new SimpleStringProperty("<no name>");
}
}
});
}
@FXML
private void handleApplyButtonAction(ActionEvent event) {
modelFacade.saveEditors();
}
public class PathCell extends TableCell<Editor, String> {
public PathCell() {
}
@Override
protected void updateItem(String item, boolean empty) {
super.updateItem(item, empty);
Editor editor = (Editor) getTableRow().getItem();
if (editor != null) {
if (EditorLoader.isSupported(editor)) {
if (editor.getPath() == null || editor.getPath().isEmpty()) {
setText("");
} else if (!EditorLoader.isValid((Editor) getTableRow().getItem())) {
setText("<invalid path> " + item);
setTooltip(new Tooltip(item));
setTextFill(Color.RED);
setFont(Font.font("Arial", FontPosture.ITALIC, 11));
} else {
setText(item);
setTooltip(new Tooltip(item));
setTextFill(Color.BLACK);
setFont(Font.font("Arial", FontPosture.REGULAR, 11));
}
} else {
setText("<unsupported for your os>");
setTextFill(Color.LIGHTCORAL);
setFont(Font.font("Arial", FontPosture.ITALIC, 11));
}
}
}
}
/**
 * Table cell showing a "Browse" button that lets the user pick the folder
 * containing the editor's executable. The chosen path is pushed to the model
 * via {@code modelFacade.updateEditorPath(...)}.
 */
public class ButtonCell extends TableCell<Editor, String> {

    /** Lazily created Browse button, reused for the lifetime of this cell. */
    private Button box;

    public ButtonCell() {
    }

    @Override
    protected void updateItem(String item, boolean empty) {
        super.updateItem(item, empty);
        if (!isEmpty()) {
            if (box == null) {
                createButton();
            }
            setGraphic(box);
        } else {
            // Cells are recycled by the TableView: clear the graphic so an
            // empty row does not keep showing a stale Browse button.
            setGraphic(null);
        }
    }

    /** Builds the Browse button (renamed from the misleading createCheckBox). */
    private void createButton() {
        box = new Button("Browse");
        box.getStyleClass().add(IConstants.CSS_INVISIBLE_BUTTON);
        box.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                File file = UIUtils.chooseFolder(null, "Select source folder");
                if (file != null) {
                    Editor editor = (Editor) getTableRow().getItem();
                    if (editor != null) {
                        modelFacade.updateEditorPath(editor, file.getAbsolutePath());
                    }
                }
            }
        });
    }
}
/**
 * Table cell showing a radio button that marks an {@link Editor} as the
 * default. All cells share one {@link ToggleGroup} so only a single editor
 * can be the default at a time; rows for editors unsupported on this OS are
 * disabled.
 */
public class EditorRectCell extends TableCell<Editor, Boolean> {

    private RadioButton box;
    private final ToggleGroup toggleGroup;

    public EditorRectCell(ToggleGroup toggleGroup) {
        this.toggleGroup = toggleGroup;
    }

    @Override
    protected void updateItem(Boolean item, boolean empty) {
        super.updateItem(item, empty);
        if (!isEmpty()) {
            if (box == null) {
                createCheckBox();
            }
            if (item != null) {
                box.setSelected(item);
            }
            setGraphic(box);
            setTooltip(new Tooltip("Mark as Default"));
            // Editors unsupported on the current OS cannot be made the default.
            getTableRow().setDisable(!EditorLoader.isSupported((Editor) getTableRow().getItem()));
        } else {
            // Recycled cell: drop stale radio button and tooltip.
            setGraphic(null);
            setTooltip(null);
        }
    }

    /** Builds the radio button and wires its selection to the editor model. */
    private void createCheckBox() {
        box = new RadioButton();
        box.setToggleGroup(toggleGroup);
        box.getStyleClass().add(IConstants.CSS_INVISIBLE_RADIO_BUTTON);
        box.selectedProperty().addListener(new ChangeListener<Boolean>() {
            @Override
            public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) {
                // Guard against a null row item (e.g. a toggle fired while the
                // cell is being recycled) instead of risking an NPE.
                Editor editor = (Editor) getTableRow().getItem();
                if (editor != null) {
                    editor.setDefault(box.isSelected());
                    commitEdit(getItem());
                }
            }
        });
    }
}
/** @return the editors table view */
public TableView<Editor> getControlEditors() {
return controlEditors;
}
/** @return the "default editor" radio-button column */
public TableColumn<Editor, Boolean> getControlDefault() {
return controlDefault;
}
/** @return the editor-name column */
public TableColumn<Editor, String> getControlName() {
return controlName;
}
/** @return the editor-path column */
public TableColumn<Editor, String> getControlEditorPath() {
return controlEditorPath;
}
/** @return the Browse-button column */
public TableColumn<Editor, String> getControlButton() {
return controlButton;
}
}
| |
/*
* Copyright 2008-2013 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hippoecm.repository;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import javax.jcr.ItemExistsException;
import javax.jcr.Node;
import javax.jcr.NodeIterator;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.lock.LockException;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.version.VersionException;
import org.hippoecm.repository.api.HippoNodeType;
import org.junit.After;
import org.junit.Before;
import org.onehippo.repository.testutils.RepositoryTestCase;
import static org.junit.Assert.assertEquals;
/**
 * Base class for faceted-navigation repository tests.
 *
 * <p>Builds a complete document tree of depth {@code hierDepth} whose folder names
 * come from {@code alphabet}, fills it with pseudo-randomly faceted documents
 * (properties x/y/z), creates a facet-search node over them, and lets subclasses
 * verify facet counts against an in-memory reference model via {@link #check}.
 */
public abstract class FacetedNavigationAbstractTest extends RepositoryTestCase {

    /** Reference model of one generated document: its id and facet values x/y/z (0 means the property is absent). */
    static class Document {
        int docid;
        int x, y, z;

        public Document(int docid) {
            this.docid = docid;
            x = y = z = 0;
        }
    }

    // Folder names used on every level of the generated tree; widen for a bigger tree.
    private static String alphabet = "abcde"; // abcdefghijklmnopqrstuvwxyz
    // Depth of the generated document hierarchy.
    private static int hierDepth = 3;
    // Save the JCR session every this many generated documents.
    private static int saveInterval = 250;
    private static final int defaultNumDocs = 20;
    private int numDocs = -1;
    private static Random rnd;
    private String[] nodeNames;
    protected boolean verbose = false;
    // Reference documents keyed by docid; check() derives expected counts from it.
    private Map<Integer, Document> documents;

    protected FacetedNavigationAbstractTest() {
    }

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @After
    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

    /** Splits the alphabet into one single-character node name per letter. */
    private void createNodeNames() {
        nodeNames = new String[alphabet.length()];
        for (int i = 0; i < alphabet.length(); i++) {
            nodeNames[i] = alphabet.substring(i, i + 1);
        }
    }

    /**
     * Recursively creates a complete tree of versionable hippo:testdocument
     * folders of depth {@code level} under {@code node}, saving once the
     * bottom level has been created.
     */
    private void createStructure(Node node, int level) throws ItemExistsException, PathNotFoundException, VersionException,
            ConstraintViolationException, LockException, RepositoryException {
        // Build the verbose-output indentation explicitly; a fixed-width padding
        // literal would throw StringIndexOutOfBoundsException once level exceeds
        // its length.
        StringBuilder indent = new StringBuilder();
        for (int d = 0; d < level; d++) {
            indent.append(' ');
        }
        for (int i = 0; i < alphabet.length(); i++) {
            if (verbose) {
                System.out.println(indent.toString() + nodeNames[i]);
            }
            Node child = node.addNode(nodeNames[i], "hippo:testdocument");
            child.addMixin("mix:versionable");
            if (level - 1 > 0) {
                createStructure(child, level - 1);
            }
        }
        if (level - 1 == 0) {
            node.getSession().save();
        }
    }

    /** Picks a (seeded-)random leaf folder under test/documents to hold the next document. */
    protected Node getRandomDocNode() throws RepositoryException {
        // StringBuilder: no synchronization needed for this local buffer.
        StringBuilder path = new StringBuilder("test/documents");
        for (int depth = 0; depth < hierDepth; depth++) {
            path.append("/");
            path.append(nodeNames[rnd.nextInt(alphabet.length())]);
        }
        return session.getRootNode().getNode(path.toString());
    }

    /**
     * Creates the folder tree under {@code node} and fills it with {@code numDocs}
     * pseudo-random documents.
     *
     * @return the reference model of all created documents, keyed by docid
     */
    private Map<Integer, Document> fill(Node node) throws RepositoryException {
        Node docs = node.addNode("documents", "nt:unstructured");
        docs.addMixin("mix:referenceable");
        createStructure(docs, hierDepth);
        session.save();
        // don't change seed. Tests depend on it to stay the same
        rnd = new Random(1L);
        Map<Integer, Document> documents = new HashMap<Integer, Document>();
        for (int docid = 0; docid < numDocs; docid++) {
            Document document = new Document(docid);
            Node doc = getRandomDocNode();
            doc = doc.addNode(Integer.toString(docid), "hippo:testdocument");
            doc.addMixin("mix:versionable");
            doc.setProperty("docid", Integer.toString(docid));
            // Facet properties are only written for non-zero values; 0 means "absent".
            if ((document.x = rnd.nextInt(3)) > 0) {
                doc.setProperty("x", "x" + document.x);
            }
            if ((document.y = rnd.nextInt(3)) > 0) {
                doc.setProperty("y", "y" + document.y);
            }
            if ((document.z = rnd.nextInt(3)) > 0) {
                doc.setProperty("z", "z" + document.z);
            }
            // Periodic saves keep the transient space small for large numDocs.
            if ((docid + 1) % saveInterval == 0) {
                session.save();
            }
            documents.put(Integer.valueOf(docid), document);
        }
        return documents;
    }

    /** Creates the facet-search node "xyz" over test/documents, faceting on x, y and z. */
    final void createSearchNode(Node node) throws RepositoryException {
        node = node.addNode("navigation");
        node = node.addNode("xyz", HippoNodeType.NT_FACETSEARCH);
        node.setProperty(HippoNodeType.HIPPO_QUERYNAME, "xyz");
        node.setProperty(HippoNodeType.HIPPO_DOCBASE, session.getRootNode().getNode("test/documents").getIdentifier());
        node.setProperty(HippoNodeType.HIPPO_FACETS, new String[] { "x", "y", "z" });
    }

    final Node getSearchNode() throws RepositoryException {
        return session.getRootNode().getNode("test/navigation/xyz");
    }

    final Node getDocsNode() throws RepositoryException {
        return session.getRootNode().getNode("test/documents");
    }

    /** Depth-first walk printing hippo:count values when verbose; skips jcr:system. */
    protected void traverse(Node node) throws RepositoryException {
        if (verbose) {
            if (node.hasProperty(HippoNodeType.HIPPO_COUNT)) {
                System.out.println(node.getPath() + "\t" + node.getProperty(HippoNodeType.HIPPO_COUNT).getLong());
            }
        }
        for (NodeIterator iter = node.getNodes(); iter.hasNext();) {
            Node child = iter.nextNode();
            if (!"jcr:system".equals(child.getName())) {
                traverse(child);
            }
        }
    }

    /**
     * Verifies one facet path: the result-set size must match both the node's
     * advertised hippo:count and the count derived from the reference model
     * for the given facet values (0 = wildcard/absent).
     */
    protected void check(String facetPath, int x, int y, int z)
            throws RepositoryException {
        int realCount = -1;
        Node node = session.getRootNode();
        if (facetPath.startsWith("/")) {
            facetPath = facetPath.substring(1); // skip the initial slash
        }
        String[] pathElements = facetPath.split("/");
        try {
            for (int i = 0; i < pathElements.length; i++) {
                node = node.getNode(pathElements[i]);
            }
            if (verbose) {
                System.out.println(facetPath + "\t" + node.getProperty(HippoNodeType.HIPPO_COUNT).getLong());
            }
            Node nodeResultSet = node.getNode(HippoNodeType.HIPPO_RESULTSET);
            NodeIterator iter = nodeResultSet.getNodes();
            realCount = 0;
            while (iter.hasNext()) {
                Node child = iter.nextNode();
                ++realCount;
                if (verbose) {
                    System.out.print("\t" + child.getProperty("docid").getString());
                    System.out.print("\t" + (child.hasProperty("x") ? child.getProperty("x").getString().substring(1) : "0"));
                    System.out.print("\t" + (child.hasProperty("y") ? child.getProperty("y").getString().substring(1) : "0"));
                    System.out.print("\t" + (child.hasProperty("z") ? child.getProperty("z").getString().substring(1) : "0"));
                    System.out.println();
                }
            }
            if (node.hasProperty(HippoNodeType.HIPPO_COUNT)) {
                // Do not narrow the JCR long to int: a large count would be
                // silently truncated before the comparison.
                long obtainedCount = node.getProperty(HippoNodeType.HIPPO_COUNT).getLong();
                assertEquals("counted and indicated mismatch on " + facetPath, realCount, obtainedCount);
            }
        } catch (PathNotFoundException ex) {
            // A missing facet path is treated as "zero results" and verified
            // against the reference model below.
            System.err.println("PathNotFoundException: " + ex.getMessage());
            ex.printStackTrace(System.err);
            realCount = 0;
            if (verbose) {
                System.out.println(facetPath + "\tno results");
            }
        }
        int checkedCount = 0;
        if (verbose) {
            System.out.println();
        }
        for (Document document : documents.values()) {
            if ((x == 0 || x == document.x) && (y == 0 || y == document.y) && (z == 0 || z == document.z)) {
                if (verbose) {
                    System.out.println("\t" + document.docid + "\t" + document.x + "\t" + document.y + "\t" + document.z);
                }
                ++checkedCount;
            }
        }
        if (verbose) {
            System.out.println(facetPath + "\t" + realCount + "\t" + checkedCount);
        }
        assertEquals("counted and reference mismatch on " + facetPath, checkedCount, realCount);
    }

    /** Builds the test fixture: document tree, reference model and facet-search node. */
    final void commonStart(int numDocs) throws RepositoryException {
        this.numDocs = numDocs;
        Node test = session.getRootNode().addNode("test");
        createNodeNames();
        documents = fill(test);
        // do save and refresh to make sure the uuid is generated
        session.save();
        session.refresh(false);
        createSearchNode(test);
        session.save();
        session.refresh(false);
    }

    final void commonStart() throws RepositoryException {
        /**
         * DefaultNumDocs results in:
         /test/navigation/xyz 25
         /test/navigation/xyz/x1 8
         /test/navigation/xyz/x1/y1 3
         /test/navigation/xyz/x1/y1/z1 1
         /test/navigation/xyz/x1/y1/z1/hippo:resultset 1
         /test/navigation/xyz/x1/y1/hippo:resultset 3
         /test/navigation/xyz/x1/y2 3
         /test/navigation/xyz/x1/y2/z1 1
         /test/navigation/xyz/x1/y2/z1/hippo:resultset 1
         /test/navigation/xyz/x1/y2/z2 1
         /test/navigation/xyz/x1/y2/z2/hippo:resultset 1
         /test/navigation/xyz/x1/y2/hippo:resultset 3
         /test/navigation/xyz/x1/hippo:resultset 8
         /test/navigation/xyz/x2 6
         /test/navigation/xyz/x2/y1 6
         /test/navigation/xyz/x2/y1/z1 2
         /test/navigation/xyz/x2/y1/z1/hippo:resultset 2
         /test/navigation/xyz/x2/y1/z2 2
         /test/navigation/xyz/x2/y1/z2/hippo:resultset 2
         /test/navigation/xyz/x2/y1/hippo:resultset 6
         /test/navigation/xyz/x2/hippo:resultset 6
         /test/navigation/xyz/hippo:resultset 25
         */
        commonStart(defaultNumDocs);
    }

    final void commonEnd() throws RepositoryException {
        // Nothing to tear down beyond what RepositoryTestCase does.
    }

    public boolean getVerbose() {
        return verbose;
    }

    public void setVerbose(boolean verbose) {
        this.verbose = verbose;
    }
}
| |
// ============================================================================
//
// Copyright (C) 2006-2015 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.daikon.properties;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.*;
import static org.talend.daikon.properties.property.PropertyFactory.*;
import java.util.EnumSet;
import org.apache.commons.lang3.reflect.TypeUtils;
import org.junit.Test;
import org.talend.daikon.properties.presentation.Widget;
import org.talend.daikon.properties.property.Property;
import org.talend.daikon.properties.property.Property.Flags;
import org.talend.daikon.properties.property.PropertyFactory;
import org.talend.daikon.properties.property.StringProperty;
import com.cedarsoftware.util.io.JsonReader;
import com.cedarsoftware.util.io.JsonWriter;
/**
 * Unit tests for {@link Property}: basic accessors, widget-driven visibility
 * flags, flag manipulation and JSON round-tripping, tagged values, possible
 * values, raw-type behavior and the equals contract.
 */
public class PropertyTest {
// Exercises the whole accessor surface of Property, including fluent setters.
@Test
public void testProperty() {
Property<String> element = newProperty(null);
assertNull(element.getName());
assertEquals(element, element.setName("testName"));
assertEquals("testName", element.getName());
// displayName use the name
assertEquals("property.testName.displayName", element.getDisplayName());
assertEquals(element, element.setDisplayName("testDisplayName"));
assertEquals("testDisplayName", element.getDisplayName());
assertNull(element.getTitle());
assertEquals(element, element.setTitle("testTitle"));
assertEquals("testTitle", element.getTitle());
assertEquals(TypeUtils.toString(String.class), element.getType());
// size: -1 means unbounded
assertEquals(-1, element.getSize());
assertTrue(element.isSizeUnbounded());
assertEquals(element, element.setSize(28));
assertEquals(28, element.getSize());
assertFalse(element.isSizeUnbounded());
assertEquals(element, element.setSize(-1));
assertTrue(element.isSizeUnbounded());
// required is driven by occurMinTimes > 0
assertEquals(0, element.getOccurMinTimes());
assertFalse(element.isRequired());
assertEquals(element, element.setOccurMinTimes(33));
assertEquals(33, element.getOccurMinTimes());
assertTrue(element.isRequired());
assertEquals(0, element.getOccurMaxTimes());
assertEquals(element, element.setOccurMaxTimes(42));
assertEquals(42, element.getOccurMaxTimes());
assertEquals(element, element.setOccurMinTimes(0));
// setRequired() forces min/max occurrences to 1
element.setRequired();
assertTrue(element.isRequired());
assertEquals(1, element.getOccurMinTimes());
assertEquals(1, element.getOccurMaxTimes());
// setRequired(false) resets only the minimum
element.setRequired(false);
assertEquals(0, element.getOccurMinTimes());
assertEquals(1, element.getOccurMaxTimes());
assertEquals(0, element.getPrecision());
assertEquals(element, element.setPrecision(222));
assertEquals(222, element.getPrecision());
assertNull(element.getPattern());
assertEquals(element, element.setPattern("mypattern"));
assertEquals("mypattern", element.getPattern());
assertNull(element.getValue());
element.setValue("mypattern");
assertEquals("mypattern", element.getValue());
assertFalse(element.isNullable());
assertEquals(element, element.setNullable(true));
assertTrue(element.isNullable());
assertEquals(element, element.setNullable(false));
assertFalse(element.isNullable());
// toStringIndent prefixes four spaces per indent level
assertEquals("testName", element.toStringIndent(0));
assertEquals("    testName", element.toStringIndent(1));
assertEquals("                testName", element.toStringIndent(4));
}
// Widget.setHidden(...) must be mirrored by the property's HIDDEN flag.
@Test
public void testHiddenForProperties() {
Property<String> element = newProperty("element");
assertFalse(element.isFlag(Property.Flags.HIDDEN));
Widget widget = new Widget(element);
assertFalse(element.isFlag(Property.Flags.HIDDEN));
widget.setHidden(true);
assertTrue(element.isFlag(Property.Flags.HIDDEN));
widget.setHidden(false);
assertFalse(element.isFlag(Property.Flags.HIDDEN));
widget.setHidden();
assertTrue(element.isFlag(Property.Flags.HIDDEN));
}
// Widget.setVisible(...) is the inverse view of the HIDDEN flag.
@Test
public void testVisibleForProperties() {
Property<String> element = newProperty("element");
assertFalse(element.isFlag(Property.Flags.HIDDEN));
Widget widget = new Widget(element);
assertFalse(element.isFlag(Property.Flags.HIDDEN));
widget.setVisible(false);
assertTrue(element.isFlag(Property.Flags.HIDDEN));
widget.setVisible(true);
assertFalse(element.isFlag(Property.Flags.HIDDEN));
widget.setVisible(false);
assertTrue(element.isFlag(Property.Flags.HIDDEN));
widget.setVisible();
assertFalse(element.isFlag(Property.Flags.HIDDEN));
}
// Flags are independent, and still settable after a JSON round trip.
@Test
public void testFlags() {
Property<String> element = newProperty("element");
assertFalse(element.isFlag(Property.Flags.ENCRYPT));
assertFalse(element.isFlag(Property.Flags.HIDDEN));
element.addFlag(Property.Flags.ENCRYPT);
assertTrue(element.isFlag(Property.Flags.ENCRYPT));
assertFalse(element.isFlag(Property.Flags.HIDDEN));
element.addFlag(Property.Flags.HIDDEN);
assertTrue(element.isFlag(Property.Flags.ENCRYPT));
assertTrue(element.isFlag(Property.Flags.HIDDEN));
element.removeFlag(Property.Flags.HIDDEN);
assertTrue(element.isFlag(Property.Flags.ENCRYPT));
assertFalse(element.isFlag(Property.Flags.HIDDEN));
element.removeFlag(Property.Flags.ENCRYPT);
assertFalse(element.isFlag(Property.Flags.ENCRYPT));
assertFalse(element.isFlag(Property.Flags.HIDDEN));
// serialize/deserialize, then verify flags can still be added
String elementStr = JsonWriter.objectToJson(element);
element = (Property) JsonReader.jsonToJava(elementStr);
element.addFlag(Property.Flags.HIDDEN);
element.addFlag(Property.Flags.ENCRYPT);
assertTrue(element.isFlag(Property.Flags.ENCRYPT));
}
// copyTaggedValues must merge the source's tags without dropping existing ones.
@Test
public void testCopyTaggedValues() {
Property<String> element = PropertyFactory.newString("element");
element.setTaggedValue("foo", "foo1");
Property<String> element2 = PropertyFactory.newString("element2");
element2.setTaggedValue("bar", "bar1");
assertEquals("foo1", element.getTaggedValue("foo"));
assertNotEquals("bar1", element.getTaggedValue("bar"));
element.copyTaggedValues(element2);
assertEquals("foo1", element.getTaggedValue("foo"));
assertEquals("bar1", element.getTaggedValue("bar"));
}
// Possible values fall back to the raw value when no i18n entry exists.
@Test
public void testSetPossibleValuesNotNamedNamedThing() {
StringProperty stringProperty = new StringProperty("foo") {// in order to have i18n related to this class
};
stringProperty.setPossibleValues("possible.value");
assertEquals("possible.value", stringProperty.getPossibleValuesDisplayName("possible.value"));
stringProperty.setPossibleValues("possible.value.3");
assertEquals("possible value 3 i18n", stringProperty.getPossibleValuesDisplayName("possible.value.3"));
}
// Deliberate raw-type usage: erasure lets a String be stored in an Integer property.
@Test
public void testType() {
Property foo = PropertyFactory.newInteger("foo");
foo.setValue("bar");
assertEquals("bar", foo.getValue());
}
// Encrypting a value of an unknown type must be a silent no-op.
@Test
public void testEncryptDoNothing() {
class NotAnExistingType {// left empty on purpose
}
Property<NotAnExistingType> foo = PropertyFactory.newProperty(NotAnExistingType.class, "foo")
.setFlags(EnumSet.of(Flags.ENCRYPT));
NotAnExistingType notAnExistingTypeInstance = new NotAnExistingType();
foo.setValue(notAnExistingTypeInstance);
assertEquals(notAnExistingTypeInstance, foo.getValue());
foo.encryptStoredValue(true);
assertEquals(notAnExistingTypeInstance, foo.getValue());
foo.encryptStoredValue(false);
assertEquals(notAnExistingTypeInstance, foo.getValue());
}
// Spot-checks of the equals contract for Property.
@Test
public void testEquals() {
Property<String> prop1 = newProperty("name");
prop1.setValue("foo");
Property<String> prop2 = newProperty("name");
prop2.setValue("foo");
Property<String> prop3 = newProperty("name");
prop3.setValue("bar");
/* Reflexive */
assertThat(prop1.equals(prop1), is(Boolean.TRUE));
assertThat(prop2.equals(prop2), is(Boolean.TRUE));
/* Symmetric */
assertThat(prop1.equals(prop2), is(Boolean.TRUE));
assertThat(prop2.equals(prop1), is(Boolean.TRUE));
/* Non-nullity and inequality (these do not test transitivity) */
assertThat(prop1.equals(null), is(Boolean.FALSE));
assertThat(prop2.equals(null), is(Boolean.FALSE));
assertThat(prop1.equals(prop3), is(Boolean.FALSE));
}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl.clientside;
import com.hazelcast.cache.CacheNotExistsException;
import com.hazelcast.client.AuthenticationException;
import com.hazelcast.client.UndefinedErrorCodeException;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.ClientProtocolErrorCodes;
import com.hazelcast.client.impl.protocol.codec.ErrorCodec;
import com.hazelcast.client.impl.protocol.exception.MaxMessageSizeExceeded;
import com.hazelcast.config.ConfigurationException;
import com.hazelcast.config.InvalidConfigurationException;
import com.hazelcast.core.ConsistencyLostException;
import com.hazelcast.core.DuplicateInstanceNameException;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.core.HazelcastOverloadException;
import com.hazelcast.core.IndeterminateOperationStateException;
import com.hazelcast.core.LocalMemberResetException;
import com.hazelcast.core.MemberLeftException;
import com.hazelcast.core.OperationTimeoutException;
import com.hazelcast.crdt.MutationDisallowedException;
import com.hazelcast.crdt.TargetNotReplicaException;
import com.hazelcast.durableexecutor.StaleTaskIdException;
import com.hazelcast.flakeidgen.impl.NodeIdOutOfRangeException;
import com.hazelcast.internal.cluster.impl.ConfigMismatchException;
import com.hazelcast.map.QueryResultSizeExceededException;
import com.hazelcast.map.ReachedMaxSizeException;
import com.hazelcast.mapreduce.RemoteMapReduceException;
import com.hazelcast.mapreduce.TopologyChangedException;
import com.hazelcast.memory.NativeOutOfMemoryError;
import com.hazelcast.nio.serialization.HazelcastSerializationException;
import com.hazelcast.partition.NoDataMemberInClusterException;
import com.hazelcast.query.QueryException;
import com.hazelcast.quorum.QuorumException;
import com.hazelcast.replicatedmap.ReplicatedMapCantBeCreatedOnLiteMemberException;
import com.hazelcast.ringbuffer.StaleSequenceException;
import com.hazelcast.scheduledexecutor.DuplicateTaskException;
import com.hazelcast.scheduledexecutor.StaleTaskException;
import com.hazelcast.spi.exception.CallerNotMemberException;
import com.hazelcast.spi.exception.DistributedObjectDestroyedException;
import com.hazelcast.spi.exception.PartitionMigratingException;
import com.hazelcast.spi.exception.ResponseAlreadySentException;
import com.hazelcast.spi.exception.RetryableHazelcastException;
import com.hazelcast.spi.exception.RetryableIOException;
import com.hazelcast.spi.exception.ServiceNotFoundException;
import com.hazelcast.spi.exception.TargetDisconnectedException;
import com.hazelcast.spi.exception.TargetNotMemberException;
import com.hazelcast.spi.exception.WrongTargetException;
import com.hazelcast.topic.TopicOverloadException;
import com.hazelcast.transaction.TransactionException;
import com.hazelcast.transaction.TransactionNotActiveException;
import com.hazelcast.transaction.TransactionTimedOutException;
import com.hazelcast.util.AddressUtil;
import com.hazelcast.wan.WANReplicationQueueFullException;
import javax.cache.CacheException;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriterException;
import javax.cache.processor.EntryProcessorException;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.LoginException;
import javax.transaction.xa.XAException;
import java.io.EOFException;
import java.io.IOException;
import java.io.NotSerializableException;
import java.io.UTFDataFormatException;
import java.net.SocketException;
import java.net.URISyntaxException;
import java.security.AccessControlException;
import java.util.Arrays;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Holds the client protocol error codes and provides the means of:
 * 1) creating an exception from an error code
 * 2) getting the error code of a given exception
 */
public class ClientExceptionFactory {
private static final String CAUSED_BY_STACKTRACE_MARKER = "###### Caused by:";
/**
* This pattern extracts errorCode and exception message from the encoded Caused-by marker.
* It has the form:
* <pre> ###### Caused by: (<errorCode>) <cause.toString()> ------</pre>
*
* As per {@link Throwable#toString()}, this has the form
* <pre><exception class>: <message></pre>
*
* if message is present, or just {@code <exception class>}, if message is null.
*
* <p>Commonly, exceptions with causes are created like this:
* <pre>new RuntimeException("Additional message: " + e, e);</pre>
*
* Thus, this pattern matches the marker, error code in parentheses, text up to the semicolon
* (reluctantly, as to find the first one), and optional semicolon and the rest of message.
*/
private static final Pattern CAUSED_BY_STACKTRACE_PARSER = Pattern.compile(Pattern.quote(CAUSED_BY_STACKTRACE_MARKER)
+ " \\((-?[0-9]+)\\) (.+?)(: (.*))? ------", Pattern.DOTALL);
private static final int CAUSED_BY_STACKTRACE_PARSER_ERROR_CODE_GROUP = 1;
private static final int CAUSED_BY_STACKTRACE_PARSER_CLASS_NAME_GROUP = 2;
private static final int CAUSED_BY_STACKTRACE_PARSER_MESSAGE_GROUP = 4;
private final Map<Integer, ExceptionFactory> intToFactory = new HashMap<Integer, ExceptionFactory>();
public ClientExceptionFactory(boolean jcacheAvailable) {
if (jcacheAvailable) {
register(ClientProtocolErrorCodes.CACHE, CacheException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CacheException(message, cause);
}
});
register(ClientProtocolErrorCodes.CACHE_LOADER, CacheLoaderException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CacheLoaderException(message, cause);
}
});
register(ClientProtocolErrorCodes.CACHE_WRITER, CacheWriterException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CacheWriterException(message, cause);
}
});
register(ClientProtocolErrorCodes.ENTRY_PROCESSOR, EntryProcessorException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new EntryProcessorException(message, cause);
}
});
}
register(ClientProtocolErrorCodes.ARRAY_INDEX_OUT_OF_BOUNDS, ArrayIndexOutOfBoundsException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ArrayIndexOutOfBoundsException(message);
}
});
register(ClientProtocolErrorCodes.ARRAY_STORE, ArrayStoreException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ArrayStoreException(message);
}
});
register(ClientProtocolErrorCodes.AUTHENTICATION, AuthenticationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new AuthenticationException(message);
}
});
register(ClientProtocolErrorCodes.CACHE_NOT_EXISTS, CacheNotExistsException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CacheNotExistsException(message);
}
});
register(ClientProtocolErrorCodes.CALLER_NOT_MEMBER, CallerNotMemberException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CallerNotMemberException(message);
}
});
register(ClientProtocolErrorCodes.CANCELLATION, CancellationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new CancellationException(message);
}
});
register(ClientProtocolErrorCodes.CLASS_CAST, ClassCastException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ClassCastException(message);
}
});
register(ClientProtocolErrorCodes.CLASS_NOT_FOUND, ClassNotFoundException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ClassNotFoundException(message, cause);
}
});
register(ClientProtocolErrorCodes.CONCURRENT_MODIFICATION, ConcurrentModificationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ConcurrentModificationException(message);
}
});
register(ClientProtocolErrorCodes.CONFIG_MISMATCH, ConfigMismatchException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ConfigMismatchException(message);
}
});
register(ClientProtocolErrorCodes.CONFIGURATION, ConfigurationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ConfigurationException(message);
}
});
register(ClientProtocolErrorCodes.DISTRIBUTED_OBJECT_DESTROYED, DistributedObjectDestroyedException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new DistributedObjectDestroyedException(message);
}
});
register(ClientProtocolErrorCodes.DUPLICATE_INSTANCE_NAME, DuplicateInstanceNameException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new DuplicateInstanceNameException(message);
}
});
register(ClientProtocolErrorCodes.EOF, EOFException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new EOFException(message);
}
});
register(ClientProtocolErrorCodes.EXECUTION, ExecutionException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ExecutionException(message, cause);
}
});
register(ClientProtocolErrorCodes.HAZELCAST, HazelcastException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new HazelcastException(message, cause);
}
});
register(ClientProtocolErrorCodes.HAZELCAST_INSTANCE_NOT_ACTIVE, HazelcastInstanceNotActiveException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new HazelcastInstanceNotActiveException(message);
}
});
register(ClientProtocolErrorCodes.HAZELCAST_OVERLOAD, HazelcastOverloadException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new HazelcastOverloadException(message);
}
});
register(ClientProtocolErrorCodes.HAZELCAST_SERIALIZATION, HazelcastSerializationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new HazelcastSerializationException(message, cause);
}
});
register(ClientProtocolErrorCodes.IO, IOException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IOException(message, cause);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_ARGUMENT, IllegalArgumentException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalArgumentException(message, cause);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_ACCESS_EXCEPTION, IllegalAccessException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalAccessException(message);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_ACCESS_ERROR, IllegalAccessError.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalAccessError(message);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_MONITOR_STATE, IllegalMonitorStateException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalMonitorStateException(message);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_STATE, IllegalStateException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalStateException(message, cause);
}
});
register(ClientProtocolErrorCodes.ILLEGAL_THREAD_STATE, IllegalThreadStateException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IllegalThreadStateException(message);
}
});
register(ClientProtocolErrorCodes.INDEX_OUT_OF_BOUNDS, IndexOutOfBoundsException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IndexOutOfBoundsException(message);
}
});
register(ClientProtocolErrorCodes.INTERRUPTED, InterruptedException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new InterruptedException(message);
}
});
register(ClientProtocolErrorCodes.INVALID_ADDRESS, AddressUtil.InvalidAddressException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new AddressUtil.InvalidAddressException(message, false);
}
});
register(ClientProtocolErrorCodes.INVALID_CONFIGURATION, InvalidConfigurationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new InvalidConfigurationException(message, cause);
}
});
register(ClientProtocolErrorCodes.MEMBER_LEFT, MemberLeftException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new MemberLeftException(message);
}
});
register(ClientProtocolErrorCodes.NEGATIVE_ARRAY_SIZE, NegativeArraySizeException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NegativeArraySizeException(message);
}
});
register(ClientProtocolErrorCodes.NO_SUCH_ELEMENT, NoSuchElementException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NoSuchElementException(message);
}
});
register(ClientProtocolErrorCodes.NOT_SERIALIZABLE, NotSerializableException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NotSerializableException(message);
}
});
register(ClientProtocolErrorCodes.NULL_POINTER, NullPointerException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NullPointerException(message);
}
});
register(ClientProtocolErrorCodes.OPERATION_TIMEOUT, OperationTimeoutException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new OperationTimeoutException(message);
}
});
register(ClientProtocolErrorCodes.PARTITION_MIGRATING, PartitionMigratingException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new PartitionMigratingException(message);
}
});
register(ClientProtocolErrorCodes.QUERY, QueryException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new QueryException(message, cause);
}
});
register(ClientProtocolErrorCodes.QUERY_RESULT_SIZE_EXCEEDED, QueryResultSizeExceededException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new QueryResultSizeExceededException(message);
}
});
register(ClientProtocolErrorCodes.QUORUM, QuorumException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new QuorumException(message);
}
});
register(ClientProtocolErrorCodes.REACHED_MAX_SIZE, ReachedMaxSizeException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ReachedMaxSizeException(message);
}
});
register(ClientProtocolErrorCodes.REJECTED_EXECUTION, RejectedExecutionException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new RejectedExecutionException(message, cause);
}
});
register(ClientProtocolErrorCodes.REMOTE_MAP_REDUCE, RemoteMapReduceException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new RemoteMapReduceException(message, Collections.<Exception>emptyList());
}
});
register(ClientProtocolErrorCodes.RESPONSE_ALREADY_SENT, ResponseAlreadySentException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ResponseAlreadySentException(message);
}
});
register(ClientProtocolErrorCodes.RETRYABLE_HAZELCAST, RetryableHazelcastException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new RetryableHazelcastException(message, cause);
}
});
register(ClientProtocolErrorCodes.RETRYABLE_IO, RetryableIOException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new RetryableIOException(message, cause);
}
});
register(ClientProtocolErrorCodes.RUNTIME, RuntimeException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new RuntimeException(message, cause);
}
});
register(ClientProtocolErrorCodes.SECURITY, SecurityException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new SecurityException(message, cause);
}
});
register(ClientProtocolErrorCodes.SOCKET, SocketException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new SocketException(message);
}
});
register(ClientProtocolErrorCodes.STALE_SEQUENCE, StaleSequenceException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new StaleSequenceException(message, 0);
}
});
register(ClientProtocolErrorCodes.TARGET_DISCONNECTED, TargetDisconnectedException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TargetDisconnectedException(message);
}
});
register(ClientProtocolErrorCodes.TARGET_NOT_MEMBER, TargetNotMemberException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TargetNotMemberException(message);
}
});
register(ClientProtocolErrorCodes.TIMEOUT, TimeoutException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TimeoutException(message);
}
});
register(ClientProtocolErrorCodes.TOPIC_OVERLOAD, TopicOverloadException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TopicOverloadException(message);
}
});
register(ClientProtocolErrorCodes.TOPOLOGY_CHANGED, TopologyChangedException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TopologyChangedException(message);
}
});
register(ClientProtocolErrorCodes.TRANSACTION, TransactionException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TransactionException(message, cause);
}
});
register(ClientProtocolErrorCodes.TRANSACTION_NOT_ACTIVE, TransactionNotActiveException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TransactionNotActiveException(message);
}
});
register(ClientProtocolErrorCodes.TRANSACTION_TIMED_OUT, TransactionTimedOutException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TransactionTimedOutException(message, cause);
}
});
register(ClientProtocolErrorCodes.URI_SYNTAX, URISyntaxException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new URISyntaxException("not available", message);
}
});
register(ClientProtocolErrorCodes.UTF_DATA_FORMAT, UTFDataFormatException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new UTFDataFormatException(message);
}
});
register(ClientProtocolErrorCodes.UNSUPPORTED_OPERATION, UnsupportedOperationException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new UnsupportedOperationException(message, cause);
}
});
register(ClientProtocolErrorCodes.WRONG_TARGET, WrongTargetException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new WrongTargetException(message);
}
});
register(ClientProtocolErrorCodes.XA, XAException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new XAException(message);
}
});
register(ClientProtocolErrorCodes.ACCESS_CONTROL, AccessControlException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new AccessControlException(message);
}
});
register(ClientProtocolErrorCodes.LOGIN, LoginException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new LoginException(message);
}
});
register(ClientProtocolErrorCodes.UNSUPPORTED_CALLBACK, UnsupportedCallbackException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new UnsupportedCallbackException(null, message);
}
});
register(ClientProtocolErrorCodes.NO_DATA_MEMBER, NoDataMemberInClusterException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NoDataMemberInClusterException(message);
}
});
register(ClientProtocolErrorCodes.REPLICATED_MAP_CANT_BE_CREATED, ReplicatedMapCantBeCreatedOnLiteMemberException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ReplicatedMapCantBeCreatedOnLiteMemberException(message);
}
});
register(ClientProtocolErrorCodes.MAX_MESSAGE_SIZE_EXCEEDED, MaxMessageSizeExceeded.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new MaxMessageSizeExceeded();
}
});
register(ClientProtocolErrorCodes.WAN_REPLICATION_QUEUE_FULL, WANReplicationQueueFullException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new WANReplicationQueueFullException(message);
}
});
register(ClientProtocolErrorCodes.ASSERTION_ERROR, AssertionError.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new AssertionError(message);
}
});
register(ClientProtocolErrorCodes.OUT_OF_MEMORY_ERROR, OutOfMemoryError.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new OutOfMemoryError(message);
}
});
register(ClientProtocolErrorCodes.STACK_OVERFLOW_ERROR, StackOverflowError.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new StackOverflowError(message);
}
});
register(ClientProtocolErrorCodes.NATIVE_OUT_OF_MEMORY_ERROR, NativeOutOfMemoryError.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NativeOutOfMemoryError(message, cause);
}
});
register(ClientProtocolErrorCodes.SERVICE_NOT_FOUND, ServiceNotFoundException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ServiceNotFoundException(message);
}
});
register(ClientProtocolErrorCodes.STALE_TASK_ID, StaleTaskIdException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new StaleTaskIdException(message);
}
});
register(ClientProtocolErrorCodes.DUPLICATE_TASK, DuplicateTaskException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new DuplicateTaskException(message);
}
});
register(ClientProtocolErrorCodes.STALE_TASK, StaleTaskException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new StaleTaskException(message);
}
});
register(ClientProtocolErrorCodes.LOCAL_MEMBER_RESET, LocalMemberResetException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new LocalMemberResetException(message);
}
});
register(ClientProtocolErrorCodes.INDETERMINATE_OPERATION_STATE, IndeterminateOperationStateException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new IndeterminateOperationStateException(message, cause);
}
});
register(ClientProtocolErrorCodes.FLAKE_ID_NODE_ID_OUT_OF_RANGE_EXCEPTION, NodeIdOutOfRangeException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new NodeIdOutOfRangeException(message);
}
});
register(ClientProtocolErrorCodes.TARGET_NOT_REPLICA_EXCEPTION, TargetNotReplicaException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new TargetNotReplicaException(message);
}
});
register(ClientProtocolErrorCodes.MUTATION_DISALLOWED_EXCEPTION, MutationDisallowedException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new MutationDisallowedException(message);
}
});
register(ClientProtocolErrorCodes.CONSISTENCY_LOST_EXCEPTION, ConsistencyLostException.class, new ExceptionFactory() {
@Override
public Throwable createException(String message, Throwable cause) {
return new ConsistencyLostException(message);
}
});
}
/**
 * Builds the {@link Throwable} described by an error-response {@code ClientMessage}.
 * <p>
 * Members of version 3.8+ flatten the whole cause chain into a single stack trace,
 * separating the individual causes with synthetic marker frames whose class name
 * starts with {@code CAUSED_BY_STACKTRACE_MARKER}. When such a marker is present the
 * chain is rebuilt here by walking the stack trace backwards; otherwise the legacy
 * (pre-3.8) explicit cause fields are used.
 */
public Throwable createException(ClientMessage clientMessage) {
    ErrorCodec parameters = ErrorCodec.decode(clientMessage);
    // first, try to search for the marker to see, if there are any "hidden" causes
    boolean causedByMarkerFound = false;
    for (int i = 0; !causedByMarkerFound && i < parameters.stackTrace.length; i++) {
        causedByMarkerFound = parameters.stackTrace[i].getClassName().startsWith(CAUSED_BY_STACKTRACE_MARKER);
    }
    if (causedByMarkerFound) {
        // This exception has a cause and is from a 3.8+ node
        StackTraceElement[] st = parameters.stackTrace;
        // Iterate from the end; [pos, lastPos) is the slice of frames belonging to
        // the throwable currently being reconstructed
        int pos = st.length;
        int lastPos = pos;
        // innermost throwable built so far; becomes the cause of the next one built
        Throwable t = null;
        while (pos >= 0) {
            Throwable t1 = null;
            if (pos == 0) {
                // the root exception
                t1 = createException(parameters.errorCode, parameters.className, parameters.message, t);
            } else if (st[pos - 1].getClassName().startsWith(CAUSED_BY_STACKTRACE_MARKER)) {
                // marker frame encoding errorCode/className/message of one cause
                Matcher matcher = CAUSED_BY_STACKTRACE_PARSER.matcher(st[pos - 1].getClassName());
                if (matcher.find()) {
                    int errorCode = Integer.parseInt(matcher.group(CAUSED_BY_STACKTRACE_PARSER_ERROR_CODE_GROUP));
                    String className = matcher.group(CAUSED_BY_STACKTRACE_PARSER_CLASS_NAME_GROUP);
                    String message = matcher.group(CAUSED_BY_STACKTRACE_PARSER_MESSAGE_GROUP);
                    t1 = createException(errorCode, className, message, t);
                } else {
                    // unexpected text, just parse somehow
                    t1 = createException(ClientProtocolErrorCodes.UNDEFINED, st[pos - 1].toString(), null, t);
                }
            }
            if (t1 != null) {
                // attach the frames between this marker (exclusive) and the previous slice
                t1.setStackTrace(Arrays.copyOfRange(st, pos, lastPos));
                pos--;
                lastPos = pos;
                t = t1;
            }
            pos--;
        }
        return t;
    } else {
        // In this case, the exception does not have a cause, or is from a pre-3.8 node (3.7 or older)
        Throwable cause = null;
        // this is for backwards compatibility, currently not used (causes and their causes are hidden in the root stacktrace)
        if (parameters.causeClassName != null) {
            cause = createException(parameters.causeErrorCode, parameters.causeClassName, null, null);
        }
        Throwable throwable = createException(parameters.errorCode, parameters.className, parameters.message, cause);
        throwable.setStackTrace(parameters.stackTrace);
        return throwable;
    }
}
/**
 * Creates the throwable registered for {@code errorCode}, falling back to an
 * {@link UndefinedErrorCodeException} when no factory is registered for it.
 */
private Throwable createException(int errorCode, String className, String message, Throwable cause) {
    ExceptionFactory factory = intToFactory.get(errorCode);
    if (factory != null) {
        return factory.createException(message, cause);
    }
    // unknown code: keep the reported server-side class name for diagnostics
    return new UndefinedErrorCodeException(message, className);
}
/**
 * Registers a factory for the given client protocol error code.
 * <p>
 * The factory is probed once with an empty message to verify that it really
 * produces an instance of {@code clazz}; a duplicate code or a mismatching
 * factory is a programming error and fails fast.
 *
 * @param errorCode        client protocol error code; must not be registered yet
 * @param clazz            exception class the factory is expected to produce
 * @param exceptionFactory factory invoked to build the exception
 * @throws HazelcastException if the code is already used or the factory creates
 *                            an instance of a different class than {@code clazz}
 */
// method is used by Jet
@SuppressWarnings("WeakerAccess")
public void register(int errorCode, Class<?> clazz, ExceptionFactory exceptionFactory) {
    if (intToFactory.containsKey(errorCode)) {
        throw new HazelcastException("Code " + errorCode + " already used");
    }
    // probe the factory once to catch copy/paste mistakes at registration time
    Throwable sample = exceptionFactory.createException("", null);
    if (!clazz.equals(sample.getClass())) {
        throw new HazelcastException("Exception factory did not produce an instance of expected class");
    }
    intToFactory.put(errorCode, exceptionFactory);
}
/**
 * Creates one specific client-side exception type from the message and cause
 * decoded out of a protocol error response.
 */
public interface ExceptionFactory {
    Throwable createException(String message, Throwable cause);
}
}
| |
/*
* @(#)NelderMeadSimplex.java created Feb 26, 2006 Casalino
*
* Copyright (c) 1996-2006 Luca Lutterotti All Rights Reserved.
*
* This software is the research result of Luca Lutterotti and it is
* provided as it is as confidential and proprietary information.
* You shall not disclose such Confidential Information and shall use
* it only in accordance with the terms of the license agreement you
* entered into with the author.
*
* THE AUTHOR MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE
* SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE, OR NON-INFRINGEMENT. THE AUTHOR SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING
* THIS SOFTWARE OR ITS DERIVATIVES.
*
*/
package it.unitn.ing.rista.comp;
import java.util.*;
import it.unitn.ing.rista.interfaces.Function;
import it.unitn.ing.rista.util.Misc;
/**
* The NelderMeadSimplex is a class that
* contains methods for finding the values of the
* function parameters that minimise that function
* using the Nelder and Mead Simplex method.
*
* The function needed by the minimisation method
* is supplied by means of the interface, Function
*
* Derived from Dr Michael Thomas Flanagan's class Minimisation
* modified for JDK 1.4 support, removed output, randomize start
* and integration in the Maud package
*
* See original author details in the following as well as the Flanagan copyright:
*
* DATE: April 2003
* MODIFIED: 29 December 2005
*
* DOCUMENTATION:
* See Michael Thomas Flanagan's Java library on-line web page:
* Minimisation.html
*
* Copyright (c) April 2003
*
* PERMISSION TO COPY:
* Permission to use, copy and modify this software and its documentation for
* NON-COMMERCIAL purposes is granted, without fee, provided that an acknowledgement
* to the author, Michael Thomas Flanagan at www.ee.ucl.ac.uk/~mflanaga, appears in all copies.
*
* Dr Michael Thomas Flanagan makes no representations about the suitability
* or fitness of the software for any or for a particular purpose.
* Michael Thomas Flanagan shall not be liable for any damages suffered
* as a result of using, modifying or distributing this software or its derivatives.
*
* @author Luca Lutterotti
* @author Michael Thomas Flanagan
* @version $Revision: 1.1 $, $Date: 2006/07/20 14:06:04 $
* @since JDK1.1
*/
public class NelderMeadSimplex {
private int nParam = 0; // number of unknown parameters to be estimated
private double[] paramValue = null; // function parameter values (returned at function minimum)
private String[] paraName = null; // names of parameters, eg, c[0], c[1], c[2] . . .
private double functValue = 0.0D; // current value of the function to be minimised
private double lastFunctValNoCnstrnt = 0.0D;// last function value with no constraint penalty
private double minimum = 0.0D; // value of the function to be minimised at the minimum
private int prec = 4; // number of places to which double variables are truncated on output to text files
private int field = 13; // field width on output to text files
private boolean convStatus = false; // status of minimisation on exiting minimisation method
// = true - convergence criterion was met
// = false - convergence criterion not met - current estimates returned
private int scaleOpt = 0; // if = 0; no scaling of initial estimates
// if = 1; initial simplex estimates scaled to unity
// if = 2; initial estimates scaled by user provided values in scale[]
// (default = 0)
private double[] scale = null; // values to scale initial estimate (see scaleOpt above)
private boolean penalty = false; // true if single parameter penalty function is included
private boolean sumPenalty = false; // true if multiple parameter penalty function is included
private int nConstraints = 0; // number of single parameter constraints
private int nSumConstraints = 0; // number of multiple parameter constraints
private Vector penalties = new Vector();// layout: element 0 is the method index,
// element 1 the number of single parameter constraints,
// then repeated for each constraint:
// penalty parameter index,
// below or above constraint flag,
// constraint boundary value
private Vector sumPenalties = new Vector();// layout: element 0 is the constraint method index,
// element 1 the number of multiple parameter constraints,
// then repeated for each constraint:
// number of parameters in summation
// penalty parameter indices,
// summation signs
// below or above constraint flag,
// constraint boundary value
private int[] penaltyCheck = null; // = -1 values below the single constraint boundary not allowed
// = +1 values above the single constraint boundary not allowed
private int[] sumPenaltyCheck = null; // = -1 values below the multiple constraint boundary not allowed
// = +1 values above the multiple constraint boundary not allowed
private double penaltyWeight = 1.0e30; // weight for the penalty functions
private int[] penaltyParam = null; // indices of parameters subject to single parameter constraint
private int[][] sumPenaltyParam = null; // indices of parameters subject to multiple parameter constraint
private int[][] sumPlusOrMinus = null; // sign before each parameter in multiple parameter summation
private int[] sumPenaltyNumber = null; // number of parameters in each multiple parameter constraint
private double[] constraints = null; // single parameter constraint values
private double[] sumConstraints = null; // multiple parameter constraint values
private int constraintMethod = 0; // constraint method number
// =0: cliff to the power two (only method at present)
private int nMax = 3000; // Nelder and Mead simplex maximum number of iterations
private int nIter = 0; // Nelder and Mead simplex number of iterations performed
private int konvge = 3; // Nelder and Mead simplex number of restarts allowed
private int kRestart = 0; // Nelder and Mead simplex number of restarts taken
private double fTol = 1e-13; // Nelder and Mead simplex convergence tolerance
public double rCoeff = 1.0D; // Nelder and Mead simplex reflection coefficient
public double eCoeff = 2.0D; // Nelder and Mead simplex extension coefficient
public double cCoeff = 0.5D; // Nelder and Mead simplex contraction coefficient
private double[] startH = null; // Nelder and Mead simplex initial estimates
private double[] step = null; // Nelder and Mead simplex step values
private double dStep = 0.5D; // Nelder and Mead simplex default step value
private int minTest = 0; // Nelder and Mead minimum test
// = 0; tests simplex sd < fTol
// allows options for further tests to be added later
private double simplexSd = 0.0D; // simplex standard deviation
ec.util.MersenneTwisterFast randomizer = null; // random source used when randomizeStart is true
public boolean randomizeStart = false; // if true, the initial simplex apices are drawn at random
//Constructors
// Default constructor: minimiser with no parent refinement to notify
public NelderMeadSimplex() {
}
// Parent refinement (if any) notified of the current solution after each iteration
SimplexMethodRefinement theParent = null;
// Constructor registering the parent refinement that receives intermediate solutions
public NelderMeadSimplex(SimplexMethodRefinement parent) {
    theParent = parent;
}
// Nelder and Mead Simplex minimisation
/**
 * Minimises the function {@code g} with the Nelder and Mead downhill-simplex method.
 * <p>
 * On return {@code paramValue} holds the (unscaled) parameter values of the best
 * simplex apex, {@code minimum} the corresponding function value, and
 * {@code convStatus} whether the simplex standard deviation dropped below
 * {@code fTol} within {@code nMax} function evaluations.
 * <p>
 * FIX(review): the final parameter extraction and the per-iteration report to
 * {@code theParent} previously read the apex at index {@code ihi} (the WORST
 * apex of the last iteration) while the reported value {@code ynewlo} and all
 * internal bookkeeping use {@code ilo} (the best apex). Both now use {@code ilo}.
 *
 * @param g     function to be minimised (evaluated through functionValue, which
 *              also applies any configured constraint penalties)
 * @param start initial parameter estimates (modified in place when scaling is on)
 * @param step  initial step size per parameter; no entry may be zero
 * @param fTol  convergence tolerance on the simplex standard deviation
 * @param nMax  maximum number of function evaluations
 * @throws IllegalArgumentException if array lengths disagree or a step is zero
 */
public void nelderMead(Function g, double[] start, double[] step, double fTol, int nMax) {
    if (randomizeStart)
        initRandomizer();
    int np = start.length; // number of unknown parameters
    this.nParam = np;
    this.convStatus = true;
    int nnp = np + 1; // Number of simplex apices
    this.lastFunctValNoCnstrnt = 0.0D;
    if (this.scaleOpt < 2) this.scale = new double[np];
    if (scaleOpt == 2 && scale.length != start.length)
        throw new IllegalArgumentException("scale array and initial estimate array are of different lengths");
    if (step.length != start.length)
        throw new IllegalArgumentException("step array length " + step.length + " and initial estimate array length " + start.length + " are of different lengths");
    // check for zero step sizes
    for (int i = 0; i < np; i++) if (step[i] == 0.0D) throw new IllegalArgumentException("step " + i + " size is zero");
    // set up arrays; pp is indexed [parameter][apex]
    this.paramValue = new double[np];
    this.startH = new double[np];
    this.step = new double[np];
    double[] pmin = new double[np]; //Nelder and Mead Pmin
    double[][] pp = new double[nnp][nnp]; //Nelder and Mead P
    double[] yy = new double[nnp]; //Nelder and Mead y (function value per apex)
    double[] pbar = new double[nnp]; //Nelder and Mead P with bar superscript (centroid)
    double[] pstar = new double[nnp]; //Nelder and Mead P* (reflected point)
    double[] p2star = new double[nnp]; //Nelder and Mead P** (extension/contraction point)
    // Unpack any single parameter constraint settings from the penalties Vector
    if (this.penalty) {
        Integer itemp = (Integer) this.penalties.elementAt(1);
        this.nConstraints = itemp.intValue();
        this.penaltyParam = new int[this.nConstraints];
        this.penaltyCheck = new int[this.nConstraints];
        this.constraints = new double[this.nConstraints];
        Double dtemp = null;
        int j = 2;
        for (int i = 0; i < this.nConstraints; i++) {
            itemp = (Integer) this.penalties.elementAt(j);
            this.penaltyParam[i] = itemp.intValue();
            j++;
            itemp = (Integer) this.penalties.elementAt(j);
            this.penaltyCheck[i] = itemp.intValue();
            j++;
            dtemp = (Double) this.penalties.elementAt(j);
            this.constraints[i] = dtemp.doubleValue();
            j++;
        }
    }
    // Unpack any multiple parameter constraint settings
    if (this.sumPenalty) {
        Integer itemp = (Integer) this.sumPenalties.elementAt(1);
        this.nSumConstraints = itemp.intValue();
        this.sumPenaltyParam = new int[this.nSumConstraints][];
        this.sumPlusOrMinus = new int[this.nSumConstraints][];
        this.sumPenaltyCheck = new int[this.nSumConstraints];
        this.sumPenaltyNumber = new int[this.nSumConstraints];
        this.sumConstraints = new double[this.nSumConstraints];
        int[] itempArray = null;
        Double dtemp = null;
        int j = 2;
        for (int i = 0; i < this.nSumConstraints; i++) {
            itemp = (Integer) this.sumPenalties.elementAt(j);
            this.sumPenaltyNumber[i] = itemp.intValue();
            j++;
            itempArray = (int[]) this.sumPenalties.elementAt(j);
            this.sumPenaltyParam[i] = itempArray;
            j++;
            itempArray = (int[]) this.sumPenalties.elementAt(j);
            this.sumPlusOrMinus[i] = itempArray;
            j++;
            itemp = (Integer) this.sumPenalties.elementAt(j);
            this.sumPenaltyCheck[i] = itemp.intValue();
            j++;
            dtemp = (Double) this.sumPenalties.elementAt(j);
            this.sumConstraints[i] = dtemp.doubleValue();
            j++;
        }
    }
    // Store unscaled start values
    for (int i = 0; i < np; i++) this.startH[i] = start[i];
    // scale initial estimates and step sizes
    if (this.scaleOpt > 0) {
        boolean testzero = false;
        for (int i = 0; i < np; i++) if (start[i] == 0.0D) testzero = true;
        if (testzero) {
            System.out.println("Nelder and Mead Simplex: a start value of zero precludes scaling");
            System.out.println("Regression performed without scaling");
            this.scaleOpt = 0;
        }
    }
    switch (this.scaleOpt) {
        case 0:
            for (int i = 0; i < np; i++) scale[i] = 1.0D;
            break;
        case 1:
            // scale estimates to unity
            for (int i = 0; i < np; i++) {
                scale[i] = 1.0 / start[i];
                step[i] = step[i] / start[i];
                start[i] = 1.0D;
            }
            break;
        case 2:
            // user-provided scale factors
            for (int i = 0; i < np; i++) {
                step[i] *= scale[i];
                start[i] *= scale[i];
            }
            break;
    }
    // set class member values
    this.fTol = fTol;
    this.nMax = nMax;
    this.nIter = 0;
    for (int i = 0; i < np; i++) {
        this.step[i] = step[i];
        this.scale[i] = scale[i];
    }
    // initial simplex
    double sho = 0.0D;
    for (int i = 0; i < np; ++i) {
        sho = start[i];
        pstar[i] = sho;
        p2star[i] = sho;
        pmin[i] = sho;
    }
    int jcount = this.konvge; // count of number of restarts still available
    if (randomizeStart) {
        // every apex drawn at random within [pstar, pstar + step]
        for (int i = 0; i < nnp; ++i) {
            for (int j = 0; j < np; ++j) {
                start[j] = randomGenerator(pstar[j], pstar[j] + step[j]);
                pp[j][i] = start[j];
            }
            yy[i] = this.functionValue(g, start);
        }
        sho = 0.0D;
        for (int i = 0; i < np; ++i) {
            sho = start[i];
            pstar[i] = sho;
            p2star[i] = sho;
            pmin[i] = sho;
        }
    } else {
        // last apex at the start point; the others offset by one step along each axis
        for (int i = 0; i < np; ++i) {
            pp[i][nnp - 1] = start[i];
        }
        yy[nnp - 1] = this.functionValue(g, start);
        for (int j = 0; j < np; ++j) {
            start[j] = start[j] + step[j];
            for (int i = 0; i < np; ++i) pp[i][j] = start[i];
            yy[j] = this.functionValue(g, start);
            start[j] = start[j] - step[j];
        }
    }
    // loop over allowed iterations
    double ynewlo = 0.0D; // current value lowest y
    double ystar = 0.0D; // Nelder and Mead y*
    double y2star = 0.0D; // Nelder and Mead y**
    double ylo = 0.0D; // Nelder and Mead y(low)
    // variables used in calculating the variance of the simplex at a putative minimum
    double curMin = 0.0D, sumnm = 0.0D, summnm = 0.0D, zn = 0.0D;
    int ilo = 0; // index of low apex
    int ihi = 0; // index of high apex
    int ln = 0; // counter for a check on low and high apices
    boolean test = true; // test becomes false on reaching minimum
    while (test) {
        // Determine h
        ylo = yy[0];
        ynewlo = ylo;
        ilo = 0;
        ihi = 0;
        for (int i = 1; i < nnp; ++i) {
            if (yy[i] < ylo) {
                ylo = yy[i];
                ilo = i;
            }
            if (yy[i] > ynewlo) {
                ynewlo = yy[i];
                ihi = i;
            }
        }
        // Calculate pbar: centroid of all apices except the worst one
        for (int i = 0; i < np; ++i) {
            zn = 0.0D;
            for (int j = 0; j < nnp; ++j) {
                zn += pp[i][j];
            }
            zn -= pp[i][ihi];
            pbar[i] = zn / np;
        }
        // Calculate p=(1+alpha).pbar-alpha.ph {Reflection}
        for (int i = 0; i < np; ++i) pstar[i] = (1.0 + this.rCoeff) * pbar[i] - this.rCoeff * pp[i][ihi];
        // Calculate y*
        ystar = this.functionValue(g, pstar);
        ++this.nIter;
        // check for y*<yi
        if (ystar < ylo) {
            // Form p**=(1+gamma).p*-gamma.pbar {Extension}
            for (int i = 0; i < np; ++i) p2star[i] = pstar[i] * (1.0D + this.eCoeff) - this.eCoeff * pbar[i];
            // Calculate y**
            y2star = this.functionValue(g, p2star);
            ++this.nIter;
            if (y2star < ylo) {
                // Replace ph by p**
                for (int i = 0; i < np; ++i) pp[i][ihi] = p2star[i];
                yy[ihi] = y2star;
            } else {
                //Replace ph by p*
                for (int i = 0; i < np; ++i) pp[i][ihi] = pstar[i];
                yy[ihi] = ystar;
            }
        } else {
            // Check y*>yi, i!=h
            ln = 0;
            for (int i = 0; i < nnp; ++i) if (i != ihi && ystar > yy[i]) ++ln;
            if (ln == np) {
                // y*>= all yi; Check if y*>yh
                if (ystar <= yy[ihi]) {
                    // Replace ph by p*
                    for (int i = 0; i < np; ++i) pp[i][ihi] = pstar[i];
                    yy[ihi] = ystar;
                }
                // Calculate p** =beta.ph+(1-beta)pbar {Contraction}
                for (int i = 0; i < np; ++i) p2star[i] = this.cCoeff * pp[i][ihi] + (1.0 - this.cCoeff) * pbar[i];
                // Calculate y**
                y2star = this.functionValue(g, p2star);
                ++this.nIter;
                // Check if y**>yh
                if (y2star > yy[ihi]) {
                    //Replace all pi by (pi+pl)/2 {shrink towards the best apex}
                    for (int j = 0; j < nnp; ++j) {
                        for (int i = 0; i < np; ++i) {
                            pp[i][j] = 0.5 * (pp[i][j] + pp[i][ilo]);
                            pmin[i] = pp[i][j];
                        }
                        yy[j] = this.functionValue(g, pmin);
                    }
                    this.nIter += nnp;
                } else {
                    // Replace ph by p**
                    for (int i = 0; i < np; ++i) pp[i][ihi] = p2star[i];
                    yy[ihi] = y2star;
                }
            } else {
                // replace ph by p*
                for (int i = 0; i < np; ++i) pp[i][ihi] = pstar[i];
                yy[ihi] = ystar;
            }
        }
        // test for convergence
        // calculate sd of simplex and minimum point
        sumnm = 0.0;
        ynewlo = yy[0];
        ilo = 0;
        for (int i = 0; i < nnp; ++i) {
            sumnm += yy[i];
            if (ynewlo > yy[i]) {
                ynewlo = yy[i];
                ilo = i;
            }
        }
        sumnm /= (double) (nnp);
        summnm = 0.0;
        for (int i = 0; i < nnp; ++i) {
            zn = yy[i] - sumnm;
            summnm += zn * zn;
        }
        curMin = Math.sqrt(summnm / np);
        // test simplex sd
        switch (this.minTest) {
            case 0:
                if (curMin < fTol) test = false;
                break;
        }
        this.minimum = ynewlo;
        if (!test) {
            // store parameter values
            for (int i = 0; i < np; ++i) pmin[i] = pp[i][ilo];
            yy[nnp - 1] = ynewlo;
            // store simplex sd
            this.simplexSd = curMin;
            // test for restart
            --jcount;
            if (jcount > 0) {
                test = true;
                for (int j = 0; j < np; ++j) {
                    pmin[j] = pmin[j] + step[j];
                    for (int i = 0; i < np; ++i) pp[i][j] = pmin[i];
                    yy[j] = this.functionValue(g, pmin);
                    pmin[j] = pmin[j] - step[j];
                }
            }
        }
        if (test && this.nIter > this.nMax) {
            System.out.println("Maximum iteration number reached, in Minimisation.simplex(...)");
            System.out.println("without the convergence criterion being satisfied");
            System.out.println("Current parameter estimates and function value returned");
            this.convStatus = false;
            // store current estimates
            for (int i = 0; i < np; ++i) pmin[i] = pp[i][ilo];
            yy[nnp - 1] = ynewlo;
            test = false;
        }
        if (theParent != null) {
            // report the best apex (ilo) so the parameters match the reported value ynewlo
            // (previously read pp[i][ihi], the worst apex of this iteration)
            double[] bestParm = new double[paramValue.length];
            for (int i = 0; i < np; ++i) {
                bestParm[i] = pp[i][ilo] / this.scale[i];
            }
            theParent.updateSolution(bestParm, ynewlo);
        }
    }
    // extract the best apex (ilo); previously this read pp[i][ihi], the worst apex,
    // clobbering the correct minimum stored in pmin above
    for (int i = 0; i < np; ++i) {
        pmin[i] = pp[i][ilo];
        paramValue[i] = pmin[i] / this.scale[i];
    }
    this.minimum = ynewlo;
    this.kRestart = this.konvge - jcount;
}
/**
 * Nelder and Mead simplex minimisation using the default maximum number of iterations.
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param step  initial step sizes, one per parameter
 * @param fTol  convergence tolerance
 */
public void nelderMead(Function g, double[] start, double[] step, double fTol) {
    // Delegate with the currently configured iteration ceiling.
    this.nelderMead(g, start, step, fTol, this.nMax);
}
/**
 * Nelder and Mead simplex minimisation using the default convergence tolerance.
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param step  initial step sizes, one per parameter
 * @param nMax  maximum number of iterations allowed
 */
public void nelderMead(Function g, double[] start, double[] step, int nMax) {
    // Delegate with the currently configured tolerance.
    this.nelderMead(g, start, step, this.fTol, nMax);
}
/**
 * Nelder and Mead simplex minimisation using the default convergence tolerance
 * and the default maximum number of iterations.
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param step  initial step sizes, one per parameter
 */
public void nelderMead(Function g, double[] start, double[] step) {
    // Delegate with the currently configured tolerance and iteration ceiling.
    this.nelderMead(g, start, step, this.fTol, this.nMax);
}
/**
 * Nelder and Mead simplex minimisation with default step sizes: each
 * step[i] is dStep * start[i].
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param fTol  convergence tolerance
 * @param nMax  maximum number of iterations allowed
 */
public void nelderMead(Function g, double[] start, double fTol, int nMax) {
    final double[] defaultSteps = new double[start.length];
    for (int i = 0; i < defaultSteps.length; ++i) {
        defaultSteps[i] = this.dStep * start[i];
    }
    this.nelderMead(g, start, defaultSteps, fTol, nMax);
}
/**
 * Nelder and Mead simplex minimisation with the default maximum number of
 * iterations and default step sizes (each step[i] = dStep * start[i]).
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param fTol  convergence tolerance
 */
public void nelderMead(Function g, double[] start, double fTol) {
    final double[] defaultSteps = new double[start.length];
    for (int i = 0; i < defaultSteps.length; ++i) {
        defaultSteps[i] = this.dStep * start[i];
    }
    this.nelderMead(g, start, defaultSteps, fTol, this.nMax);
}
/**
 * Nelder and Mead simplex minimisation with the default convergence tolerance
 * and default step sizes (each step[i] = dStep * start[i]).
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 * @param nMax  maximum number of iterations allowed
 */
public void nelderMead(Function g, double[] start, int nMax) {
    final double[] defaultSteps = new double[start.length];
    for (int i = 0; i < defaultSteps.length; ++i) {
        defaultSteps[i] = this.dStep * start[i];
    }
    this.nelderMead(g, start, defaultSteps, this.fTol, nMax);
}
/**
 * Nelder and Mead simplex minimisation with the default convergence tolerance,
 * the default maximum number of iterations, and default step sizes
 * (each step[i] = dStep * start[i]).
 *
 * @param g     function to be minimised
 * @param start initial parameter estimates
 */
public void nelderMead(Function g, double[] start) {
    final double[] defaultSteps = new double[start.length];
    for (int i = 0; i < defaultSteps.length; ++i) {
        defaultSteps[i] = this.dStep * start[i];
    }
    this.nelderMead(g, start, defaultSteps, this.fTol, this.nMax);
}
// Calculate the function value for minimisation.
// If any single- or multi-parameter 'cliff' constraint is violated, returns the last
// unconstrained function value plus a quadratic penalty; otherwise evaluates the
// objective at the (rescaled) point and caches the unconstrained value.
private double functionValue(Function g, double[] x) {
    double funcVal = -3.0D; // sentinel; always overwritten on every path below
    double[] param = new double[this.nParam];
    // rescale the internal (scaled) point back to user parameter space
    for (int i = 0; i < this.nParam; i++) param[i] = x[i] / scale[i];
    double tempFunctVal = this.lastFunctValNoCnstrnt;
    boolean test = true; // set false as soon as any constraint is violated
    // single parameter penalty functions
    if (this.penalty) {
        for (int i = 0; i < this.nConstraints; i++) {
            int k = this.penaltyParam[i];
            // penaltyCheck == -1: lower bound; penaltyCheck == +1: upper bound
            if (this.penaltyCheck[i] == -1 && param[k] < constraints[i]) {
                funcVal = tempFunctVal + this.penaltyWeight * Fmath.square(param[k] - constraints[i]);
                test = false;
            }
            if (this.penaltyCheck[i] == 1 && param[k] > constraints[i]) {
                funcVal = tempFunctVal + this.penaltyWeight * Fmath.square(param[k] - constraints[i]);
                test = false;
            }
        }
    }
    // multiple parameter penalty functions (bounds on a signed sum of parameters)
    if (this.sumPenalty) {
        for (int i = 0; i < this.nSumConstraints; i++) {
            // BUG FIX: the weighted sum must restart at zero for every constraint.
            // Previously it was declared outside this loop and accumulated across
            // constraints, corrupting the check for every constraint after the first.
            double sumPenaltySum = 0.0D;
            for (int j = 0; j < this.sumPenaltyNumber[i]; j++) {
                int kk = this.sumPenaltyParam[i][j];
                int pSign = this.sumPlusOrMinus[i][j];
                sumPenaltySum += param[kk] * pSign;
            }
            if (this.sumPenaltyCheck[i] == -1 && sumPenaltySum < sumConstraints[i]) {
                funcVal = tempFunctVal + this.penaltyWeight * Fmath.square(sumPenaltySum - sumConstraints[i]);
                test = false;
            }
            if (this.sumPenaltyCheck[i] == 1 && sumPenaltySum > sumConstraints[i]) {
                funcVal = tempFunctVal + this.penaltyWeight * Fmath.square(sumPenaltySum - sumConstraints[i]);
                test = false;
            }
        }
    }
    if (test) {
        // no constraint violated: evaluate the objective and cache the unconstrained value
        g.setFreeParameters(param);
        funcVal = g.getWSS();
        this.lastFunctValNoCnstrnt = funcVal;
    }
    return funcVal;
}
/**
 * Add a single parameter 'cliff' constraint boundary for the minimisation.
 *
 * @param paramIndex index of the constrained parameter
 * @param conDir     constraint direction: -1 for a lower bound, +1 for an upper bound
 * @param constraint boundary value
 */
public void addConstraint(int paramIndex, int conDir, double constraint) {
    this.penalty = true;
    // First element reserved for method number if other methods than 'cliff' are added later
    if (this.penalties.isEmpty()) this.penalties.addElement(Integer.valueOf(this.constraintMethod));
    // Second element holds the running count of constraints
    if (penalties.size() == 1) {
        this.penalties.addElement(Integer.valueOf(1));
    } else {
        int nPC = ((Integer) this.penalties.elementAt(1)).intValue();
        nPC++;
        this.penalties.setElementAt(Integer.valueOf(nPC), 1);
    }
    // Append the constraint triple: parameter index, direction, boundary value.
    // Integer.valueOf/Double.valueOf replace the deprecated boxing constructors.
    this.penalties.addElement(Integer.valueOf(paramIndex));
    this.penalties.addElement(Integer.valueOf(conDir));
    this.penalties.addElement(Double.valueOf(constraint));
}
/**
 * Add a multiple parameter 'cliff' constraint boundary for the minimisation: a bound
 * on the signed sum of several parameters.
 *
 * @param paramIndices indices of the constrained parameters
 * @param plusOrMinus  sign (+1/-1) applied to each parameter in the sum
 * @param conDir       constraint direction: -1 for a lower bound, +1 for an upper bound
 * @param constraint   boundary value for the signed sum
 * @throws IllegalArgumentException if the two arrays differ in length
 */
public void addConstraint(int[] paramIndices, int[] plusOrMinus, int conDir, double constraint) {
    int nCon = paramIndices.length;
    int nPorM = plusOrMinus.length;
    if (nCon != nPorM)
        throw new IllegalArgumentException("num of parameters, " + nCon + ", does not equal number of parameter signs, " + nPorM);
    this.sumPenalty = true;
    // First element reserved for method number if other methods than 'cliff' are added later
    if (this.sumPenalties.isEmpty()) this.sumPenalties.addElement(Integer.valueOf(this.constraintMethod));
    // Second element holds the running count of constraints
    if (sumPenalties.size() == 1) {
        this.sumPenalties.addElement(Integer.valueOf(1));
    } else {
        int nPC = ((Integer) this.sumPenalties.elementAt(1)).intValue();
        nPC++;
        this.sumPenalties.setElementAt(Integer.valueOf(nPC), 1);
    }
    // Append the constraint record: arity, indices, signs, direction, boundary value.
    // Integer.valueOf/Double.valueOf replace the deprecated boxing constructors.
    this.sumPenalties.addElement(Integer.valueOf(nCon));
    this.sumPenalties.addElement(paramIndices);
    this.sumPenalties.addElement(plusOrMinus);
    this.sumPenalties.addElement(Integer.valueOf(conDir));
    this.sumPenalties.addElement(Double.valueOf(constraint));
}
/**
 * Set the constraint method (slot 0 of the penalties list is reserved for it).
 *
 * @param conMeth constraint method identifier
 */
public void setConstraintMethod(int conMeth) {
    this.constraintMethod = conMeth;
    // Keep an already-populated penalty list consistent with the new method.
    // Integer.valueOf replaces the deprecated boxing constructor.
    if (!this.penalties.isEmpty()) this.penalties.setElementAt(Integer.valueOf(this.constraintMethod), 0);
}
/**
 * Remove all single- and multi-parameter constraint boundaries for the minimisation.
 */
public void removeConstraints() {
    // Vector.removeAllElements() clears in one call and is a no-op on an empty
    // vector, so the previous isEmpty() guard and element-by-element reverse
    // removal loop are unnecessary.
    this.penalties.removeAllElements();
    this.penalty = false;
    this.nConstraints = 0;
    this.sumPenalties.removeAllElements();
    this.sumPenalty = false;
    this.nSumConstraints = 0;
}
/**
 * (Re)initialise the Mersenne Twister random number generator, seeding it from the
 * low-order bits of the current clock time (truncation to int is deliberate).
 */
void initRandomizer() {
    final int seed = (int) System.currentTimeMillis();
    randomizer = new ec.util.MersenneTwisterFast(seed);
}
/**
 * Draw a uniform random number in [0, 1), re-drawing in the rare case the
 * underlying generator returns exactly 1.0.
 */
double randomGenerator() {
    double draw;
    do {
        draw = randomizer.nextDouble();
    } while (draw == 1.0); // 1.0 is excluded
    return draw;
}
/**
 * Return a uniformly distributed value between {@code min} and {@code max},
 * with {@code max} excluded.
 *
 * @param min lower bound of the range
 * @param max upper bound of the range (excluded)
 * @return a uniform value in [min, max)
 */
double randomGenerator(double min, double max) {
    final double unit = randomGenerator();
    return min + unit * (max - min);
}
/**
 * Record the truncation precision for printed results (file name variant).
 * NOTE(review): the actual file output call is commented out below, so this method
 * currently only stores {@code prec}; the supplied {@code filename} is unused.
 *
 * @param filename intended output file name (currently ignored)
 * @param prec     truncation precision for printed values
 */
public void print(String filename, int prec) {
this.prec = prec;
// this.print(filename);  // file output disabled; see the commented-out print(String) below
}
/**
 * Record the truncation precision for printed results (default file name variant).
 * NOTE(review): the actual file output call is commented out below, so this method
 * currently only stores {@code prec}.
 *
 * @param prec truncation precision for printed values
 */
public void print(int prec) {
this.prec = prec;
// String filename="MinimisationOutput.txt";
// this.print(filename);
}
// Print the results of the minimisation
// File name provided
// prec = truncation precision
/* public void print(String filename){
if(filename.lastIndexOf('.')==-1)filename = filename+".txt";
FileOutput fout = new FileOutput(filename, 'n');
fout.dateAndTimeln(filename);
fout.println(" ");
fout.println("Simplex minimisation, using the method of Nelder and Mead,");
fout.println("of the function y = f(c[0], c[1], c[2] . . .)");
this.paraName = new String[this.nParam];
for(int i=0;i<this.nParam;i++)this.paraName[i]="c["+i+"]";
fout.println();
if(!this.convStatus){
fout.println("Convergence criterion was not satisfied");
fout.println("The following results are the current estimates on exiting the minimisation method");
fout.println();
}
fout.println("Value of parameters at the minimum");
fout.println(" ");
fout.printtab(" ", this.field);
fout.printtab("Value at", this.field);
fout.printtab("Initial", this.field);
fout.println("Initial");
fout.printtab(" ", this.field);
fout.printtab("minimum", this.field);
fout.printtab("estimate", this.field);
fout.println("step");
for(int i=0; i<this.nParam; i++){
fout.printtab(this.paraName[i], this.field);
fout.printtab(Fmath.truncate(paramValue[i],this.prec), this.field);
fout.printtab(Fmath.truncate(this.startH[i],this.prec), this.field);
fout.println(Fmath.truncate(this.step[i],this.prec));
}
fout.println();
fout.println(" ");
fout.printtab("Number of parameters");
fout.println(this.nParam);
fout.printtab("Number of iterations taken");
fout.println(this.nIter);
fout.printtab("Maximum number of iterations allowed");
fout.println(this.nMax);
fout.printtab("Number of restarts taken");
fout.println(this.kRestart);
fout.printtab("Maximum number of restarts allowed");
fout.println(this.konvge);
fout.printtab("Standard deviation of the simplex at the minimum");
fout.println(Fmath.truncate(this.simplexSd, this.prec));
fout.printtab("Convergence tolerance");
fout.println(this.fTol);
switch(minTest){
case 0: if(this.convStatus){
fout.println("simplex sd < the tolerance");
}
else{
fout.println("NOTE!!! simplex sd > the tolerance");
}
break;
}
fout.println();
fout.println("End of file");
fout.close();
} */
/**
 * Print the results of the minimisation (no file name provided).
 * NOTE(review): the underlying file output is commented out, so this is currently a no-op.
 */
public void print() {
// String filename="MinimisationOutput.txt";
// this.print(filename);
}
/**
 * Minimisation status.
 *
 * @return {@code true} if convergence was achieved; {@code false} if convergence was not
 *         achieved before the maximum number of iterations (current values then returned)
 */
public boolean getConvStatus() {
return this.convStatus;
}
/**
 * Reset the scaling option (scaleOpt 0 and 1; see {@link #setScale(double[])} for scaleOpt 2).
 * 0 = no scaling, 1 = initial estimates all scaled to unity.
 *
 * @param n scaling option, 0 or 1
 * @throws IllegalArgumentException if {@code n} is outside [0, 1]
 */
public void setScale(int n) {
if (n < 0 || n > 1)
throw new IllegalArgumentException("The argument must be 0 (no scaling) 1(initial estimates all scaled to unity) or the array of scaling factors");
this.scaleOpt = n;
}
/**
 * Supply explicit scaling factors (sets scaleOpt to 2; see {@link #setScale(int)} for 0 and 1).
 * NOTE(review): the caller's array is stored by reference, not copied — later mutation by
 * the caller will affect the minimiser; confirm whether a defensive copy is intended.
 *
 * @param sc one scaling factor per parameter
 */
public void setScale(double[] sc) {
this.scale = sc;
this.scaleOpt = 2;
}
/** Current scaling factors (the internal array, not a copy). */
public double[] getScale() {
return this.scale;
}
/**
 * Reset the minimisation convergence test option.
 *
 * @param n convergence test option; must be 0 or 1
 * @throws IllegalArgumentException if {@code n} is outside [0, 1]
 */
public void setMinTest(int n) {
if (n < 0 || n > 1) throw new IllegalArgumentException("minTest must be 0 or 1");
this.minTest = n;
}
/** Current minimisation convergence test option. */
public int getMinTest() {
return this.minTest;
}
/** Standard deviation of the simplex at the minimum. */
public double getSimplexSd() {
return this.simplexSd;
}
/** Parameter values at the minimum (the internal array, not a copy). */
public double[] getParamValues() {
return this.paramValue;
}
/** Function value at the minimum. */
public double getMinimum() {
return this.minimum;
}
/** Number of iterations taken in the Nelder and Mead search. */
public int getNiter() {
return this.nIter;
}
/**
 * Set the maximum number of iterations allowed in Nelder and Mead.
 *
 * @param nmax iteration ceiling
 */
public void setNmax(int nmax) {
this.nMax = nmax;
}
/** Maximum number of iterations allowed in Nelder and Mead. */
public int getNmax() {
return this.nMax;
}
/** Number of restarts taken in Nelder and Mead. */
public int getNrestarts() {
return this.kRestart;
}
/**
 * Set the maximum number of restarts allowed in Nelder and Mead.
 *
 * @param nrs restart ceiling
 */
public void setNrestartsMax(int nrs) {
this.konvge = nrs;
}
/** Maximum number of restarts allowed in Nelder and Mead. */
public int getNrestartsMax() {
return this.konvge;
}
/**
 * Reset the Nelder and Mead reflection coefficient [alpha].
 *
 * @param refl reflection coefficient
 */
public void setNMreflect(double refl) {
this.rCoeff = refl;
}
/** Nelder and Mead reflection coefficient [alpha]. */
public double getNMreflect() {
return this.rCoeff;
}
/**
 * Reset the Nelder and Mead extension coefficient [beta].
 *
 * @param ext extension coefficient
 */
public void setNMextend(double ext) {
this.eCoeff = ext;
}
/** Nelder and Mead extension coefficient [beta]. */
public double getNMextend() {
return this.eCoeff;
}
/**
 * Reset the Nelder and Mead contraction coefficient [gamma].
 *
 * @param con contraction coefficient
 */
public void setNMcontract(double con) {
this.cCoeff = con;
}
/** Nelder and Mead contraction coefficient [gamma]. */
public double getNMcontract() {
return cCoeff;
}
/**
 * Set the minimisation convergence tolerance.
 *
 * @param tol convergence tolerance
 */
public void setTolerance(double tol) {
this.fTol = tol;
}
/** Minimisation convergence tolerance. */
public double getTolerance() {
return this.fTol;
}
}
| |
package monasca.statsd;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.math3.util.Precision;
/**
* A simple StatsD client implementation facilitating metrics recording.
*
* <p>Upon instantiation, this client will establish a socket connection to a StatsD instance
* running on the specified host and port. Metrics are then sent over this connection as they are
* received by the client.
* </p>
*
* <p>Three key methods are provided for the submission of data-points for the application under
* scrutiny:
* <ul>
* <li>{@link #incrementCounter} - adds one to the value of the specified named counter</li>
* <li>{@link #recordGaugeValue} - records the latest fixed value for the specified named gauge</li>
* <li>{@link #recordExecutionTime} - records an execution time in milliseconds for the specified named operation</li>
* <li>{@link #recordHistogramValue} - records a value, to be tracked with average, maximum, and percentiles</li>
* </ul>
* </p>
*
* <p>As part of a clean system shutdown, the {@link #stop()} method should be invoked
* on any StatsD clients.</p>
*
* <p>This class is a blocking implementation. It is preferable to use with already existing threading systems or logging systems like slf4j(log4j implementation)</p>
*
*/
public class BlockingStatsDClient extends StatsDClientBase implements StatsDClient {
protected static final StatsDClientErrorHandler NO_OP_HANDLER = new StatsDClientErrorHandler() {
@Override public void handle(Exception e) { /* No-op */ }
};
/**
* Create a new StatsD client communicating with a StatsD instance on the
* specified host and port. All messages sent via this client will have
* their keys prefixed with the specified string. The new client will
* attempt to open a connection to the StatsD server immediately upon
* instantiation, and may throw an exception if a connection cannot
* be established. Once a client has been instantiated in this way, all
* exceptions thrown during subsequent usage are consumed, guaranteeing
* that failures in metrics will not affect normal code execution.
*
* @param prefix
* the prefix to apply to keys sent via this client
* @param hostname
* the host name of the targeted StatsD server
* @param port
* the port of the targeted StatsD server
* @throws StatsDClientException
* if the client could not be started
*/
public BlockingStatsDClient(String prefix, String hostname, int port) throws StatsDClientException {
this(prefix, hostname, port, null, NO_OP_HANDLER);
}
/**
* Create a new StatsD client communicating with a StatsD instance on the
* specified host and port. All messages send via this client will have
* their keys prefixed with the specified string. The new client will
* attempt to open a connection to the StatsD server immediately upon
* instantiation, and may throw an exception if that a connection cannot
* be established. Once a client has been instantiated in this way, all
* exceptions thrown during subsequent usage are consumed, guaranteeing
* that failures in metrics will not affect normal code execution.
*
* @param prefix
* the prefix to apply to keys sent via this client
* @param hostname
* the host name of the targeted StatsD server
* @param port
* the port of the targeted StatsD server
* @param defaultDimensions
* dimensions to be added to all content sent
* @throws StatsDClientException
* if the client could not be started
*/
public BlockingStatsDClient(String prefix, String hostname, int port, Map<String, String> defaultDimensions)
throws StatsDClientException {
this(prefix, hostname, port, defaultDimensions, NO_OP_HANDLER);
}
/**
* Create a new StatsD client communicating with a StatsD instance on the
* specified host and port. All messages send via this client will have
* their keys prefixed with the specified string. The new client will
* attempt to open a connection to the StatsD server immediately upon
* instantiation, and may throw an exception if that a connection cannot
* be established. Once a client has been instantiated in this way, all
* exceptions thrown during subsequent usage are passed to the specified
* handler and then consumed, guaranteeing that failures in metrics will
* not affect normal code execution.
*
* @param prefix
* the prefix to apply to keys sent via this client
* @param hostname
* the host name of the targeted StatsD server
* @param port
* the port of the targeted StatsD server
* @param defaultDimensions
* dimensions to be added to all content sent
* @param errorHandler
* handler to use when an exception occurs during usage
* @throws StatsDClientException
* if the client could not be started
*/
public BlockingStatsDClient(String prefix, String hostname, int port, Map<String, String> defaultDimensions,
StatsDClientErrorHandler errorHandler) throws StatsDClientException {
super(prefix, hostname, port, defaultDimensions, errorHandler);
}
/**
* Cleanly shut down this StatsD client. This method may throw an exception if
* the socket cannot be closed.
*/
@Override
public void stop() {
super.stop();
}
/**
* Adjusts the specified counter by a given delta.
*
*
* @param aspect
* the name of the counter to adjust
* @param delta
* the amount to adjust the counter by
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void count(String aspect, long delta, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%d|c%s", prefix, aspect, delta, dimensionString(dimensions)));
}
/**
* Increments the specified counter by one.
*
*
* @param aspect
* the name of the counter to increment
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void incrementCounter(String aspect, Map<String, String> dimensions) {
count(aspect, 1, dimensions);
}
/**
* Convenience method equivalent to {@link #incrementCounter(String, Map<String, String>)}.
*/
@Override
public void increment(String aspect, Map<String, String> dimensions) {
incrementCounter(aspect, dimensions);
}
/**
* Decrements the specified counter by one.
*
*
* @param aspect
* the name of the counter to decrement
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void decrementCounter(String aspect, Map<String, String> dimensions) {
count(aspect, -1, dimensions);
}
/**
* Convenience method equivalent to {@link #decrementCounter(String, Map<String, String>)}.
*/
@Override
public void decrement(String aspect, Map<String, String> dimensions) {
decrementCounter(aspect, dimensions);
}
/**
* Records the latest fixed value for the specified named gauge.
*
*
* @param aspect
* the name of the gauge
* @param value
* the new reading of the gauge
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void recordGaugeValue(String aspect, double value, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%f|g%s", prefix, aspect, Precision.round(value, 6), dimensionString(dimensions)));
}
/**
* Convenience method equivalent to {@link #recordGaugeValue(String, double, Map<String, String>)}.
*/
@Override
public void gauge(String aspect, double value, Map<String, String> dimensions) {
recordGaugeValue(aspect, value, dimensions);
}
/**
* Records the latest fixed value for the specified named gauge.
*
*
* @param aspect
* the name of the gauge
* @param value
* the new reading of the gauge
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void recordGaugeValue(String aspect, long value, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%d|g%s", prefix, aspect, value, dimensionString(dimensions)));
}
/**
* Convenience method equivalent to {@link #recordGaugeValue(String, int, Map<String, String>)}.
*/
@Override
public void gauge(String aspect, long value, Map<String, String> dimensions) {
recordGaugeValue(aspect, value, dimensions);
}
/**
* Records an execution time in milliseconds for the specified named operation.
*
*
* @param aspect
* the name of the timed operation
* @param timeInMs
* the time in milliseconds
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void recordExecutionTime(String aspect, long timeInMs, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%d|ms%s", prefix, aspect, timeInMs, dimensionString(dimensions)));
}
/**
* Convenience method equivalent to {@link #recordExecutionTime(String, long, Map<String, String>)}.
*/
@Override
public void time(String aspect, long value, Map<String, String> dimensions) {
recordExecutionTime(aspect, value, dimensions);
}
/**
* Records a value for the specified named histogram.
*
*
* @param aspect
* the name of the histogram
* @param value
* the value to be incorporated in the histogram
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void recordHistogramValue(String aspect, double value, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%f|h%s", prefix, aspect, Precision.round(value, 6), dimensionString(dimensions)));
}
/**
* Convenience method equivalent to {@link #recordHistogramValue(String, double, Map<String, String>)}.
*/
@Override
public void histogram(String aspect, double value, Map<String, String> dimensions) {
recordHistogramValue(aspect, value, dimensions);
}
/**
* Records a value for the specified named histogram.
*
*
* @param aspect
* the name of the histogram
* @param value
* the value to be incorporated in the histogram
* @param dimensions
* map of dimensions to be added to the data
*/
@Override
public void recordHistogramValue(String aspect, long value, Map<String, String> dimensions) {
blockingSend(String.format("%s%s:%d|h%s", prefix, aspect, value, dimensionString(dimensions)));
}
/**
* Convenience method equivalent to {@link #recordHistogramValue(String, int, Map<String, String>)}.
*/
@Override
public void histogram(String aspect, long value, Map<String, String> dimensions) {
recordHistogramValue(aspect, value, dimensions);
}
private void blockingSend(String message) throws IOException {
final ByteBuffer sendBuffer = ByteBuffer.allocate(PACKET_SIZE_BYTES);
sendBuffer.put(message.getBytes());
super.blockingSend(sendBuffer);
}
}
| |
/*
* Copyright 2014-2020 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.samples.cluster.tutorial;
import io.aeron.CommonContext;
import io.aeron.ExclusivePublication;
import io.aeron.Image;
import io.aeron.archive.Archive;
import io.aeron.archive.ArchiveThreadingMode;
import io.aeron.cluster.*;
import io.aeron.cluster.client.AeronCluster;
import io.aeron.cluster.client.EgressListener;
import io.aeron.cluster.codecs.CloseReason;
import io.aeron.cluster.service.ClientSession;
import io.aeron.cluster.service.Cluster;
import io.aeron.cluster.service.ClusteredService;
import io.aeron.cluster.service.ClusteredServiceContainer;
import io.aeron.driver.MediaDriver;
import io.aeron.driver.ThreadingMode;
import io.aeron.logbuffer.FragmentHandler;
import io.aeron.logbuffer.Header;
import org.agrona.CloseHelper;
import org.agrona.DirectBuffer;
import org.agrona.ErrorHandler;
import org.agrona.ExpandableArrayBuffer;
import org.agrona.concurrent.IdleStrategy;
import org.agrona.concurrent.YieldingIdleStrategy;
import org.agrona.concurrent.status.AtomicCounter;
import org.agrona.console.ContinueBarrier;
import java.util.concurrent.TimeUnit;
import static org.agrona.BitUtil.SIZE_OF_INT;
/**
* Single Node Cluster that includes everything needed to run all in one place. Includes a simple service to show
* event processing, and includes a cluster client.
* <p>
* Perfect for playing around with the Cluster.
*/
public class SingleNodeCluster implements AutoCloseable
{
private static final int MESSAGE_ID = 1;
private static final int TIMER_ID = 2;
// cluster side
private final ClusteredMediaDriver clusteredMediaDriver;
private final ClusteredServiceContainer container;
// cluster client side
private MediaDriver clientMediaDriver;
private AeronCluster client;
private final IdleStrategy idleStrategy = YieldingIdleStrategy.INSTANCE;
private final ExpandableArrayBuffer msgBuffer = new ExpandableArrayBuffer();
private final EgressListener egressMessageListener = new EgressListener()
{
public void onMessage(
final long clusterSessionId,
final long timestamp,
final DirectBuffer buffer,
final int offset,
final int length,
final Header header)
{
System.out.println("egress onMessage " + clusterSessionId);
}
public void onNewLeader(
final long clusterSessionId,
final long leadershipTermId,
final int leaderMemberId,
final String ingressEndpoints)
{
System.out.println("SingleNodeCluster.onNewLeader");
}
};
static class Service implements ClusteredService
{
protected Cluster cluster;
protected IdleStrategy idleStrategy;
private int messageCount = 0;
private final ExpandableArrayBuffer buffer = new ExpandableArrayBuffer();
public void onStart(final Cluster cluster, final Image snapshotImage)
{
this.cluster = cluster;
this.idleStrategy = cluster.idleStrategy();
if (null != snapshotImage)
{
System.out.println("onStart load snapshot");
final FragmentHandler fragmentHandler =
(buffer, offset, length, header) -> messageCount = buffer.getInt(offset);
idleStrategy.reset();
while (snapshotImage.poll(fragmentHandler, 1) <= 0)
{
idleStrategy.idle();
}
System.out.println("snapshot messageCount=" + messageCount);
}
}
public void onSessionOpen(final ClientSession session, final long timestamp)
{
System.out.println("onSessionOpen " + session.id());
}
public void onSessionClose(final ClientSession session, final long timestamp, final CloseReason closeReason)
{
System.out.println("onSessionClose " + session.id() + " " + closeReason);
}
public void onSessionMessage(
final ClientSession session,
final long timestamp,
final DirectBuffer buffer,
final int offset,
final int length,
final Header header)
{
messageCount++;
System.out.println(cluster.role() + " onSessionMessage " + session.id() + " count=" + messageCount);
final int id = buffer.getInt(offset);
if (TIMER_ID == id)
{
idleStrategy.reset();
while (!cluster.scheduleTimer(serviceCorrelationId(1), cluster.time() + 1_000))
{
idleStrategy.idle();
}
}
else
{
idleStrategy.reset();
while (session.offer(buffer, offset, length) < 0)
{
idleStrategy.idle();
}
}
}
public void onTimerEvent(final long correlationId, final long timestamp)
{
System.out.println("onTimerEvent " + correlationId);
final ExpandableArrayBuffer buffer = new ExpandableArrayBuffer();
buffer.putInt(0, 1);
cluster.forEachClientSession(
(clientSession) ->
{
idleStrategy.reset();
while (clientSession.offer(buffer, 0, SIZE_OF_INT) < 0)
{
idleStrategy.idle();
}
});
}
public void onTakeSnapshot(final ExclusivePublication snapshotPublication)
{
System.out.println("onTakeSnapshot messageCount=" + messageCount);
buffer.putInt(0, messageCount);
idleStrategy.reset();
while (snapshotPublication.offer(buffer, 0, 4) < 0)
{
idleStrategy.idle();
}
}
public void onRoleChange(final Cluster.Role newRole)
{
System.out.println("onRoleChange " + newRole);
}
public void onTerminate(final Cluster cluster)
{
}
public void onNewLeadershipTermEvent(
final long leadershipTermId,
final long logPosition,
final long timestamp,
final long termBaseLogPosition,
final int leaderMemberId,
final int logSessionId,
final TimeUnit timeUnit,
final int appVersion)
{
System.out.println("onNewLeadershipTermEvent");
}
protected long serviceCorrelationId(final int correlationId)
{
return ((long)cluster.context().serviceId()) << 56 | correlationId;
}
}
public SingleNodeCluster(final ClusteredService externalService, final boolean cleanStart)
{
final MediaDriver.Context mediaDriverContext = new MediaDriver.Context();
final ConsensusModule.Context consensusModuleContext = new ConsensusModule.Context();
final Archive.Context archiveContext = new Archive.Context();
final ClusteredServiceContainer.Context serviceContainerContext = new ClusteredServiceContainer.Context();
final ClusteredService service = null == externalService ? new SingleNodeCluster.Service() : externalService;
mediaDriverContext
.threadingMode(ThreadingMode.SHARED)
.errorHandler(Throwable::printStackTrace)
.dirDeleteOnShutdown(true)
.dirDeleteOnStart(true);
archiveContext
.recordingEventsEnabled(false)
.threadingMode(ArchiveThreadingMode.SHARED)
.deleteArchiveOnStart(cleanStart);
consensusModuleContext
.errorHandler(Throwable::printStackTrace)
.deleteDirOnStart(cleanStart);
serviceContainerContext
.clusteredService(service)
.errorHandler(Throwable::printStackTrace);
clusteredMediaDriver = ClusteredMediaDriver.launch(
mediaDriverContext,
archiveContext,
consensusModuleContext);
container = ClusteredServiceContainer.launch(serviceContainerContext);
}
public void close()
{
final ErrorHandler errorHandler = clusteredMediaDriver.mediaDriver().context().errorHandler();
CloseHelper.close(errorHandler, client);
CloseHelper.close(errorHandler, clientMediaDriver);
CloseHelper.close(errorHandler, clusteredMediaDriver.consensusModule());
CloseHelper.close(errorHandler, container);
CloseHelper.close(clusteredMediaDriver); // ErrorHandler will be closed during that call so can't use it
}
void connectClientToCluster()
{
final String aeronDirectoryName = CommonContext.getAeronDirectoryName() + "-client";
clientMediaDriver = MediaDriver.launch(
new MediaDriver.Context()
.threadingMode(ThreadingMode.SHARED)
.dirDeleteOnStart(true)
.dirDeleteOnShutdown(true)
.errorHandler(Throwable::printStackTrace)
.aeronDirectoryName(aeronDirectoryName));
client = AeronCluster.connect(
new AeronCluster.Context()
.errorHandler(Throwable::printStackTrace)
.egressListener(egressMessageListener)
.aeronDirectoryName(aeronDirectoryName));
}
void sendMessageToCluster(final int id, final int messageLength)
{
msgBuffer.putInt(0, id);
idleStrategy.reset();
while (client.offer(msgBuffer, 0, messageLength) < 0)
{
idleStrategy.idle();
}
}
/**
 * Polls the cluster client for egress messages.
 *
 * @return the number of fragments processed, or 0 when no client is connected
 */
int pollEgress()
{
    if (null == client)
    {
        return 0;
    }

    return client.pollEgress();
}
/**
 * Blocks, idling between polls, until at least one egress message has been
 * received from the cluster.
 */
void pollEgressUntilMessage()
{
    idleStrategy.reset();
    for (;;)
    {
        if (pollEgress() > 0)
        {
            return;
        }
        idleStrategy.idle();
    }
}
/**
 * Requests a cluster snapshot via the consensus module's control toggle and
 * blocks until the snapshot counter advances, i.e. until the snapshot has
 * actually been taken.
 */
void takeSnapshot()
{
    final ConsensusModule.Context consensusModuleContext = clusteredMediaDriver.consensusModule().context();
    final AtomicCounter snapshotCounter = consensusModuleContext.snapshotCounter();
    // Capture the counter value BEFORE toggling so the increment can be detected.
    final long snapshotCount = snapshotCounter.get();

    final AtomicCounter controlToggle = ClusterControl.findControlToggle(
        clusteredMediaDriver.mediaDriver().context().countersManager(),
        consensusModuleContext.clusterId());

    ClusterControl.ToggleState.SNAPSHOT.toggle(controlToggle);

    // Busy-wait (with idle strategy) until the snapshot counter moves past the captured value.
    idleStrategy.reset();
    while (snapshotCounter.get() <= snapshotCount)
    {
        idleStrategy.idle();
    }
}
/**
 * Starts a single node cluster with a clean state, sends one message, waits
 * until it is echoed back on the egress channel, then parks on the console
 * barrier so the operator decides when to shut down.
 */
static void sendSingleMessageAndEchoBack()
{
    try (SingleNodeCluster cluster = new SingleNodeCluster(null, true))
    {
        cluster.connectClientToCluster();
        cluster.sendMessageToCluster(MESSAGE_ID, 4);
        cluster.pollEgressUntilMessage();

        new ContinueBarrier("continue").await();
    }
}
/**
 * Restarts the single node cluster WITHOUT cleaning its state, so the
 * previous log is replayed, then sends another message and waits for the
 * echo before parking on the console barrier.
 */
static void loadPreviousLogAndSendAnotherMessageAndEchoBack()
{
    try (SingleNodeCluster cluster = new SingleNodeCluster(null, false))
    {
        cluster.connectClientToCluster();
        cluster.sendMessageToCluster(MESSAGE_ID, 4);
        cluster.pollEgressUntilMessage();

        new ContinueBarrier("continue").await();
    }
}
/**
 * Runs the demo twice in sequence: first against a clean cluster state, then
 * restarting so the previous log is replayed before another message is sent.
 */
public static void main(final String[] args)
{
    sendSingleMessageAndEchoBack();
    loadPreviousLogAndSendAnotherMessageAndEchoBack();
}
}
| |
package logic;
import processing.core.PApplet;
//TODO: turn ruleset into a byte[]. turn numStates into a byte.
/**
 * A two-dimensional cellular automaton defined by an arbitrary neighborhood
 * function, a number of per-cell states, and an explicit rule table that maps
 * every possible neighborhood configuration (encoded as a base-numStates
 * number) to the resulting state of the cell.
 */
public class Automaton {
    private int[] ruleset; // each index is a base-n number that represents a
                           // neighborhood where n is the number of states.
                           // the value at the index gives the resulting state
                           // of the cell surrounded by that neighborhood.
    private int numStates; // number of states a cell can take
    private Vertex[] neighborFunction; // a sequence of pairs (each pair defines
                                       // an x and a y offset from the cell)
    private int widthOfNeighborhood, heightOfNeighborhood; // in terms of cells,
                                                           // not pixels
    private BoundaryFunction xEdgeFunction, yEdgeFunction; // map out-of-range x/y back onto the grid

    /***************************
     ****** Initialization *****
     ***************************/

    /**
     * Creates an Automaton with an all-zero ruleset sized for the given
     * neighborhood and number of states (numStates^numNeighbors entries).
     */
    public Automaton(Vertex[] neighborFunction, int numStates) {
        this(neighborFunction, numStates, new int[(int) PApplet.pow(numStates, neighborFunction.length)]);
    }

    /**
     * Creates an Automaton with the given ruleset. The ruleset must contain
     * exactly numStates^numNeighbors entries; otherwise an error is printed
     * and an all-zero ruleset of the correct size is used instead.
     */
    public Automaton(Vertex[] neighborFunction, int numStates, int[] ruleset) {
        this.neighborFunction = neighborFunction;
        this.numStates = numStates;
        int numRules = (int) PApplet.pow(numStates, numNeighbors());
        boolean validRuleset = ruleset.length == numRules;
        if (validRuleset) {
            this.ruleset = ruleset;
        } else {
            System.err.println("Invalid combination of rules, states, and neighbors.");
            this.ruleset = new int[numRules];
        }
        computeWidthAndHeightOfNeighborhood();
        initEdgeFunctions();
    }

    // Computes the bounding box of the neighborhood offsets in cells. The
    // min/max values start at 0, so the origin cell is always included in
    // the box even if no offset is (0, 0).
    private void computeWidthAndHeightOfNeighborhood() {
        int minY = 0;
        int maxY = 0;
        int minX = 0;
        int maxX = 0;
        for (int i = 0; i < neighborFunction.length; i++) {
            if (neighborFunction[i].x < minX)
                minX = neighborFunction[i].x;
            if (neighborFunction[i].x > maxX)
                maxX = neighborFunction[i].x;
            if (neighborFunction[i].y < minY)
                minY = neighborFunction[i].y;
            if (neighborFunction[i].y > maxY)
                maxY = neighborFunction[i].y;
        }
        widthOfNeighborhood = maxX - minX + 1;
        heightOfNeighborhood = maxY - minY + 1;
    }

    // Both axes default to the shared RemainderFunction instance.
    // NOTE(review): presumably this wraps coordinates around the grid edges
    // (toroidal topology) — confirm against RemainderFunction.
    private void initEdgeFunctions() {
        xEdgeFunction = RemainderFunction.instance;
        yEdgeFunction = RemainderFunction.instance;
    }

    /**
     * Randomizes the rules to this Automata. Every rule entry is assigned a
     * uniformly random state in [0, numStates).
     */
    public void randomizeRules() {
        for (int i = 0; i < ruleset.length; i++) {
            ruleset[i] = (int) (Math.random() * numStates);
        }
    }

    /**
     * Sets the resulting state for a single neighborhood configuration.
     *
     * @param neighborhoodNum the configuration encoded as a base-numStates number
     * @param result          the state the centre cell takes next generation
     */
    public void addRule(int neighborhoodNum, int result) {
        ruleset[neighborhoodNum] = result;
    }

    /**
     * Sets the resulting state for every neighborhood configuration matched
     * by the given rule string. An entry of -1 in ruleString is a wildcard
     * that matches any state; every other entry is a fixed state value.
     * Matching configurations are enumerated from smallest encoded rule
     * number to largest. Each written rule number is printed to stdout.
     */
    public void addRule(int[] ruleString, int result) {
        // initialize buffer array that will represent ruleNums one at a time
        // (wildcards start at state 0).
        int[] ruleNumArray = new int[ruleString.length];
        for (int i = 0; i < ruleString.length; i++) {
            ruleNumArray[i] = (ruleString[i] == -1) ? 0 : ruleString[i];
        }
        int ruleNum = toInt(ruleNumArray, numStates);
        System.out.println("ruleNum = " + ruleNum);
        ruleset[ruleNum] = result;
        // enumerate through all the ruleNums the ruleString represents from
        // smallest ruleNum to largest, odometer-style over the wildcard digits.
        // NOTE(review): numRulesWritten is counted but never read.
        int numRulesWritten = 1;
        int i = ruleString.length - 1;
        while (i >= 0) {
            if (ruleString[i] == -1) {
                ruleNumArray[i]++;
                if (ruleNumArray[i] == numStates) {
                    // wildcard digit overflowed: reset it and carry leftwards.
                    ruleNumArray[i] = 0;
                    i--;
                } else {
                    numRulesWritten++;
                    ruleNum = toInt(ruleNumArray, numStates);
                    ruleset[ruleNum] = result;
                    // restart scanning from the rightmost (least significant) digit.
                    i = ruleString.length - 1;
                    System.out.println("ruleNum = " + ruleNum);
                }
            } else {
                // fixed digit: cannot be incremented, carry past it.
                i--;
            }
        }
    }

    /**
     * Interprets the int array as an integer in base-n, with the first array
     * element as the most significant digit.
     *
     * @param array the digits, most significant first
     * @param n     the base
     * @return the decoded integer value
     */
    private static int toInt(int[] array, int n) {
        int output = 0;
        int digitWeight = array.length - 1;
        for (int i = 0; i < array.length; i++) {
            if (array[i] != 0) {
                output += array[i] * (int) Math.pow(n, digitWeight);
            }
            digitWeight--;
        }
        return output;
    }

    /***********************
     ****** Evaluation *****
     ***********************/

    /**
     * Evaluates the grid according to the rules of this Automata and returns
     * the result.
     *
     * @param grid
     *            The grid to evaluate
     * @param w
     *            The width of the grid in cells
     * @param h
     *            The height of the grid in cells
     * @param buffer
     *            A caller-supplied grid (at least w x h) the next generation
     *            is written into; also the returned object
     * @return The next generation grid (the buffer argument)
     */
    public int[][] evalGrid(int[][] grid, int w, int h, int[][] buffer) {
        for (int i = 0; i < w; i++) {
            for (int j = 0; j < h; j++) {
                buffer[i][j] = evalCell(grid, w, h, i, j);
            }
        }
        return buffer;
    }

    /**
     * Evaluates the (i,j) cell according to the rules of this Automata and
     * return the result.
     *
     * @param currGenGrid
     *            The grid pre-evaluation
     * @param w
     *            The width of the grid in cells
     * @param h
     *            The height of the grid in cells
     * @param i
     *            The i index of the cell
     * @param j
     *            The j index of the cell
     * @return The state of the (i,j) cell after evaluating it
     */
    private int evalCell(int[][] currGenGrid, int w, int h, int i, int j) {
        // enumerate through this cell's neighbors
        // and write to the ruleNum digit by digit, left to right
        // to eventually write the number into ruleNum that corresponds to the
        // appropriate rule
        int ruleNum = 0;
        int digitWeight = ruleset.length / numStates; // which equals
                                                      // pow(numStates,
                                                      // numNeighbors)/numStates
        for (int k = 0; k < neighborFunction.length; k++) {
            // boundary functions map out-of-range coordinates back into the grid
            int neighbor_x = xEdgeFunction.execute(i + neighborFunction[k].x, w);
            int neighbor_y = yEdgeFunction.execute(j + neighborFunction[k].y, h);
            int digitVal = currGenGrid[neighbor_x][neighbor_y];
            ruleNum += digitWeight * digitVal;
            digitWeight /= numStates;
        }
        // once the number of the rule is found, use the number to look up the
        // result of applying the rule
        // (does it result in a live cell or a dead cell?)
        int result = ruleset[ruleNum];
        return result;
    }

    /********************
     ****** Getters *****
     ********************/

    /**
     * Gives the number of possible states a cell can be in.
     *
     * @return The number of possible states a cell can be in
     */
    public int numStates() {
        return numStates;
    }

    /**
     * Gives the number of rules in the Automata, including the rules not
     * explicitly defined by the user. The number of rules is a function of the
     * size of the neighborhood. Specifically, numRules =
     * numStates^numNeighbors.
     *
     * @return The number of rules in the Automata
     */
    public int numRules() {
        return ruleset.length;
    }

    /**
     * Gives the number of neighbors in the Automata's neighborhood.
     *
     * @return The size of the neighborhood
     */
    public int numNeighbors() {
        return neighborFunction.length;
    }

    /**
     * Gives the coordinate of the neighbor (how many cells left and down the
     * neighbor cell is from the center cell)
     *
     * @param neighborNum
     *            The index of the neighbor
     * @return The coordinate of the neighbor
     */
    public Vertex neighborInRelationToCell(int neighborNum) {
        return neighborFunction[neighborNum];
    }

    /**
     * Gives the number of cells between and including the leftmost cell in the
     * neighborhood and the rightmost cell in the neighborhood.
     *
     * @return The width of the neighborhood
     */
    public int widthOfNeighborhood() {
        return widthOfNeighborhood;
    }

    /**
     * Gives the number of cells between and including the lowest cell in the
     * neighborhood and the highest cell in the neighborhood.
     *
     * @return The height of the neighborhood
     */
    public int heightOfNeighborhood() {
        return heightOfNeighborhood;
    }

    /********************
     ****** Utility *****
     ********************/
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.util.collection;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ArrayUtilsTest extends HazelcastTestSupport {

    @Test
    public void testConstructor() {
        // ArrayUtils is a static utility class; its constructor must be private.
        assertUtilityConstructor(ArrayUtils.class);
    }

    @Test
    public void createCopy_whenZeroLengthArray_thenReturnDifferentZeroLengthArray() {
        Object[] source = new Object[0];

        Object[] copy = ArrayUtils.createCopy(source);

        assertThat(copy, not(sameInstance(source)));
        assertThat(copy, emptyArray());
    }

    @Test
    public void createCopy_whenNonZeroLengthArray_thenReturnDifferentArrayWithItems() {
        Object item = new Object();
        Object[] source = {item};

        Object[] copy = ArrayUtils.createCopy(source);

        assertThat(copy, not(sameInstance(source)));
        assertThat(copy, arrayWithSize(1));
        assertThat(copy[0], sameInstance(item));
    }

    @Test
    public void copyWithoutNulls_whenSrcHasNullItem_thenDoNotCopyItIntoTarget() {
        Object[] source = {null};
        Object[] target = new Object[0];

        ArrayUtils.copyWithoutNulls(source, target);

        assertThat(target, emptyArray());
    }

    @Test
    public void copyWithoutNulls_whenSrcHasNonNullItem_thenCopyItIntoTarget() {
        Object item = new Object();
        Object[] source = {item};
        Object[] target = new Object[1];

        ArrayUtils.copyWithoutNulls(source, target);

        assertThat(target, arrayWithSize(1));
        assertThat(target[0], sameInstance(item));
    }

    @Test
    public void contains() {
        Object element = new Object();
        Object[] items = {element};

        assertTrue(ArrayUtils.contains(items, element));
    }

    @Test
    public void contains_returnsFalse() {
        Object[] items = {new Object()};

        assertFalse(ArrayUtils.contains(items, new Object()));
    }

    @Test
    public void contains_nullValue() {
        Object[] items = {null};

        assertTrue(ArrayUtils.contains(items, null));
    }

    @Test
    public void contains_nullValue_returnsFalse() {
        Object[] items = {new Object()};

        assertFalse(ArrayUtils.contains(items, null));
    }

    @Test
    public void getItemAtPositionOrNull_whenEmptyArray_thenReturnNull() {
        Object[] source = new Object[0];

        assertNull(ArrayUtils.getItemAtPositionOrNull(source, 0));
    }

    @Test
    public void getItemAtPositionOrNull_whenPositionExist_thenReturnTheItem() {
        Object item = new Object();
        Object[] source = {item};

        assertSame(item, ArrayUtils.getItemAtPositionOrNull(source, 0));
    }

    @Test
    public void getItemAtPositionOrNull_whenSmallerArray_thenReturNull() {
        Object item = new Object();
        Object[] source = {item};

        // index 1 is past the end of a one-element array
        assertNull(ArrayUtils.getItemAtPositionOrNull(source, 1));
    }

    @Test
    public void concat() {
        Integer[] left = {1, 2, 3};
        Integer[] right = {4};
        Integer[] merged = new Integer[4];

        ArrayUtils.concat(left, right, merged);

        assertEquals(4, merged.length);
        assertEquals(Integer.valueOf(1), merged[0]);
        assertEquals(Integer.valueOf(2), merged[1]);
        assertEquals(Integer.valueOf(3), merged[2]);
        assertEquals(Integer.valueOf(4), merged[3]);
    }

    @Test(expected = NullPointerException.class)
    public void concat_whenFirstNull() {
        Integer[] right = {4};
        Integer[] merged = new Integer[4];

        ArrayUtils.concat(null, right, merged);
        fail();
    }

    @Test(expected = NullPointerException.class)
    public void concat_whenSecondNull() {
        Integer[] left = {1, 2, 3};
        Integer[] merged = new Integer[4];

        ArrayUtils.concat(left, null, merged);
        fail();
    }

    @Test(expected = NullPointerException.class)
    public void concat_whenDestNull() {
        Integer[] left = {1, 2, 3};
        Integer[] right = {4};

        ArrayUtils.concat(left, right, null);
        fail();
    }

    @Test
    public void boundsCheck() {
        // index 0 with length 10 fits within capacity 100
        ArrayUtils.boundsCheck(100, 0, 10);
    }

    @Test
    public void boundsCheck_allZero() {
        ArrayUtils.boundsCheck(0, 0, 0);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenMoreThanCapacity() {
        ArrayUtils.boundsCheck(100, 0, 110);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenIndexSmallerThanZero() {
        ArrayUtils.boundsCheck(100, -1, 110);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenLengthSmallerThanZero() {
        ArrayUtils.boundsCheck(100, 0, -1);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenCapacitySmallerThanZero() {
        ArrayUtils.boundsCheck(-1, 0, 0);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenLengthIntegerMax() {
        // testing that integer wrapping does not cause a false pass
        ArrayUtils.boundsCheck(0, 10, Integer.MAX_VALUE);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenIndexIntegerMax() {
        // testing that integer wrapping does not cause a false pass
        ArrayUtils.boundsCheck(100, Integer.MAX_VALUE, 1);
    }

    @Test(expected = IndexOutOfBoundsException.class)
    public void boundsCheck_whenCapacityIntegerMin() {
        ArrayUtils.boundsCheck(Integer.MIN_VALUE, 0, 100);
    }
}
| |
package hycz.cassandra.testBase;
import java.io.*;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.thrift.TException;
import org.apache.cassandra.client.*;
import org.apache.cassandra.dht.RandomPartitioner;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.*;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNull;
/**
 * Base class for Cassandra (Thrift) cluster integration tests. Provides
 * keyspace-creation helpers, retrying read/write actions that tolerate
 * transient cluster states, and ring/endpoint lookups against a cluster
 * managed by {@link CassandraServiceController}.
 */
public abstract class TestBase
{
    private static final Logger logger = LoggerFactory.getLogger(TestBase.class);

    // Shared controller for the externally managed test cluster.
    protected static CassandraServiceController controller =
        CassandraServiceController.getInstance();

    /** Fluent builder that creates a keyspace containing a single "Standard1" column family. */
    static class KeyspaceCreation
    {
        private String name;
        private int rf; // replication factor passed as a strategy option
        private CfDef cfdef;

        public KeyspaceCreation(String name)
        {
            this.name = name;
            // Fixed column family definition with caching/memtable tuning for the tests.
            cfdef = new CfDef(name, "Standard1");
            cfdef.setComparator_type("BytesType");
            cfdef.setKey_cache_size(10000);
            cfdef.setRow_cache_size(1000);
            cfdef.setRow_cache_save_period_in_seconds(0);
            cfdef.setKey_cache_save_period_in_seconds(3600);
            cfdef.setMemtable_flush_after_mins(59);
            cfdef.setMemtable_throughput_in_mb(255);
            cfdef.setMemtable_operations_in_millions(0.29);
        }

        /** Sets the default validation class for column values. */
        public KeyspaceCreation validator(String validator)
        {
            cfdef.setDefault_validation_class(validator);
            return this;
        }

        /** Sets the replication factor for the keyspace. */
        public KeyspaceCreation rf(int rf)
        {
            this.rf = rf;
            return this;
        }

        /**
         * Creates the keyspace via the first host, then polls every host
         * until the keyspace is visible in its schema (hosts that throw a
         * Thrift error are skipped).
         */
        public void create() throws Exception
        {
            List<InetAddress> hosts = controller.getHosts();
            Cassandra.Client client = controller.createClient(hosts.get(0));
            Map<String,String> stratOptions = new HashMap<String,String>();
            stratOptions.put("replication_factor", "" + rf);
            // NOTE(review): cfdef is passed to the KsDef constructor but then
            // overwritten by setCf_defs(emptyList()) — confirm this is intended.
            client.system_add_keyspace(new KsDef(name,
                                                 "org.apache.cassandra.locator.SimpleStrategy",
                                                 Arrays.asList(cfdef))
                                       .setStrategy_options(stratOptions)
                                       .setCf_defs(Collections.<CfDef>emptyList()));
            // poll, until KS added
            for (InetAddress host : hosts)
            {
                try
                {
                    client = controller.createClient(host);
                    poll:
                    while (true)
                    {
                        List<KsDef> ksDefList = client.describe_keyspaces();
                        for (KsDef ks : ksDefList)
                        {
                            if (ks.name.equals(name))
                                break poll;
                        }
                        try
                        {
                            Thread.sleep(1000);
                        }
                        catch (InterruptedException e)
                        {
                            // give up polling this host if interrupted
                            break poll;
                        }
                    }
                }
                catch (TException te)
                {
                    // host unreachable or errored: move on to the next one
                    continue;
                }
            }
        }
    }

    /** Starts a keyspace-creation builder with the given name. */
    protected static KeyspaceCreation keyspace(String name)
    {
        return new KeyspaceCreation(name);
    }

    /** Convenience: creates a keyspace with the given name and replication factor. */
    protected static void addKeyspace(String name, int rf) throws Exception
    {
        keyspace(name).rf(rf).create();
    }

    @BeforeClass
    public static void setUp() throws Exception
    {
        controller.ensureClusterRunning();
    }

    /** Produces a fresh row key based on the current time (unique per millisecond). */
    protected ByteBuffer newKey()
    {
        return ByteBuffer.wrap(String.format("test.key.%d", System.currentTimeMillis()).getBytes());
    }

    /** Inserts a single column (name/value/timestamp) into the given column family. */
    protected void insert(Cassandra.Client client, ByteBuffer key, String cf, String name, String value, long timestamp, ConsistencyLevel cl)
        throws InvalidRequestException, UnavailableException, TimedOutException, TException
    {
        Column col = new Column(ByteBuffer.wrap(name.getBytes()))
                         .setValue(ByteBuffer.wrap(value.getBytes()))
                         .setTimestamp(timestamp);
        client.insert(key, new ColumnParent(cf), col, cl);
    }

    /** Reads a single column by name at the given consistency level. */
    protected Column getColumn(Cassandra.Client client, ByteBuffer key, String cf, String col, ConsistencyLevel cl)
        throws InvalidRequestException, UnavailableException, TimedOutException, TException, NotFoundException
    {
        ColumnPath cpath = new ColumnPath(cf);
        cpath.setColumn(col.getBytes());
        return client.get(key, cpath, cl).column;
    }

    /** Retrying read action: succeeds when the column matches name/value/timestamp. */
    protected class Get extends RetryingAction<String>
    {
        public Get(Cassandra.Client client, String cf, ByteBuffer key)
        {
            super(client, cf, key);
        }

        public void tryPerformAction(ConsistencyLevel cl) throws Exception
        {
            assertColumnEqual(name, value, timestamp, getColumn(client, key, cf, name, cl));
        }
    }

    /** Retrying write action: succeeds when the insert completes. */
    protected class Insert extends RetryingAction<String>
    {
        public Insert(Cassandra.Client client, String cf, ByteBuffer key)
        {
            super(client, cf, key);
        }

        public void tryPerformAction(ConsistencyLevel cl) throws Exception
        {
            insert(client, key, cf, name, value, timestamp, cl);
        }
    }

    /** Performs an action repeatedly until timeout, success or failure. */
    protected abstract class RetryingAction<T>
    {
        protected Cassandra.Client client;
        protected String cf;
        protected ByteBuffer key;
        protected String name;
        protected T value;
        protected long timestamp;
        // Exception classes whose occurrence counts as success (e.g. expecting NotFound).
        private Set<Class<Exception>> expected = new HashSet<Class<Exception>>();
        private long timeout = StorageService.RING_DELAY;

        // NOTE(review): the fluent setters below return the raw RetryingAction
        // type; RetryingAction<T> would preserve generics — confirm before changing.
        public RetryingAction(Cassandra.Client client, String cf, ByteBuffer key)
        {
            this.client = client;
            this.cf = cf;
            this.key = key;
            this.timestamp = 0;
        }

        /** The column name for the action. */
        public RetryingAction name(String name)
        {
            this.name = name; return this;
        }

        /** A parameterized value for the action. */
        public RetryingAction value(T value)
        {
            this.value = value; return this;
        }

        /** The total time to allow before failing. */
        public RetryingAction timeout(long timeout)
        {
            this.timeout = timeout; return this;
        }

        /** The expected timestamp of the returned column. */
        public RetryingAction timestamp(long timestamp)
        {
            this.timestamp = timestamp; return this;
        }

        /** The exception classes that indicate success. */
        public RetryingAction expecting(Class... tempExceptions)
        {
            this.expected.clear();
            for (Class exclass : tempExceptions)
                expected.add((Class<Exception>)exclass); // unchecked: caller must pass exception classes
            return this;
        }

        /**
         * Retries the action until it succeeds, throws an expected exception,
         * or the timeout elapses; an unexpected exception at timeout causes
         * an AssertionError carrying the last exception seen.
         */
        public void perform(ConsistencyLevel cl) throws AssertionError
        {
            long deadline = System.currentTimeMillis() + timeout;
            int attempts = 0;
            String template = "%s for " + this + " after %d attempt(s) with %d ms to spare.";
            Exception e = null;
            while(deadline > System.currentTimeMillis())
            {
                try
                {
                    attempts++;
                    tryPerformAction(cl);
                    logger.info(String.format(template, "Succeeded", attempts, deadline - System.currentTimeMillis()));
                    return;
                }
                catch (Exception ex)
                {
                    e = ex;
                    // unexpected exceptions are retried until the deadline
                    if (!expected.contains(ex.getClass()))
                        continue;
                    logger.info(String.format(template, "Caught expected exception: " + e, attempts, deadline - System.currentTimeMillis()));
                    return;
                }
            }
            String err = String.format(template, "Caught unexpected: " + e, attempts, deadline - System.currentTimeMillis());
            logger.error(err, e);
            throw new AssertionError(err);
        }

        public String toString()
        {
            return this.getClass().getSimpleName() + "(" + key + "," + name + ")";
        }

        protected abstract void tryPerformAction(ConsistencyLevel cl) throws Exception;
    }

    /** Reads up to 1000 columns of the row (full-range slice, ascending order). */
    protected List<ColumnOrSuperColumn> get_slice(Cassandra.Client client, ByteBuffer key, String cf, ConsistencyLevel cl)
        throws InvalidRequestException, UnavailableException, TimedOutException, TException
    {
        SlicePredicate sp = new SlicePredicate();
        // empty start/finish means the entire row
        sp.setSlice_range(
            new SliceRange(
                ByteBuffer.wrap(new byte[0]),
                ByteBuffer.wrap(new byte[0]),
                false,
                1000
            )
        );
        return client.get_slice(key, new ColumnParent(cf), sp, cl);
    }

    /** Asserts that the column's name, value and timestamp all match. */
    protected void assertColumnEqual(String name, String value, long timestamp, Column col)
    {
        assertEquals(ByteBuffer.wrap(name.getBytes()), col.name);
        assertEquals(ByteBuffer.wrap(value.getBytes()), col.value);
        assertEquals(timestamp, col.timestamp);
    }

    /**
     * Resolves the replica endpoints for a key via a RingCache seeded from
     * the given host, mapping each private address to its public counterpart.
     */
    protected List<InetAddress> endpointsForKey(InetAddress seed, ByteBuffer key, String keyspace)
        throws IOException
    {
        RingCache ring = new RingCache(keyspace, new RandomPartitioner(), seed.getHostAddress(), 9160);
        List<InetAddress> privateendpoints = ring.getEndpoint(key);
        List<InetAddress> endpoints = new ArrayList<InetAddress>();
        for (InetAddress endpoint : privateendpoints)
        {
            endpoints.add(controller.getPublicHost(endpoint));
        }
        return endpoints;
    }

    /**
     * Returns a host that is NOT a replica for the given key, or null when
     * every host in the cluster holds the key.
     */
    protected InetAddress nonEndpointForKey(InetAddress seed, ByteBuffer key, String keyspace)
        throws IOException
    {
        List<InetAddress> endpoints = endpointsForKey(seed, key, keyspace);
        for (InetAddress host : controller.getHosts())
        {
            if (!endpoints.contains(host))
            {
                return host;
            }
        }
        return null;
    }
}
| |
package org.hisp.dhis.sms.outcoming;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.oust.manager.SelectionTreeManager;
import org.hisp.dhis.scheduling.TaskCategory;
import org.hisp.dhis.scheduling.TaskId;
import org.hisp.dhis.sms.outbound.OutboundSmsTransportService;
import org.hisp.dhis.sms.task.SendSmsTask;
import org.hisp.dhis.system.notification.Notifier;
import org.hisp.dhis.system.scheduling.Scheduler;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserGroup;
import org.hisp.dhis.user.UserGroupService;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.opensymphony.xwork2.Action;
/**
* @author Dang Duy Hieu
*/
public class ProcessingSendSMSAction
implements Action
{
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
@Autowired
private SelectionTreeManager selectionTreeManager;
@Autowired
private CurrentUserService currentUserService;
@Autowired
private UserGroupService userGroupService;
@Autowired
private OutboundSmsTransportService transportService;
@Autowired
private Scheduler scheduler;
@Autowired
private Notifier notifier;
@Autowired
private SendSmsTask sendSmsTask;
// -------------------------------------------------------------------------
// Input & Output
// -------------------------------------------------------------------------
private String gatewayId;
public void setGatewayId( String gatewayId )
{
this.gatewayId = gatewayId;
}
private String smsSubject;
public void setSmsSubject( String smsSubject )
{
this.smsSubject = smsSubject;
}
private String text;
public void setText( String text )
{
this.text = text;
}
private String sendTarget;
public void setSendTarget( String sendTarget )
{
this.sendTarget = sendTarget;
}
private Integer userGroup;
public void setUserGroup( Integer userGroup )
{
this.userGroup = userGroup;
}
private Set<String> recipients = new HashSet<>();
public void setRecipients( Set<String> recipients )
{
this.recipients = recipients;
}
private String message = "success";
public String getMessage()
{
return message;
}
// -------------------------------------------------------------------------
// I18n
// -------------------------------------------------------------------------
private I18n i18n;
public void setI18n( I18n i18n )
{
this.i18n = i18n;
}
// -------------------------------------------------------------------------
// Action Implementation
// -------------------------------------------------------------------------
@Override
@SuppressWarnings( "unchecked" )
public String execute()
throws Exception
{
gatewayId = transportService.getDefaultGateway();
if ( gatewayId == null || gatewayId.trim().length() == 0 )
{
message = i18n.getString( "please_select_a_gateway_type_to_send_sms" );
return ERROR;
}
if ( text == null || text.trim().length() == 0 )
{
message = i18n.getString( "no_message" );
return ERROR;
}
User currentUser = currentUserService.getCurrentUser();
List<User> recipientsList = new ArrayList<>();
// Set<User> recipientsList = new HashSet<User>();
if ( sendTarget != null && sendTarget.equals( "phone" ) )
{
ObjectMapper mapper = new ObjectMapper().setVisibility( PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY );
mapper.disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES );
recipients = mapper.readValue( recipients.iterator().next(), Set.class );
for ( String each : recipients )
{
if ( !each.startsWith( "+" ) )
{
each = "+" + each;
}
User user = new User();
user.setPhoneNumber( each );
recipientsList.add( user );
}
// message = messageSender.sendMessage( smsSubject, smsMessage,
// currentUser, true, recipients, gatewayId );
}
else if ( sendTarget.equals( "userGroup" ) )
{
UserGroup group = userGroupService.getUserGroup( userGroup );
if ( group == null )
{
message = i18n.getString( "selected_user_group_is_unavailable" );
return ERROR;
}
if ( group.getMembers() == null || group.getMembers().isEmpty() )
{
message = i18n.getString( "selected_user_group_has_no_member" );
return ERROR;
}
recipientsList = new ArrayList<>( group.getMembers());
}
else if ( sendTarget.equals( "user" ) )
{
Collection<OrganisationUnit> units = selectionTreeManager.getReloadedSelectedOrganisationUnits();
if ( units != null && !units.isEmpty() )
{
for ( OrganisationUnit unit : units )
{
recipientsList.addAll( unit.getUsers() );
}
if ( recipientsList.isEmpty() )
{
message = i18n.getString( "there_is_no_user_assigned_to_selected_units" );
return ERROR;
}
// message = messageSender.sendMessage( smsSubject, smsMessage,
// currentUser, false, users, gatewayId );
}
}
else if ( sendTarget.equals( "unit" ) )
{
for ( OrganisationUnit unit : selectionTreeManager.getSelectedOrganisationUnits() )
{
if ( unit.getPhoneNumber() != null && !unit.getPhoneNumber().isEmpty() )
{
User user = new User();
user.setPhoneNumber( unit.getPhoneNumber() );
recipientsList.add( user );
}
}
if ( recipientsList.isEmpty() )
{
message = i18n.getString( "selected_units_have_no_phone_number" );
return ERROR;
}
}
TaskId taskId = new TaskId( TaskCategory.SENDING_SMS, currentUser );
notifier.clear( taskId );
sendSmsTask.setTaskId( taskId );
sendSmsTask.setCurrentUser( currentUser );
sendSmsTask.setRecipientsList( recipientsList );
sendSmsTask.setSmsSubject( smsSubject );
sendSmsTask.setText( text );
scheduler.executeTask( sendSmsTask );
if ( message != null && !message.equals( "success" ) )
{
message = i18n.getString( message );
return ERROR;
}
if ( message == null )
{
message = "An inter error occurs, please contact your administration";
return ERROR;
}
return SUCCESS;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.hints;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.collect.ImmutableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.ParameterizedClass;
import org.apache.cassandra.metrics.HintedHandoffMetrics;
import org.apache.cassandra.metrics.StorageMetrics;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageService;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Iterables.size;
/**
 * A singleton-ish wrapper over various hints components:
 * - a catalog of all hints stores
 * - a single-threaded write executor
 * - a multi-threaded dispatch executor
 * - the buffer pool for writing hints into
 *
 * The front-end for everything hints related.
 */
public final class HintsService implements HintsServiceMBean
{
    private static final Logger logger = LoggerFactory.getLogger(HintsService.class);

    public static final HintsService instance = new HintsService();

    private static final String MBEAN_NAME = "org.apache.cassandra.hints:type=HintsService";

    // lower bound on the shared hints buffer size: 32 MiB (32 << 20 bytes)
    private static final int MIN_BUFFER_SIZE = 32 << 20;

    static final ImmutableMap<String, Object> EMPTY_PARAMS = ImmutableMap.of();

    private final HintsCatalog catalog;
    private final HintsWriteExecutor writeExecutor;
    private final HintsBufferPool bufferPool;
    private final HintsDispatchExecutor dispatchExecutor;

    // dispatch starts out paused; startDispatch()/resumeDispatch() flip it to false
    private final AtomicBoolean isDispatchPaused;

    private volatile boolean isShutDown = false;

    // periodic buffer-flushing task, scheduled once in the constructor
    private final ScheduledFuture triggerFlushingFuture;
    // periodic dispatch trigger; only set once startDispatch() has been called
    private volatile ScheduledFuture triggerDispatchFuture;

    public final HintedHandoffMetrics metrics;

    private HintsService()
    {
        File hintsDirectory = DatabaseDescriptor.getHintsDirectory();
        int maxDeliveryThreads = DatabaseDescriptor.getMaxHintsDeliveryThreads();
        catalog = HintsCatalog.load(hintsDirectory, createDescriptorParams());
        writeExecutor = new HintsWriteExecutor(catalog);
        // buffer must be able to hold at least two of the largest possible mutations
        int bufferSize = Math.max(DatabaseDescriptor.getMaxMutationSize() * 2, MIN_BUFFER_SIZE);
        bufferPool = new HintsBufferPool(bufferSize, writeExecutor::flushBuffer);
        isDispatchPaused = new AtomicBoolean(true);
        dispatchExecutor = new HintsDispatchExecutor(hintsDirectory, maxDeliveryThreads, isDispatchPaused);
        // periodically empty the current content of the buffers
        int flushPeriod = DatabaseDescriptor.getHintsFlushPeriodInMS();
        triggerFlushingFuture = ScheduledExecutors.optionalTasks.scheduleWithFixedDelay(() -> writeExecutor.flushBufferPool(bufferPool),
                                                                                        flushPeriod,
                                                                                        flushPeriod,
                                                                                        TimeUnit.MILLISECONDS);
        metrics = new HintedHandoffMetrics();
    }

    /**
     * Builds the immutable descriptor parameter map for new hints files,
     * currently carrying only the (optional) hints compression configuration.
     */
    private static ImmutableMap<String, Object> createDescriptorParams()
    {
        ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
        ParameterizedClass compressionConfig = DatabaseDescriptor.getHintsCompression();
        if (compressionConfig != null)
        {
            ImmutableMap.Builder<String, Object> compressorParams = ImmutableMap.builder();
            compressorParams.put(ParameterizedClass.CLASS_NAME, compressionConfig.class_name);
            if (compressionConfig.parameters != null)
            {
                compressorParams.put(ParameterizedClass.PARAMETERS, compressionConfig.parameters);
            }
            builder.put(HintsDescriptor.COMPRESSION, compressorParams.build());
        }
        return builder.build();
    }

    /**
     * Registers this service with the platform MBean server under {@link #MBEAN_NAME}.
     * Registration failures are rethrown as unchecked - the node cannot meaningfully proceed without it.
     */
    public void registerMBean()
    {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(this, new ObjectName(MBEAN_NAME));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Write a hint for an iterable of nodes.
     *
     * @param hostIds host ids of the hint's target nodes
     * @param hint the hint to store
     * @throws IllegalStateException if the service has been shut down
     */
    public void write(Iterable<UUID> hostIds, Hint hint)
    {
        if (isShutDown)
            throw new IllegalStateException("HintsService is shut down and can't accept new hints");
        // we have to make sure that the HintsStore instances get properly initialized - otherwise dispatch will not trigger
        catalog.maybeLoadStores(hostIds);
        // an expired (non-live) hint is still counted in the metric below, but never buffered
        if (hint.isLive())
            bufferPool.write(hostIds, hint);
        StorageMetrics.totalHints.inc(size(hostIds));
    }

    /**
     * Write a hint for a single node.
     *
     * @param hostId host id of the hint's target node
     * @param hint the hint to store
     */
    public void write(UUID hostId, Hint hint)
    {
        write(Collections.singleton(hostId), hint);
    }

    /**
     * Write a hint for all replicas. Used to re-dispatch hints whose destination is either missing or no longer correct.
     */
    void writeForAllReplicas(Hint hint)
    {
        String keyspaceName = hint.mutation.getKeyspaceName();
        Token token = hint.mutation.key().getToken();
        // target every natural and pending endpoint for the token that is currently hintable
        Iterable<UUID> hostIds =
        transform(filter(StorageService.instance.getNaturalAndPendingEndpoints(keyspaceName, token), StorageProxy::shouldHint),
                  StorageService.instance::getHostIdForEndpoint);
        write(hostIds, hint);
    }

    /**
     * Flush the buffer pool for the selected target nodes, then fsync their writers.
     *
     * @param hostIds host ids of the nodes to flush and fsync hints for
     */
    public void flushAndFsyncBlockingly(Iterable<UUID> hostIds)
    {
        Iterable<HintsStore> stores = transform(hostIds, catalog::get);
        writeExecutor.flushBufferPool(bufferPool, stores);
        writeExecutor.fsyncWritersBlockingly(stores);
    }

    /**
     * Unpauses dispatch and schedules the periodic dispatch trigger.
     *
     * @throws IllegalStateException if the service has been shut down
     */
    public synchronized void startDispatch()
    {
        if (isShutDown)
            throw new IllegalStateException("HintsService is shut down and cannot be restarted");
        isDispatchPaused.set(false);
        HintsDispatchTrigger trigger = new HintsDispatchTrigger(catalog, writeExecutor, dispatchExecutor, isDispatchPaused);
        // triggering hint dispatch is now very cheap, so we can do it more often - every 10 seconds vs. every 10 minutes,
        // previously; this reduces mean time to delivery, and positively affects batchlog delivery latencies, too
        triggerDispatchFuture = ScheduledExecutors.scheduledTasks.scheduleWithFixedDelay(trigger, 10, 10, TimeUnit.SECONDS);
    }

    /** Pauses hint dispatch; the trigger keeps firing but sessions will not run while paused. */
    public void pauseDispatch()
    {
        logger.info("Paused hints dispatch");
        isDispatchPaused.set(true);
    }

    /** Resumes hint dispatch after a pauseDispatch() call. */
    public void resumeDispatch()
    {
        logger.info("Resumed hints dispatch");
        isDispatchPaused.set(false);
    }

    /**
     * Gracefully and blockingly shut down the service.
     *
     * Will abort dispatch sessions that are currently in progress (which is okay, it's idempotent),
     * and make sure the buffers are flushed, hints files written and fsynced.
     *
     * @throws IllegalStateException if called more than once
     */
    public synchronized void shutdownBlocking()
    {
        if (isShutDown)
            throw new IllegalStateException("HintsService has already been shut down");
        isShutDown = true;
        // stop the periodic triggers first so no new work is scheduled mid-shutdown
        if (triggerDispatchFuture != null)
            triggerDispatchFuture.cancel(false);
        pauseDispatch();
        triggerFlushingFuture.cancel(false);
        // persist whatever is still buffered before tearing the executors down
        writeExecutor.flushBufferPool(bufferPool);
        writeExecutor.closeAllWriters();
        dispatchExecutor.shutdownBlocking();
        writeExecutor.shutdownBlocking();
    }

    /**
     * Deletes all hints for all destinations. Doesn't make snapshots - should be used with care.
     */
    public void deleteAllHints()
    {
        catalog.deleteAllHints();
    }

    /**
     * Deletes all hints for the provided destination. Doesn't make snapshots - should be used with care.
     *
     * @param address inet address of the target node - encoded as a string for easier JMX consumption
     * @throws IllegalArgumentException if the address cannot be resolved
     */
    public void deleteAllHintsForEndpoint(String address)
    {
        InetAddress target;
        try
        {
            target = InetAddress.getByName(address);
        }
        catch (UnknownHostException e)
        {
            throw new IllegalArgumentException(e);
        }
        deleteAllHintsForEndpoint(target);
    }

    /**
     * Deletes all hints for the provided destination. Doesn't make snapshots - should be used with care.
     *
     * @param target inet address of the target node
     * @throws IllegalArgumentException if no host id is known for the address
     */
    public void deleteAllHintsForEndpoint(InetAddress target)
    {
        UUID hostId = StorageService.instance.getHostIdForEndpoint(target);
        if (hostId == null)
            throw new IllegalArgumentException("Can't delete hints for unknown address " + target);
        catalog.deleteAllHints(hostId);
    }

    /**
     * Cleans up hints-related state after a node with id = hostId left.
     *
     * Dispatcher should stop itself (isHostAlive() will start returning false for the leaving host), but we'll wait for
     * completion anyway.
     *
     * We should also flush the buffer if there are any hints for the node there, and close the writer (if any),
     * so that we don't leave any hint files lying around.
     *
     * Once that is done, we can simply delete all hint files and remove the host id from the catalog.
     *
     * The worst that can happen if we don't get everything right is a hints file (or two) remaining undeleted.
     *
     * @param hostId id of the node being excised
     */
    public void excise(UUID hostId)
    {
        HintsStore store = catalog.get(hostId);
        if (store == null)
            return;
        // flush the buffer and then close the writer for the excised host id, to make sure that no new files will appear
        // for this host id after we are done
        Future flushFuture = writeExecutor.flushBufferPool(bufferPool, Collections.singleton(store));
        Future closeFuture = writeExecutor.closeWriter(store);
        try
        {
            flushFuture.get();
            closeFuture.get();
        }
        catch (InterruptedException | ExecutionException e)
        {
            throw new RuntimeException(e);
        }
        // wait for the current dispatch session to end (if any), so that the currently dispatched file gets removed
        dispatchExecutor.completeDispatchBlockingly(store);
        // delete all the hints files and remove the HintsStore instance from the map in the catalog
        catalog.exciseStore(hostId);
    }

    /**
     * Transfer all local hints to the hostId supplied by hostIdSupplier
     *
     * Flushes the buffer to make sure all hints are on disk and closes the hint writers
     * so we don't leave any hint files around.
     *
     * After that, we serially dispatch all the hints in the HintsCatalog.
     *
     * If we fail delivering all hints, we will ask the hostIdSupplier for a new target host
     * and retry delivering any remaining hints there, once, with a delay of 10 seconds before retrying.
     *
     * @param hostIdSupplier supplier of stream target host ids. This is generally
     *                       the closest one according to the DynamicSnitch
     * @return When this future is done, it either has streamed all hints to remote nodes or has failed with a proper
     *         log message
     */
    public Future transferHints(Supplier<UUID> hostIdSupplier)
    {
        // make sure all buffered hints are on disk and no writers hold files open
        Future flushFuture = writeExecutor.flushBufferPool(bufferPool);
        Future closeFuture = writeExecutor.closeAllWriters();
        try
        {
            flushFuture.get();
            closeFuture.get();
        }
        catch (InterruptedException | ExecutionException e)
        {
            throw new RuntimeException(e);
        }
        // unpause dispatch, or else transfer() will return immediately
        resumeDispatch();
        // wait for the current dispatch session to end
        catalog.stores().forEach(dispatchExecutor::completeDispatchBlockingly);
        return dispatchExecutor.transfer(catalog, hostIdSupplier);
    }

    /** Package-private accessor for the hints store catalog (used by sibling hints classes). */
    HintsCatalog getCatalog()
    {
        return catalog;
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.util;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.icons.AllIcons;
import com.intellij.ide.*;
import com.intellij.ide.actions.ViewStructureAction;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.ide.structureView.ModelListener;
import com.intellij.ide.structureView.StructureView;
import com.intellij.ide.structureView.StructureViewModel;
import com.intellij.ide.structureView.impl.common.PsiTreeElementBase;
import com.intellij.ide.structureView.newStructureView.StructureViewComponent;
import com.intellij.ide.structureView.newStructureView.TreeActionWrapper;
import com.intellij.ide.structureView.newStructureView.TreeActionsOwner;
import com.intellij.ide.structureView.newStructureView.TreeModelWrapper;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.ide.util.treeView.smartTree.*;
import com.intellij.navigation.LocationPresentation;
import com.intellij.openapi.MnemonicHelper;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.ui.popup.HintUpdateSupply;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.ui.speedSearch.ElementFilter;
import com.intellij.ui.tree.AsyncTreeModel;
import com.intellij.ui.tree.StructureTreeModel;
import com.intellij.ui.tree.TreeVisitor;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.ui.treeStructure.filtered.FilteringTreeStructure;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.text.TextRangeUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.TextTransferable;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.xml.util.XmlStringUtil;
import consulo.disposer.Disposable;
import consulo.disposer.Disposer;
import consulo.logging.Logger;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.*;
import java.util.List;
import java.util.*;
import java.util.function.BiPredicate;
/**
* @author Konstantin Bulenkov
*/
public class FileStructurePopup implements Disposable, TreeActionsOwner {
private static final Logger LOG = Logger.getInstance(FileStructurePopup.class);
// persisted user preference: whether typing narrows the tree down to matches
private static final String NARROW_DOWN_PROPERTY_KEY = "FileStructurePopup.narrowDown";
private final Project myProject;
private final FileEditor myFileEditor;
// myTreeModel wrapped with the actions owner, so filter/sorter toggles take effect
private final StructureViewModel myTreeModelWrapper;
private final StructureViewModel myTreeModel;
private final TreeStructureActionsOwner myTreeActionsOwner;
// created in show(); null until then
private JBPopup myPopup;
private String myTitle;
private final Tree myTree;
private final SmartTreeStructure myTreeStructure;
// speed-search-aware filtering layer on top of myTreeStructure
private final FilteringTreeStructure myFilteringStructure;
private final AsyncTreeModel myAsyncTreeModel;
private final StructureTreeModel myStructureTreeModel;
private final TreeSpeedSearch mySpeedSearch;
// editor element that was current when the popup was created; used for initial selection
private final Object myInitialElement;
// filter/provider class -> its checkbox in the top panel
private final Map<Class, JBCheckBox> myCheckBoxes = new HashMap<>();
// checkboxes auto-clicked while searching (to widen an empty result)
private final List<JBCheckBox> myAutoClicked = new ArrayList<>();
// search filter injected by tests; NOTE(review): appears unused in this chunk - verify before removing
private String myTestSearchFilter;
// completes once the tree has been built and the initial selection installed
private final ActionCallback myTreeHasBuilt = new ActionCallback();
// (filter-at-click-time, checkbox) pairs, newest first; undone on backspace
private final List<Pair<String, JBCheckBox>> myTriggeredCheckboxes = new ArrayList<>();
private final TreeExpander myTreeExpander;
private final CopyPasteDelegator myCopyPasteDelegator;
// set to false while a nested settings popup is open, so the main popup survives focus loss
private boolean myCanClose = true;
private boolean myDisposed;
/**
 * Deprecated compatibility constructor: adapts a ready-made {@link StructureView} into a
 * {@link StructureViewModel} and delegates to the main constructor. The structure view's
 * lifetime is tied to this popup via {@link Disposer}.
 *
 * @noinspection unused
 */
@Deprecated
public FileStructurePopup(@Nonnull Project project, @Nonnull FileEditor fileEditor, @Nonnull StructureView structureView, boolean applySortAndFilter) {
    this(project, fileEditor, ViewStructureAction.createStructureViewModel(project, fileEditor, structureView));
    Disposer.register(this, structureView);
}
/**
 * Builds the popup's tree stack (model wrapper -> smart structure -> filtering structure ->
 * structure/async tree models -> tree), wires speed search, copy/paste and model listeners.
 * The popup itself is created later, in {@code show()}.
 *
 * @param project   current project
 * @param fileEditor editor whose file structure is shown
 * @param treeModel structure view model backing the tree
 */
public FileStructurePopup(@Nonnull Project project, @Nonnull FileEditor fileEditor, @Nonnull StructureViewModel treeModel) {
    myProject = project;
    myFileEditor = fileEditor;
    myTreeModel = treeModel;
    //Stop code analyzer to speedup EDT
    DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
    // keep keystrokes typed before the tree is ready, replay them once myTreeHasBuilt completes
    IdeFocusManager.getInstance(myProject).typeAheadUntil(myTreeHasBuilt, "FileStructurePopup");
    myTreeActionsOwner = new TreeStructureActionsOwner(myTreeModel);
    myTreeActionsOwner.setActionIncluded(Sorter.ALPHA_SORTER, true);
    myTreeModelWrapper = new TreeModelWrapper(myTreeModel, myTreeActionsOwner);
    Disposer.register(this, myTreeModelWrapper);
    myTreeStructure = new SmartTreeStructure(project, myTreeModelWrapper) {
        @Override
        public void rebuildTree() {
            // popup may already be gone when a rebuild arrives asynchronously
            if (!ApplicationManager.getApplication().isUnitTestMode() && myPopup.isDisposed()) {
                return;
            }
            ProgressManager.getInstance().computePrioritized(() -> {
                super.rebuildTree();
                myFilteringStructure.rebuild();
                return null;
            });
        }

        @Override
        public boolean isToBuildChildrenInBackground(@Nonnull Object element) {
            // only the root's children are expensive enough to build in the background
            return getRootElement() == element;
        }

        @Nonnull
        @Override
        protected TreeElementWrapper createTree() {
            return StructureViewComponent.createWrapper(myProject, myModel.getRoot(), myModel);
        }

        @NonNls
        @Override
        public String toString() {
            return "structure view tree structure(model=" + myTreeModelWrapper + ")";
        }
    };
    FileStructurePopupFilter filter = new FileStructurePopupFilter();
    myFilteringStructure = new FilteringTreeStructure(filter, myTreeStructure, false);
    myStructureTreeModel = new StructureTreeModel<>(myFilteringStructure, this);
    myAsyncTreeModel = new AsyncTreeModel(myStructureTreeModel, this);
    myAsyncTreeModel.setRootImmediately(myStructureTreeModel.getRootImmediately());
    myTree = new MyTree(myAsyncTreeModel);
    StructureViewComponent.registerAutoExpandListener(myTree, myTreeModel);
    ModelListener modelListener = () -> rebuild(false);
    myTreeModel.addModelListener(modelListener);
    Disposer.register(this, () -> myTreeModel.removeModelListener(modelListener));
    myTree.setCellRenderer(new NodeRenderer());
    // UI setting changes (fonts, icons, ...) require a re-render
    myProject.getMessageBus().connect(this).subscribe(UISettingsListener.TOPIC, o -> rebuild(false));
    myTree.setTransferHandler(new TransferHandler() {
        // paste into the tree feeds the clipboard string into speed search
        @Override
        public boolean importData(@Nonnull TransferSupport support) {
            String s = CopyPasteManager.getInstance().getContents(DataFlavor.stringFlavor);
            if (s != null && !mySpeedSearch.isPopupActive()) {
                mySpeedSearch.showPopup(s);
                return true;
            }
            return false;
        }

        // copy produces the text of the selected elements, skipping elements whose ancestor is also selected
        @Nullable
        @Override
        protected Transferable createTransferable(JComponent component) {
            JBIterable<Pair<FilteringTreeStructure.FilteringNode, PsiElement>> pairs = JBIterable.of(myTree.getSelectionPaths()).filterMap(TreeUtil::getLastUserObject).filter(FilteringTreeStructure.FilteringNode.class)
                    .filterMap(o -> o.getDelegate() instanceof PsiElement ? Pair.create(o, (PsiElement)o.getDelegate()) : null).collect();
            if (pairs.isEmpty()) return null;
            Set<PsiElement> psiSelection = pairs.map(Functions.pairSecond()).toSet();
            String text = StringUtil.join(pairs, pair -> {
                PsiElement psi = pair.second;
                String defaultPresentation = pair.first.getPresentation().getPresentableText();
                if (psi == null) return defaultPresentation;
                for (PsiElement p = psi.getParent(); p != null; p = p.getParent()) {
                    if (psiSelection.contains(p)) return null;
                }
                return ObjectUtils.chooseNotNull(psi.getText(), defaultPresentation);
            }, "\n");
            String htmlText = "<body>\n" + text + "\n</body>";
            return new TextTransferable(XmlStringUtil.wrapInHtml(htmlText), text);
        }

        @Override
        public int getSourceActions(JComponent component) {
            return COPY;
        }
    });
    mySpeedSearch = new MyTreeSpeedSearch();
    mySpeedSearch.setComparator(new SpeedSearchComparator(false, true) {
        @Nonnull
        @Override
        protected MinusculeMatcher createMatcher(@Nonnull String pattern) {
            return NameUtil.buildMatcher(pattern).withSeparators(" ()").build();
        }
    });
    myTreeExpander = new DefaultTreeExpander(myTree);
    myCopyPasteDelegator = new CopyPasteDelegator(myProject, myTree);
    myInitialElement = myTreeModel.getCurrentEditorElement();
    TreeUtil.installActions(myTree);
}
/**
 * Creates and shows the popup centered in the current window, requests focus for the tree,
 * then rebuilds the structure and selects the element the editor caret was on.
 * Order matters: the popup must exist before listeners referencing {@code myPopup} fire.
 */
public void show() {
    JComponent panel = createCenterPanel();
    MnemonicHelper.init(panel);
    // keep any hint/preview popup (quick doc etc.) in sync with the selection
    myTree.addTreeSelectionListener(__ -> {
        if (myPopup.isVisible()) {
            PopupUpdateProcessor updateProcessor = myPopup.getUserData(PopupUpdateProcessor.class);
            if (updateProcessor != null) {
                AbstractTreeNode node = getSelectedNode();
                updateProcessor.updatePopup(node);
            }
        }
    });
    myPopup = JBPopupFactory.getInstance().createComponentPopupBuilder(panel, myTree).setTitle(myTitle).setResizable(true).setModalContext(false).setFocusable(true).setRequestFocus(true)
            .setMovable(true).setBelongsToGlobalPopupStack(true)
            //.setCancelOnClickOutside(false) //for debug and snapshots
            .setCancelOnOtherWindowOpen(true).setCancelKeyEnabled(false).setDimensionServiceKey(null, getDimensionServiceKey(), true).setCancelCallback(() -> myCanClose).setNormalWindowLevel(true)
            .createPopup();
    Disposer.register(myPopup, this);
    // if the popup dies before the tree is built, release the type-ahead callback
    Disposer.register(myPopup, () -> {
        if (!myTreeHasBuilt.isDone()) {
            myTreeHasBuilt.setRejected();
        }
    });
    myTree.getEmptyText().setText("Loading...");
    myPopup.showCenteredInCurrentWindow(myProject);
    ((AbstractPopup)myPopup).setShowHints(true);
    IdeFocusManager.getInstance(myProject).requestFocus(myTree, true);
    rebuildAndSelect(false, myInitialElement).onProcessed(path -> UIUtil.invokeLaterIfNeeded(() -> {
        TreeUtil.ensureSelection(myTree);
        myTreeHasBuilt.setDone();
        installUpdater();
    }));
}
/**
 * Installs a self-rescheduling 300 ms alarm that polls the speed-search prefix and rebuilds
 * the filtered tree when it changes. On backspace it undoes auto-clicked checkboxes; when a
 * filter yields no results it auto-enables an unchecked filter checkbox to widen the search.
 * No-op in unit-test mode or when the popup is already disposed.
 */
private void installUpdater() {
    if (ApplicationManager.getApplication().isUnitTestMode() || myPopup.isDisposed()) {
        return;
    }
    // alarm is disposed together with the popup
    Alarm alarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, myPopup);
    alarm.addRequest(new Runnable() {
        // last filter text we rebuilt for
        String filter = "";

        @Override
        public void run() {
            alarm.cancelAllRequests();
            String prefix = mySpeedSearch.getEnteredPrefix();
            myTree.getEmptyText().setText(StringUtil.isEmpty(prefix) ? "Structure is empty" : "'" + prefix + "' not found");
            if (prefix == null) prefix = "";
            if (!filter.equals(prefix)) {
                boolean isBackspace = prefix.length() < filter.length();
                filter = prefix;
                rebuild(true).onProcessed(ignore -> UIUtil.invokeLaterIfNeeded(() -> {
                    if (isDisposed()) return;
                    TreeUtil.promiseExpandAll(myTree);
                    if (isBackspace && handleBackspace(filter)) {
                        return;
                    }
                    // nothing matched: auto-enable one disabled filter checkbox and remember it
                    if (myFilteringStructure.getRootElement().getChildren().length == 0) {
                        for (JBCheckBox box : myCheckBoxes.values()) {
                            if (!box.isSelected()) {
                                myAutoClicked.add(box);
                                myTriggeredCheckboxes.add(0, Pair.create(filter, box));
                                box.doClick();
                                filter = "";
                                break;
                            }
                        }
                    }
                }));
            }
            // reschedule ourselves until the alarm (i.e. the popup) is disposed
            if (!alarm.isDisposed()) {
                alarm.addRequest(this, 300);
            }
        }
    }, 300);
}
/**
 * Undoes checkbox auto-clicks that were triggered while the search filter was at least as
 * long as the current (shortened) filter. Entries are stored newest-first, so we pop from
 * the front until we reach one recorded under a strictly shorter filter.
 *
 * @param filter the speed-search text after the backspace
 * @return true if at least one checkbox was toggled back
 */
private boolean handleBackspace(String filter) {
    boolean undidAny = false;
    for (Iterator<Pair<String, JBCheckBox>> it = myTriggeredCheckboxes.iterator(); it.hasNext(); ) {
        Pair<String, JBCheckBox> entry = it.next();
        if (entry.getFirst().length() < filter.length()) {
            break;
        }
        it.remove();
        entry.getSecond().doClick();
        undidAny = true;
    }
    return undidAny;
}
/**
 * Asynchronously locates and selects the tree path representing {@code element}.
 *
 * Visits the async tree model looking for an exact match (or a node that can represent the
 * element). As an optimization, subtrees whose PSI value is not an ancestor of the target are
 * skipped; if that optimization caused a miss, the visit is retried once without it. If no
 * exact node exists, falls back to the deepest ancestor path seen, refined by the child
 * closest to the element's text range.
 *
 * @param element the element (typically a PsiElement) to select
 * @return promise resolved with the selected path, or rejected if nothing could be selected
 */
@Nonnull
public Promise<TreePath> select(Object element) {
    int[] stage = {1, 0}; // 1 - first pass, 2 - optimization applied, 3 - retry w/o optimization
    // stage[1] tracks the depth of the deepest ancestor path found so far
    TreePath[] deepestPath = {null};
    TreeVisitor visitor = path -> {
        Object last = path.getLastPathComponent();
        Object userObject = StructureViewComponent.unwrapNavigatable(last);
        Object value = StructureViewComponent.unwrapValue(last);
        if (Comparing.equal(value, element) || userObject instanceof AbstractTreeNode && ((AbstractTreeNode)userObject).canRepresent(element)) {
            return TreeVisitor.Action.INTERRUPT;
        }
        if (value instanceof PsiElement && element instanceof PsiElement) {
            if (PsiTreeUtil.isAncestor((PsiElement)value, (PsiElement)element, true)) {
                // remember the deepest ancestor path as a fallback selection target
                int count = path.getPathCount();
                if (stage[1] == 0 || stage[1] < count) {
                    stage[1] = count;
                    deepestPath[0] = path;
                }
            }
            else if (stage[0] != 3) {
                // not an ancestor: skip this subtree (unless we're in the no-optimization retry)
                stage[0] = 2;
                return TreeVisitor.Action.SKIP_CHILDREN;
            }
        }
        return TreeVisitor.Action.CONTINUE;
    };
    // applies the final selection once a path has been chosen
    Function<TreePath, Promise<TreePath>> action = path -> {
        myTree.expandPath(path);
        TreeUtil.selectPath(myTree, path);
        TreeUtil.ensureSelection(myTree);
        return Promises.resolvedPromise(path);
    };
    Function<TreePath, Promise<TreePath>> fallback = new Function<TreePath, Promise<TreePath>>() {
        @Override
        public Promise<TreePath> fun(TreePath path) {
            if (path == null && stage[0] == 2) {
                // Some structure views merge unrelated psi elements into a structure node (MarkdownStructureViewModel).
                // So turn off the isAncestor() optimization and retry once.
                stage[0] = 3;
                return myAsyncTreeModel.accept(visitor).thenAsync(this);
            }
            else {
                // use the deepest ancestor path when no exact match was found
                TreePath adjusted = path == null ? deepestPath[0] : path;
                if (path == null && adjusted != null && element instanceof PsiElement) {
                    Object minChild = findClosestPsiElement((PsiElement)element, adjusted, myAsyncTreeModel);
                    if (minChild != null) adjusted = adjusted.pathByAddingChild(minChild);
                }
                return adjusted == null ? Promises.rejectedPromise() : action.fun(adjusted);
            }
        }
    };
    return myAsyncTreeModel.accept(visitor).thenAsync(fallback);
}
/**
 * Test-only helper: rebuilds the tree (without filtering) and then refreshes every
 * visible {@link AbstractTreeNode}'s presentation. The returned promise completes
 * when both the rebuild and the node update pass have finished.
 */
@TestOnly
public AsyncPromise<Void> rebuildAndUpdate() {
    AsyncPromise<Void> result = new AsyncPromise<>();
    TreeVisitor visitor = path -> {
        AbstractTreeNode node = TreeUtil.getLastUserObject(AbstractTreeNode.class, path);
        if (node != null) node.update();
        return TreeVisitor.Action.CONTINUE;
    };
    rebuild(false).onProcessed(ignore1 -> myAsyncTreeModel.accept(visitor).onProcessed(ignore2 -> result.setResult(null)));
    return result;
}
/** @return true once {@link #dispose()} has been called */
public boolean isDisposed() {
    return myDisposed;
}
/** Marks this popup as disposed; child disposables registered via Disposer are released by the framework. */
@Override
public void dispose() {
    myDisposed = true;
}
/** @return the persisted "narrow down on typing" preference; defaults to true */
private static boolean isShouldNarrowDown() {
    return PropertiesComponent.getInstance().getBoolean(NARROW_DOWN_PROPERTY_KEY, true);
}
/** Key under which the popup's size/location are persisted by the dimension service. */
@NonNls
protected static String getDimensionServiceKey() {
    return "StructurePopup";
}
/**
 * Returns the PSI element the editor caret is currently on.
 *
 * Commits pending documents first, then asks the wrapped tree model; if the model has no
 * PSI answer, falls back to resolving the caret offset inside {@code psiFile} (text editors only).
 *
 * @param psiFile file used for the caret-offset fallback; may be null
 * @return the current PSI element, or null if it cannot be determined
 */
@Nullable
public PsiElement getCurrentElement(@Nullable final PsiFile psiFile) {
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();
    Object modelElement = myTreeModelWrapper.getCurrentEditorElement();
    if (modelElement instanceof PsiElement) {
        return (PsiElement)modelElement;
    }
    if (psiFile == null || !(myFileEditor instanceof TextEditor)) {
        return null;
    }
    int caretOffset = ((TextEditor)myFileEditor).getEditor().getCaretModel().getOffset();
    return psiFile.getViewProvider().findElementAt(caretOffset);
}
/**
 * Assembles the popup's content: a top panel of filter/provider checkboxes plus the settings
 * button, and the structure tree in a scroll pane. Also installs navigation shortcuts
 * (Enter / Edit Source), the Escape handler, a click-to-navigate listener, and a data
 * provider exposing selection to platform actions.
 *
 * @return the fully wired center panel
 */
public JComponent createCenterPanel() {
    List<FileStructureFilter> fileStructureFilters = new ArrayList<>();
    List<FileStructureNodeProvider> fileStructureNodeProviders = new ArrayList<>();
    if (myTreeActionsOwner != null) {
        // collect filters the model declares; each becomes a checkbox and is enabled by default
        for (Filter filter : myTreeModel.getFilters()) {
            if (filter instanceof FileStructureFilter) {
                FileStructureFilter fsFilter = (FileStructureFilter)filter;
                myTreeActionsOwner.setActionIncluded(fsFilter, true);
                fileStructureFilters.add(fsFilter);
            }
        }
        if (myTreeModel instanceof ProvidingTreeModel) {
            for (NodeProvider provider : ((ProvidingTreeModel)myTreeModel).getNodeProviders()) {
                if (provider instanceof FileStructureNodeProvider) {
                    fileStructureNodeProviders.add((FileStructureNodeProvider)provider);
                }
            }
        }
    }
    int checkBoxCount = fileStructureNodeProviders.size() + fileStructureFilters.size();
    JPanel panel = new JPanel(new BorderLayout());
    panel.setPreferredSize(JBUI.size(540, 500));
    // lay checkboxes out in columns; multiples of 4 get a two-row grid, otherwise 3 columns
    JPanel chkPanel = new JPanel(new GridLayout(0, checkBoxCount > 0 && checkBoxCount % 4 == 0 ? checkBoxCount / 2 : 3, JBUI.scale(UIUtil.DEFAULT_HGAP), 0));
    chkPanel.setOpaque(false);
    // navigate on either Enter or the Edit Source shortcut (usually F4)
    Shortcut[] F4 = ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet().getShortcuts();
    Shortcut[] ENTER = CustomShortcutSet.fromString("ENTER").getShortcuts();
    CustomShortcutSet shortcutSet = new CustomShortcutSet(ArrayUtil.mergeArrays(F4, ENTER));
    new DumbAwareAction() {
        @Override
        public void actionPerformed(@Nonnull AnActionEvent e) {
            boolean succeeded = navigateSelectedElement();
            if (succeeded) {
                unregisterCustomShortcutSet(panel);
            }
        }
    }.registerCustomShortcutSet(shortcutSet, panel);
    // Escape closes the speed-search popup first, then the whole popup
    DumbAwareAction.create(e -> {
        if (mySpeedSearch != null && mySpeedSearch.isPopupActive()) {
            mySpeedSearch.hidePopup();
        }
        else {
            myPopup.cancel();
        }
    }).registerCustomShortcutSet(CustomShortcutSet.fromString("ESCAPE"), myTree);
    // single click inside a row's bounds navigates to the element
    new ClickListener() {
        @Override
        public boolean onClick(@Nonnull MouseEvent e, int clickCount) {
            TreePath path = myTree.getClosestPathForLocation(e.getX(), e.getY());
            Rectangle bounds = path == null ? null : myTree.getPathBounds(path);
            if (bounds == null || bounds.x > e.getX() || bounds.y > e.getY() || bounds.y + bounds.height < e.getY()) return false;
            navigateSelectedElement();
            return true;
        }
    }.installOn(myTree);
    for (FileStructureFilter filter : fileStructureFilters) {
        addCheckbox(chkPanel, filter);
    }
    for (FileStructureNodeProvider provider : fileStructureNodeProviders) {
        addCheckbox(chkPanel, provider);
    }
    JPanel topPanel = new JPanel(new BorderLayout());
    topPanel.add(chkPanel, BorderLayout.WEST);
    topPanel.add(createSettingsButton(), BorderLayout.EAST);
    topPanel.setBackground(JBUI.CurrentTheme.Popup.toolbarPanelColor());
    Dimension prefSize = topPanel.getPreferredSize();
    prefSize.height = JBUI.CurrentTheme.Popup.toolbarHeight();
    topPanel.setPreferredSize(prefSize);
    topPanel.setBorder(JBUI.Borders.emptyLeft(UIUtil.DEFAULT_HGAP));
    panel.add(topPanel, BorderLayout.NORTH);
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree);
    scrollPane.setBorder(IdeBorderFactory.createBorder(JBUI.CurrentTheme.Popup.toolbarBorderColor(), SideBorder.TOP | SideBorder.BOTTOM));
    panel.add(scrollPane, BorderLayout.CENTER);
    // expose project / editor / selection data to platform actions invoked inside the popup
    DataManager.registerDataProvider(panel, dataId -> {
        if (CommonDataKeys.PROJECT == dataId) {
            return myProject;
        }
        if (PlatformDataKeys.FILE_EDITOR == dataId) {
            return myFileEditor;
        }
        if (OpenFileDescriptor.NAVIGATE_IN_EDITOR == dataId) {
            if (myFileEditor instanceof TextEditor) {
                return ((TextEditor)myFileEditor).getEditor();
            }
        }
        if (CommonDataKeys.PSI_ELEMENT == dataId) {
            return getSelectedElements().filter(PsiElement.class).first();
        }
        if (LangDataKeys.PSI_ELEMENT_ARRAY == dataId) {
            return PsiUtilCore.toPsiElementArray(getSelectedElements().filter(PsiElement.class).toList());
        }
        if (CommonDataKeys.NAVIGATABLE == dataId) {
            return getSelectedElements().filter(Navigatable.class).first();
        }
        if (CommonDataKeys.NAVIGATABLE_ARRAY == dataId) {
            List<Navigatable> result = getSelectedElements().filter(Navigatable.class).toList();
            return result.isEmpty() ? null : result.toArray(new Navigatable[0]);
        }
        if (LangDataKeys.POSITION_ADJUSTER_POPUP == dataId) {
            return myPopup;
        }
        if (PlatformDataKeys.COPY_PROVIDER == dataId) {
            return myCopyPasteDelegator.getCopyProvider();
        }
        if (PlatformDataKeys.TREE_EXPANDER == dataId) {
            return myTreeExpander;
        }
        return null;
    });
    // losing focus dismisses the popup (unless myCanClose vetoes via the cancel callback)
    panel.addFocusListener(new FocusAdapter() {
        @Override
        public void focusLost(FocusEvent e) {
            myPopup.cancel();
        }
    });
    return panel;
}
/** Unwrapped values (typically PSI elements) of the currently selected tree paths. */
@Nonnull
private JBIterable<Object> getSelectedElements() {
    return JBIterable.of(myTree.getSelectionPaths()).filterMap(o -> StructureViewComponent.unwrapValue(o.getLastPathComponent()));
}
/**
 * Builds the gear-icon label that, when clicked, shows a settings popup with the sorter
 * actions and the "narrow down on typing" toggle. While that nested popup is open,
 * {@code myCanClose} is false so the main popup is not cancelled by the focus change.
 *
 * @return the clickable settings label
 */
@Nonnull
private JComponent createSettingsButton() {
    JLabel label = new JBLabel(AllIcons.General.GearPlain);
    label.setBorder(JBUI.Borders.empty(0, 4));
    label.setHorizontalAlignment(SwingConstants.RIGHT);
    label.setVerticalAlignment(SwingConstants.CENTER);
    List<AnAction> sorters = createSorters();
    new ClickListener() {
        @Override
        public boolean onClick(@Nonnull MouseEvent event, int clickCount) {
            DefaultActionGroup group = new DefaultActionGroup();
            if (!sorters.isEmpty()) {
                group.addAll(sorters);
                group.addSeparator();
            }
            //addGroupers(group);
            //addFilters(group);
            group.add(new ToggleAction(IdeBundle.message("checkbox.narrow.down.on.typing")) {
                @Override
                public boolean isSelected(@Nonnull AnActionEvent e) {
                    return isShouldNarrowDown();
                }

                @Override
                public void setSelected(@Nonnull AnActionEvent e, boolean state) {
                    PropertiesComponent.getInstance().setValue(NARROW_DOWN_PROPERTY_KEY, Boolean.toString(state));
                    // re-apply the active search filter immediately under the new setting
                    if (mySpeedSearch.isPopupActive() && !StringUtil.isEmpty(mySpeedSearch.getEnteredPrefix())) {
                        rebuild(true);
                    }
                }
            });
            DataManager dataManager = DataManager.getInstance();
            ListPopup popup = JBPopupFactory.getInstance().createActionGroupPopup(null, group, dataManager.getDataContext(label), JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false);
            popup.addListener(new JBPopupListener() {
                @Override
                public void onClosed(@Nonnull LightweightWindowEvent event) {
                    myCanClose = true;
                }
            });
            // keep the main popup alive while the settings popup owns focus
            myCanClose = false;
            popup.showUnderneathOf(label);
            return true;
        }
    }.installOn(label);
    return label;
}
/** Wraps every visible sorter of the tree model in a toggleable popup action. */
private List<AnAction> createSorters() {
  List<AnAction> result = new ArrayList<>();
  for (Sorter sorter : myTreeModel.getSorters()) {
    if (!sorter.isVisible()) {
      continue;
    }
    result.add(new MyTreeActionWrapper(sorter));
  }
  return result;
}
/**
 * Among the children of the last node on {@code adjusted}, finds the one whose
 * text range is closest to {@code element}'s range.
 *
 * @return the closest child node, or {@code null} if none has a usable range
 */
@Nullable
private static Object findClosestPsiElement(@Nonnull PsiElement element, @Nonnull TreePath adjusted, @Nonnull TreeModel treeModel) {
  TextRange range = element.getTextRange();
  if (range == null) return null;
  Object parent = adjusted.getLastPathComponent();
  Object best = null;
  int bestDistance = 0;
  int childCount = treeModel.getChildCount(parent);
  for (int index = 0; index < childCount; index++) {
    Object child = treeModel.getChild(parent, index);
    Object value = StructureViewComponent.unwrapValue(child);
    // Stub-backed elements are skipped — their offsets are not comparable here.
    if (value instanceof StubBasedPsiElement && ((StubBasedPsiElement)value).getStub() != null) continue;
    TextRange childRange = value instanceof PsiElement ? ((PsiElement)value).getTextRange() : null;
    if (childRange == null) continue;
    int distance = TextRangeUtil.getDistance(range, childRange);
    if (best == null || distance < bestDistance) {
      bestDistance = distance;
      best = child;
    }
  }
  return best;
}
/**
 * Wraps a {@link TreeAction} (e.g. a sorter) so it can be toggled from the
 * settings popup; toggling persists the state and rebuilds the tree.
 */
private class MyTreeActionWrapper extends TreeActionWrapper {
private final TreeAction myAction;
MyTreeActionWrapper(TreeAction action) {
super(action, myTreeActionsOwner);
myAction = action;
// Seed the owner with the persisted (or default) state for this action.
myTreeActionsOwner.setActionIncluded(action, getDefaultValue(action));
}
@Override
public void update(@Nonnull AnActionEvent e) {
super.update(e);
// No icon in the settings popup; the toggle state is enough.
e.getPresentation().setIcon(null);
}
@Override
public void setSelected(@Nonnull AnActionEvent e, boolean state) {
// "Reverted" actions store the inverse of their UI state.
boolean actionState = TreeModelWrapper.shouldRevert(myAction) != state;
myTreeActionsOwner.setActionIncluded(myAction, actionState);
saveState(myAction, state);
rebuild(false).onProcessed(ignore -> {
// Restore speed-search highlighting after the rebuild completes.
if (mySpeedSearch.isPopupActive()) {
mySpeedSearch.refreshSelection();
}
});
}
}
/**
 * Resolves the tree's single selected path to an {@link AbstractTreeNode}.
 *
 * @return the selected node, or {@code null} if nothing suitable is selected
 */
@Nullable
private AbstractTreeNode getSelectedNode() {
  TreePath selection = myTree.getSelectionPath();
  Object unwrapped = StructureViewComponent.unwrapNavigatable(selection == null ? null : selection.getLastPathComponent());
  if (unwrapped instanceof AbstractTreeNode) {
    return (AbstractTreeNode)unwrapped;
  }
  return null;
}
/**
 * Navigates to the source of the selected node inside a command (so the jump
 * is recorded in IDE navigation history) and cancels the popup on success.
 *
 * @return {@code true} if navigation took place, {@code false} otherwise
 */
private boolean navigateSelectedElement() {
AbstractTreeNode selectedNode = getSelectedNode();
if (ApplicationManager.getApplication().isInternal()) {
// Internal mode only: log which item the speed-search prefix resolved to.
String enteredPrefix = mySpeedSearch.getEnteredPrefix();
String itemText = getSpeedSearchText(selectedNode);
if (StringUtil.isNotEmpty(enteredPrefix) && StringUtil.isNotEmpty(itemText)) {
LOG.info("Chosen in file structure popup by prefix '" + enteredPrefix + "': '" + itemText + "'");
}
}
Ref<Boolean> succeeded = new Ref<>();
CommandProcessor commandProcessor = CommandProcessor.getInstance();
commandProcessor.executeCommand(myProject, () -> {
if (selectedNode != null) {
if (selectedNode.canNavigateToSource()) {
// Close the popup before the editor takes focus.
selectedNode.navigate(true);
myPopup.cancel();
succeeded.set(true);
}
else {
succeeded.set(false);
}
}
else {
succeeded.set(false);
}
// Record this command as a navigation step either way.
IdeDocumentHistory.getInstance(myProject).includeCurrentCommandAsNavigation();
}, "Navigate", null);
return succeeded.get();
}
/**
 * Adds a header checkbox for a filter / node-provider action. Actions that
 * expose no checkbox text are skipped.
 *
 * @param panel  header panel receiving the checkbox
 * @param action filter or node provider backing the checkbox
 */
private void addCheckbox(JPanel panel, TreeAction action) {
String text = action instanceof FileStructureFilter
? ((FileStructureFilter)action).getCheckBoxText()
: action instanceof FileStructureNodeProvider ? ((FileStructureNodeProvider)action).getCheckBoxText() : null;
if (text == null) return;
Shortcut[] shortcuts = extractShortcutFor(action);
JBCheckBox checkBox = new JBCheckBox();
checkBox.setOpaque(false);
UIUtil.applyStyle(UIUtil.ComponentStyle.SMALL, checkBox);
boolean selected = getDefaultValue(action);
checkBox.setSelected(selected);
// "Reverted" filters store the inverse of the checkbox state.
boolean isRevertedStructureFilter = action instanceof FileStructureFilter && ((FileStructureFilter)action).isReverted();
myTreeActionsOwner.setActionIncluded(action, isRevertedStructureFilter != selected);
checkBox.addActionListener(__ -> {
boolean state = checkBox.isSelected();
// Programmatic "auto" clicks must not overwrite the user's saved preference.
if (!myAutoClicked.contains(checkBox)) {
saveState(action, state);
}
myTreeActionsOwner.setActionIncluded(action, isRevertedStructureFilter != state);
rebuild(false).onProcessed(ignore -> {
if (mySpeedSearch.isPopupActive()) {
mySpeedSearch.refreshSelection();
}
});
});
checkBox.setFocusable(false);
if (shortcuts.length > 0) {
// Show the first shortcut in the label and register all of them on the tree.
text += " (" + KeymapUtil.getShortcutText(shortcuts[0]) + ")";
DumbAwareAction.create(e -> checkBox.doClick()).registerCustomShortcutSet(new CustomShortcutSet(shortcuts), myTree);
}
checkBox.setText(StringUtil.capitalize(StringUtil.trimStart(text.trim(), "Show ")));
panel.add(checkBox);
myCheckBoxes.put(action.getClass(), checkBox);
}
/**
 * Rebuilds the tree, keeping (and afterwards restoring) the first selected
 * value.
 *
 * @param refilterOnly {@code true} to only re-apply the current filter
 * @return promise resolved when the rebuild and re-selection are done
 */
@Nonnull
private Promise<Void> rebuild(boolean refilterOnly) {
// Reuse getSelectedElements() instead of duplicating its unwrap pipeline inline.
Object selection = getSelectedElements().first();
return rebuildAndSelect(refilterOnly, selection).then(ignore -> null);
}
/**
 * Rebuilds the tree model and restores the given selection.
 *
 * @param refilterOnly {@code true} to only re-apply the filter; {@code false}
 *                     rebuilds the structure first, then recurses with
 *                     {@code true}
 * @param selection    value to re-select afterwards, or {@code null}
 * @return promise resolving to the selected path, or failing with "rejected"
 */
@Nonnull
private Promise<TreePath> rebuildAndSelect(boolean refilterOnly, Object selection) {
AsyncPromise<TreePath> result = new AsyncPromise<>();
// All model mutations must run on the structure model's invoker thread.
myStructureTreeModel.getInvoker().runOrInvokeLater(() -> {
if (refilterOnly) {
myFilteringStructure.refilter();
// With no selection to restore, just walk the tree to force it to load.
myStructureTreeModel.invalidate().onSuccess(
res -> (selection == null ? myAsyncTreeModel.accept(o -> TreeVisitor.Action.CONTINUE) : select(selection)).onError(ignore2 -> result.setError("rejected")).onSuccess(p -> UIUtil.invokeLaterIfNeeded(() -> {
// Composite models get one extra expansion level.
TreeUtil.expand(getTree(), myTreeModel instanceof StructureViewCompositeModel ? 3 : 2);
TreeUtil.ensureSelection(myTree);
mySpeedSearch.refreshSelection();
result.setResult(p);
})));
}
else {
// Full rebuild, then recurse once in refilter-only mode.
myTreeStructure.rebuildTree();
myStructureTreeModel.invalidate().onSuccess(res -> rebuildAndSelect(true, selection).processed(result));
}
});
return result;
}
/**
 * Resolves the shortcuts for a tree action: from the active keymap when the
 * action names a registered action id, otherwise from the action itself.
 */
@Nonnull
static Shortcut[] extractShortcutFor(@Nonnull TreeAction action) {
  if (action instanceof ActionShortcutProvider) {
    String actionId = ((ActionShortcutProvider)action).getActionIdForShortcut();
    return KeymapUtil.getActiveKeymapShortcuts(actionId).getShortcuts();
  }
  if (action instanceof FileStructureFilter) {
    return ((FileStructureFilter)action).getShortcut();
  }
  // NOTE(review): any remaining action is assumed to be a FileStructureNodeProvider.
  return ((FileStructureNodeProvider)action).getShortcut();
}
/**
 * Reads the persisted on/off state of a tree action; the alphabetical sorter
 * is the only action that defaults to enabled.
 */
private static boolean getDefaultValue(TreeAction action) {
  boolean defaultState = Sorter.ALPHA_SORTER.equals(action);
  String property;
  if (action instanceof PropertyOwner) {
    property = ((PropertyOwner)action).getPropertyName();
  } else {
    property = action.getName();
  }
  return PropertiesComponent.getInstance().getBoolean(TreeStructureUtil.getPropertyName(property), defaultState);
}
/** Persists the on/off state of a tree action under its property name. */
private static void saveState(TreeAction action, boolean state) {
  String property;
  if (action instanceof PropertyOwner) {
    property = ((PropertyOwner)action).getPropertyName();
  } else {
    property = action.getName();
  }
  PropertiesComponent.getInstance().setValue(TreeStructureUtil.getPropertyName(property), state);
}
/** Stores the title text used for this popup. */
public void setTitle(String title) {
myTitle = title;
}
/** @return the tree component shown inside the popup */
@Nonnull
public Tree getTree() {
return myTree;
}
/** @return the popup's speed search instance (exposed for tests only) */
@TestOnly
public TreeSpeedSearch getSpeedSearch() {
return mySpeedSearch;
}
/** Overrides the search prefix used while running in unit-test mode. */
@TestOnly
public void setSearchFilterForTests(String filter) {
myTestSearchFilter = filter;
}
/**
 * Programmatically toggles the checkbox registered for the given action class
 * and fires its listeners (plain setSelected does not emit an ActionEvent).
 */
public void setTreeActionState(Class<? extends TreeAction> action, boolean state) {
  JBCheckBox box = myCheckBoxes.get(action);
  if (box == null) {
    return;
  }
  box.setSelected(state);
  for (ActionListener listener : box.getActionListeners()) {
    listener.actionPerformed(new ActionEvent(this, 1, ""));
  }
}
/**
 * Returns the text the speed search matches against for a tree node,
 * appending the location string when the element opts into location-based
 * search.
 *
 * @param object tree node, wrapper, or value; may be {@code null}
 * @return the match text, or {@code null} if none can be derived
 */
@Nullable
public static String getSpeedSearchText(Object object) {
String text = String.valueOf(object);
Object value = StructureViewComponent.unwrapWrapper(object);
// String.valueOf can still yield null when a toString() implementation returns null.
if (text != null) {
if (value instanceof PsiTreeElementBase && ((PsiTreeElementBase)value).isSearchInLocationString()) {
String locationString = ((PsiTreeElementBase)value).getLocationString();
if (!StringUtil.isEmpty(locationString)) {
String locationPrefix = null;
String locationSuffix = null;
if (value instanceof LocationPresentation) {
locationPrefix = ((LocationPresentation)value).getLocationPrefix();
locationSuffix = ((LocationPresentation)value).getLocationSuffix();
}
// Combine node text + decorated location string into one searchable string.
return text +
StringUtil.notNullize(locationPrefix, LocationPresentation.DEFAULT_LOCATION_PREFIX) +
locationString +
StringUtil.notNullize(locationSuffix, LocationPresentation.DEFAULT_LOCATION_SUFFIX);
}
}
return text;
}
// NB!: this point is achievable if the following method returns null
// see com.intellij.ide.util.treeView.NodeDescriptor.toString
if (value instanceof TreeElement) {
// Presentation access requires a read action.
return ReadAction.compute(() -> ((TreeElement)value).getPresentation().getPresentableText());
}
return null;
}
@Override
public void setActionActive(String name, boolean state) {
// No-op in this implementation; action toggles are applied via myTreeActionsOwner (see addCheckbox).
}
@Override
public boolean isActionActive(String name) {
// Always false here; this popup does not report action activity through this interface.
return false;
}
/**
 * Tree filter backing "narrow down on typing": an element stays visible if it
 * matches the current speed-search prefix or is an ancestor of a match.
 */
private class FileStructurePopupFilter implements ElementFilter {
// Prefix the myVisibleParents cache was computed for.
private String myLastFilter;
// Ancestors of matched nodes, kept visible so matches remain reachable.
private final Set<Object> myVisibleParents = new HashSet<>();
private final boolean isUnitTest = ApplicationManager.getApplication().isUnitTestMode();
@Override
public boolean shouldBeShowing(Object value) {
if (!isShouldNarrowDown()) return true;
String filter = getSearchPrefix();
// Invalidate the ancestor cache whenever the prefix changes.
if (!StringUtil.equals(myLastFilter, filter)) {
myVisibleParents.clear();
myLastFilter = filter;
}
if (filter != null) {
if (myVisibleParents.contains(value)) {
return true;
}
String text = getSpeedSearchText(value);
if (text == null) return false;
if (matches(filter, text)) {
// Remember the whole ancestor chain of the match.
Object o = value;
while (o instanceof FilteringTreeStructure.FilteringNode && (o = ((FilteringTreeStructure.FilteringNode)o).getParent()) != null) {
myVisibleParents.add(o);
}
return true;
}
else {
return false;
}
}
return true;
}
// Matching is delegated to the speed search's comparator.
private boolean matches(@Nonnull String filter, @Nonnull String text) {
return (isUnitTest || mySpeedSearch.isPopupActive()) && StringUtil.isNotEmpty(filter) && mySpeedSearch.getComparator().matchingFragments(filter, text) != null;
}
}
/**
 * Returns the current search prefix: the test override in unit-test mode,
 * otherwise the speed search's non-empty entered prefix, or {@code null}.
 */
@Nullable
private String getSearchPrefix() {
  if (ApplicationManager.getApplication().isUnitTestMode()) {
    return myTestSearchFilter;
  }
  if (mySpeedSearch == null || StringUtil.isEmpty(mySpeedSearch.getEnteredPrefix())) {
    return null;
  }
  return mySpeedSearch.getEnteredPrefix();
}
/**
 * Speed search that prefers matches related to the element the popup was
 * opened on: its children first, then its siblings, then deeper descendants.
 */
private class MyTreeSpeedSearch extends TreeSpeedSearch {
MyTreeSpeedSearch() {
super(myTree, path -> getSpeedSearchText(TreeUtil.getLastUserObject(path)), true);
}
@Override
protected Point getComponentLocationOnScreen() {
// Anchor the search tooltip to the popup, not the tree.
return myPopup.getContent().getLocationOnScreen();
}
@Override
protected Rectangle getComponentVisibleRect() {
return myPopup.getContent().getVisibleRect();
}
@Override
public Object findElement(String s) {
List<SpeedSearchObjectWithWeight> elements = SpeedSearchObjectWithWeight.findElement(s, this);
SpeedSearchObjectWithWeight best = ContainerUtil.getFirstItem(elements);
if (best == null) return null;
if (myInitialElement instanceof PsiElement) {
PsiElement initial = (PsiElement)myInitialElement;
// find children of the initial element
SpeedSearchObjectWithWeight bestForParent = find(initial, elements, FileStructurePopup::isParent);
if (bestForParent != null) return bestForParent.node;
// find siblings of the initial element
PsiElement parent = initial.getParent();
if (parent != null) {
SpeedSearchObjectWithWeight bestSibling = find(parent, elements, FileStructurePopup::isParent);
if (bestSibling != null) return bestSibling.node;
}
// find grand children of the initial element
SpeedSearchObjectWithWeight bestForAncestor = find(initial, elements, FileStructurePopup::isAncestor);
if (bestForAncestor != null) return bestForAncestor.node;
}
// Fall back to the overall best-weighted match.
return best.node;
}
}
/**
 * Returns the first candidate whose tree path satisfies the predicate; a
 * candidate whose node is not a TreePath is passed through as {@code null}.
 */
@Nullable
private static SpeedSearchObjectWithWeight find(@Nonnull PsiElement element, @Nonnull List<SpeedSearchObjectWithWeight> objects, @Nonnull BiPredicate<PsiElement, TreePath> predicate) {
  return ContainerUtil.find(objects, candidate -> {
    TreePath path = ObjectUtils.tryCast(candidate.node, TreePath.class);
    return predicate.test(element, path);
  });
}
/** Tests whether the last node on {@code path} unwraps to exactly {@code element}. */
private static boolean isElement(@Nonnull PsiElement element, @Nullable TreePath path) {
  FilteringTreeStructure.FilteringNode node = TreeUtil.getLastUserObject(FilteringTreeStructure.FilteringNode.class, path);
  Object value = StructureViewComponent.unwrapValue(node);
  return element.equals(value);
}
/** Tests whether {@code parent} is the element represented by {@code path}'s parent. */
private static boolean isParent(@Nonnull PsiElement parent, @Nullable TreePath path) {
  if (path == null) {
    return false;
  }
  return isElement(parent, path.getParentPath());
}
/** Tests whether {@code ancestor} appears anywhere on {@code path} up to the root. */
private static boolean isAncestor(@Nonnull PsiElement ancestor, @Nullable TreePath path) {
  for (TreePath current = path; current != null; current = current.getParentPath()) {
    if (isElement(ancestor, current)) {
      return true;
    }
  }
  return false;
}
/**
 * The popup's tree component: hides the synthetic root, shows root handles,
 * and installs hint support resolving nodes to their PSI elements.
 */
static class MyTree extends DnDAwareTree implements PlaceProvider<String> {
MyTree(TreeModel treeModel) {
super(treeModel);
setRootVisible(false);
setShowsRootHandles(true);
// Documentation/quick-doc hints resolve a node to its underlying PsiElement.
HintUpdateSupply.installHintUpdateSupply(this, o -> {
Object value = StructureViewComponent.unwrapValue(o);
return value instanceof PsiElement ? (PsiElement)value : null;
});
}
@Override
public String getPlace() {
return ActionPlaces.STRUCTURE_VIEW_POPUP;
}
}
}
| |
/*
* Copyright 2006-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.javadsl.design;
import com.consol.citrus.dsl.testng.TestNGCitrusTestDesigner;
import com.consol.citrus.annotations.CitrusTest;
import com.consol.citrus.http.message.HttpMessage;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.testng.annotations.Test;
/**
 * Citrus integration test exercising the HTTP Java DSL against a message
 * controller: a plain request, context-path variables, and query parameters.
 * A second test covers the same flows via the deprecated send/receive DSL.
 *
 * @author Christoph Deppisch
 */
@Test
public class HttpMessageControllerJavaIT extends TestNGCitrusTestDesigner {
@CitrusTest(name = "HttpMessageControllerJavaIT")
public void httpMessageControllerIT() {
variable("id", "123456789");
echo("First request without query parameter and context path variables.");
// Client send and server-side receive expectation run in parallel; the
// server side sits in sequential() so further steps could be appended.
parallel(
http().client("httpClient")
.get()
.uri("http://localhost:8072")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.accept("application/xml;charset=UTF-8")),
sequential(
http().server("httpServerRequestEndpoint")
.get()
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
)
);
// Client must receive a 200 OK within 2 seconds.
http().client("httpClient")
.response(HttpStatus.OK)
.timeout(2000L)
.version("HTTP/1.1");
echo("Use context path variables.");
parallel(
http().client("httpClient")
.get()
.uri("http://localhost:8072/test/user/${id}")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.accept("application/xml;charset=UTF-8")),
sequential(
http().server("httpServerRequestEndpoint")
.get("/test/user/${id}")
.message(new HttpMessage()
.contentType("text/html")
.method(HttpMethod.GET)
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
)
);
http().client("httpClient")
.response(HttpStatus.OK)
.timeout(2000L)
.version("HTTP/1.1");
echo("Use query parameter and context path variables.");
parallel(
http().client("httpClient")
.get()
.uri("http://localhost:8072/test")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.queryParam("id", "${id}")
.queryParam("name", "TestUser")
.accept("application/xml;charset=UTF-8"))
.path("user"),
sequential(
http().server("httpServerRequestEndpoint")
.get("/test/user")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
.queryParam("id", "${id}")
.queryParam("name", "TestUser")
)
);
http().client("httpClient")
.response(HttpStatus.OK)
.timeout(2000L)
.version("HTTP/1.1");
}
// Same scenarios expressed with the deprecated send()/receive() DSL,
// kept for backward-compatibility coverage.
@CitrusTest(name = "HttpMessageControllerJavaDeprecatedIT")
public void httpMessageControllerDeprecatedIT() {
variable("id", "123456789");
echo("First request without query parameter and context path variables.");
parallel(
send("httpClient")
.http()
.uri("http://localhost:8072")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.accept("application/xml;charset=UTF-8")),
sequential(
receive("httpServerRequestEndpoint")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
.http().uri("/").contextPath("")
)
);
receive("httpClient")
.timeout(2000L)
.http()
.status(HttpStatus.OK)
.version("HTTP/1.1");
echo("Use context path variables.");
parallel(
send("httpClient")
.http()
.uri("http://localhost:8072/test/user/${id}")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.accept("application/xml;charset=UTF-8")),
sequential(
receive("httpServerRequestEndpoint")
.http()
.message(new HttpMessage()
.contentType("text/html")
.method(HttpMethod.GET)
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
.uri("/test/user/${id}")
.contextPath("")
)
);
receive("httpClient")
.timeout(2000L)
.http()
.status(HttpStatus.OK)
.version("HTTP/1.1");
echo("Use query parameter and context path variables.");
parallel(
send("httpClient")
.http()
.uri("http://localhost:8072/test")
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.queryParam("id", "${id}")
.queryParam("name", "TestUser")
.accept("application/xml;charset=UTF-8"))
.path("user"),
sequential(
receive("httpServerRequestEndpoint")
.http()
.message(new HttpMessage()
.method(HttpMethod.GET)
.contentType("text/html")
.header("Host", "localhost:8072")
.accept("application/xml;charset=UTF-8"))
.uri("/test/user")
.contextPath("")
.queryParam("id", "${id}")
.queryParam("name", "TestUser")
)
);
receive("httpClient")
.timeout(2000L)
.http()
.status(HttpStatus.OK)
.version("HTTP/1.1");
}
}
| |
package com.sdsmdg.kd.trianglify.models;
/**
 * <h1>Palette</h1>
 * <b>Description : </b>
 * Set of 9 colors that are used to color a triangulation. Palette contains few predefined color sets
 * as well as method to perform operations on palette.
 *
 * @author kriti
 * @since 18/3/17.
 */
public class Palette {
    /** Number of predefined palettes reachable via {@link #getPalette(int)}. */
    public static final int DEFAULT_PALETTE_COUNT = 28;
    /** Every palette holds exactly this many colors. */
    private static final int COLOR_COUNT = 9;

    // Indices of the predefined (ColorBrewer-style) palettes.
    private static final int YL_GN = 0;
    private static final int YL = 1;
    private static final int YL_GN_BU = 2;
    private static final int GN_BU = 3;
    private static final int BU_GN = 4;
    private static final int PU_BU_GN = 5;
    private static final int PU_BU = 6;
    private static final int BU_PU = 7;
    private static final int RD_PU = 8;
    private static final int PU_RD = 9;
    private static final int OR_RD = 10;
    private static final int YL_OR_RD = 11;
    private static final int YL_OR_BR = 12;
    private static final int PURPLES = 13;
    private static final int BLUES = 14;
    private static final int GREENS = 15;
    private static final int ORANGES = 16;
    private static final int REDS = 17;
    private static final int GREYS = 18;
    private static final int PU_OR = 19;
    private static final int BR_BL = 20;
    private static final int PU_RD_GN = 21;
    private static final int PI_YL_GN = 22;
    private static final int RD_BU = 23;
    private static final int RD_GY = 24;
    private static final int RD_YL_BU = 25;
    private static final int SPECTRAL = 26;
    private static final int RD_YL_GN = 27;

    // Backing array of exactly 9 RGB colors (no alpha channel).
    private int[] colors;

    /**
     * Returns this palette's color array.
     * NOTE(review): the internal array is exposed for backward compatibility;
     * mutating the returned array mutates the palette.
     */
    public int[] getColors() {
        return colors;
    }

    /**
     * Replaces this palette's colors.
     *
     * @param colors exactly 9 colors
     * @throws IllegalArgumentException if the array length is not 9
     */
    public void setColors(int[] colors) {
        if (colors.length != COLOR_COUNT) {
            throw new IllegalArgumentException("Colors array length should exactly be 9");
        }
        // Defensive copy: later mutation of the caller's array must not
        // silently change this palette.
        this.colors = colors.clone();
    }

    /**
     * Return palette object corresponding to supplied value of paletteIndex, palette is constructed
     * from a predefined set of colors
     * @param paletteIndex Index of palette to return
     * @return Palette object generated from predefined set of colors
     * @throws IllegalArgumentException if the index is outside [0, DEFAULT_PALETTE_COUNT)
     */
    public static Palette getPalette(int paletteIndex) {
        switch (paletteIndex) {
            case YL:
                return new Palette(0xffffe0, 0xffffcc, 0xfffacd, 0xffff00, 0xffef00, 0xffd300, 0xf8de7e, 0xffd700, 0xc3b091);
            case YL_GN:
                return new Palette(0xffffe5, 0xf7fcb9, 0xd9f0a3, 0xaddd8e, 0x78c679, 0x41ab5d, 0x238443, 0x006837, 0x004529);
            case YL_GN_BU:
                return new Palette(0xffffd9, 0xedf8b1, 0xc7e9b4, 0x7fcdbb, 0x41b6c4, 0x1d91c0, 0x225ea8, 0x253494, 0x081d58);
            case GN_BU:
                return new Palette(0xf7fcf0, 0xe0f3db, 0xccebc5, 0xa8ddb5, 0x7bccc4, 0x4eb3d3, 0x2b8cbe, 0x0868ac, 0x084081);
            case BU_GN:
                return new Palette(0xf7fcfd, 0xe5f5f9, 0xccece6, 0x99d8c9, 0x66c2a4, 0x41ae76, 0x238b45, 0x006d2c, 0x00441c);
            case PU_BU_GN:
                return new Palette(0xfff7fb, 0xece2f0, 0xd0d1e6, 0xa6bddb, 0x67a9cf, 0x3690c0, 0x02818a, 0x016c59, 0x014636);
            case PU_BU:
                return new Palette(0xfff7fb, 0xece7f2, 0xd0d1e6, 0xa6bddb, 0x74a9cf, 0x3690c0, 0x0570b0, 0x045a8d, 0x023858);
            case BU_PU:
                return new Palette(0xf7fcfd, 0xe0ecf4, 0xbfd3e6, 0x9ebcda, 0x8c96c6, 0x8c6bb1, 0x88419d, 0x810f7c, 0x4d004b);
            case RD_PU:
                return new Palette(0xfff7f3, 0xfde0dd, 0xfcc5c0, 0xfa9fb5, 0xf768a1, 0xdd3497, 0xae017e, 0x7a0177, 0x49006a);
            case PU_RD:
                return new Palette(0xf7f4f9, 0xe7e1ef, 0xd4b9da, 0xc994c7, 0xdf65b0, 0xe7298a, 0xce1256, 0x980043, 0x67001f);
            case OR_RD:
                return new Palette(0xfff7ec, 0xfee8c8, 0xfdd49e, 0xfdbb84, 0xfc8d59, 0xef6548, 0xd7301f, 0xb30000, 0x7f0000);
            case YL_OR_RD:
                return new Palette(0xffffcc, 0xffeda0, 0xfed976, 0xfeb24c, 0xfd8d3c, 0xfc4e2a, 0xe31a1c, 0xbd0026, 0x800026);
            case YL_OR_BR:
                return new Palette(0xffffe5, 0xfff7bc, 0xfee391, 0xfec44f, 0xfe9929, 0xec7014, 0xcc4c02, 0x993404, 0x662506);
            case PURPLES:
                return new Palette(0xfcfbfd, 0xefedf5, 0xdadaeb, 0xbcbddc, 0x9e9ac8, 0x807dba, 0x6a51a3, 0x54278f, 0x3f007d);
            case BLUES:
                return new Palette(0xf7fbff, 0xdeebf7, 0xc6dbef, 0x9ecae1, 0x6baed6, 0x4292c6, 0x2171b5, 0x08519c, 0x08306b);
            case GREENS:
                return new Palette(0xf7fcf5, 0xe5f5e0, 0xc7e9c0, 0xa1d99b, 0x74c476, 0x41ab5d, 0x238b45, 0x006d2c, 0x00441b);
            case ORANGES:
                return new Palette(0xfff5eb, 0xfee6ce, 0xfdd0a2, 0xfdae6b, 0xfd8d3c, 0xf16913, 0xd94801, 0xa63603, 0x7f2704);
            case REDS:
                return new Palette(0xfff5f0, 0xfee0d2, 0xfcbba1, 0xfc9272, 0xfb6a4a, 0xef3b2c, 0xcb181d, 0xa50f15, 0x67000d);
            case GREYS:
                return new Palette(0xffffff, 0xf0f0f0, 0xd9d9d9, 0xbdbdbd, 0x969696, 0x737373, 0x525252, 0x252525, 0x000000);
            case PU_OR:
                return new Palette(0x7f3b08, 0xb35806, 0xe08214, 0xfdb863, 0xfee0b6, 0xf7f7f7, 0xd8daeb, 0xb2abd2, 0x8073ac);
            case BR_BL:
                return new Palette(0x543005, 0x8c510a, 0xbf812d, 0xdfc27d, 0xf6e8c3, 0xf5f5f5, 0xc7eae5, 0x80cdc1, 0x35978f);
            case PU_RD_GN:
                return new Palette(0x40004b, 0x762a83, 0x9970ab, 0xc2a5cf, 0xe7d4e8, 0xf7f7f7, 0xd9f0d3, 0xa6dba0, 0x5aae61);
            case PI_YL_GN:
                return new Palette(0x8e0152, 0xc51b7d, 0xde77ae, 0xf1b6da, 0xfde0ef, 0xf7f7f7, 0xe6f5d0, 0xb8e186, 0x7fbc41);
            case RD_BU:
                return new Palette(0x67001f, 0xb2182b, 0xd6604d, 0xf4a582, 0xfddbc7, 0xf7f7f7, 0xd1e5f0, 0x92c5de, 0x4393c3);
            case RD_GY:
                return new Palette(0x67001f, 0xb2182b, 0xd6604d, 0xf4a582, 0xfddbc7, 0xffffff, 0xe0e0e0, 0xbababa, 0x878787);
            case RD_YL_BU:
                return new Palette(0xa50026, 0xd73027, 0xf46d43, 0xfdae61, 0xfee090, 0xffffbf, 0xe0f3f8, 0xabd9e9, 0x74add1);
            case SPECTRAL:
                return new Palette(0x9e0142, 0xd53e4f, 0xf46d43, 0xfdae61, 0xfee08b, 0xffffbf, 0xe6f598, 0xabdda4, 0x66c2a5);
            case RD_YL_GN:
                return new Palette(0xa50026, 0xd73027, 0xf46d43, 0xfdae61, 0xfee08b, 0xffffbf, 0xd9ef8b, 0xa6d96a, 0x66bd63);
            default:
                // Fixed grammar of the original message ("less Palette...").
                throw new IllegalArgumentException("Index should be less than Palette.DEFAULT_PALETTE_COUNT");
        }
    }

    /**
     * Returns index of palette object passed from list of palettes predefined in Palette
     * @param palette Object for finding index
     * @return Index from predefined pallete or -1 if not found
     */
    public static int indexOf(Palette palette) {
        int[] target = palette.getColors();
        for (int i = 0; i < DEFAULT_PALETTE_COUNT; i++) {
            // Arrays.equals replaces the original hand-rolled element-by-element loop.
            if (java.util.Arrays.equals(target, getPalette(i).getColors())) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Creates a palette from 9 individual colors.
     */
    public Palette(int c0, int c1, int c2, int c3, int c4, int c5, int c6, int c7, int c8) {
        colors = new int[] {c0, c1, c2, c3, c4, c5, c6, c7, c8};
    }

    /**
     * Creates a palette from an array of exactly 9 colors.
     *
     * @throws IllegalArgumentException if the array length is not 9
     */
    public Palette(int[] colors) {
        if (colors.length != COLOR_COUNT) {
            throw new IllegalArgumentException("Colors array length should exactly be 9");
        }
        // Defensive copy — see setColors.
        this.colors = colors.clone();
    }

    /**
     * Returns color corresponding to index passed from the set of color for a palette
     * @param index Index of color in set of color for current palette object
     * @return color as int without alpha channel
     * @throws IllegalArgumentException if index is outside [0, 9)
     */
    public int getColor(int index) {
        // Bounds check replaces the original 9-case switch; same exception either way.
        if (index < 0 || index >= COLOR_COUNT) {
            throw new IllegalArgumentException("Index should be less than 9");
        }
        return colors[index];
    }
}
| |
package com.cdgore.ankus;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.mahout.common.AbstractJob;
import org.apache.mahout.common.ClassUtils;
import org.apache.mahout.common.iterator.sequencefile.SequenceFileValueIterator;
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorWritable;
import org.apache.mahout.math.function.Functions;
import com.google.common.io.Closeables;
/**
 * MahoutColumnMeans is a job for calculating the column-wise mean of a
 * DistributedRowMatrix. This job can be accessed using
 * DistributedRowMatrix.columnMeans()
 */
public class MahoutColumnMeans extends Configured implements Tool {
    /** Configuration key naming the Vector implementation used for output. */
    public static final String VECTOR_CLASS = "DistributedRowMatrix.columnMeans.vector.class";

    public MahoutColumnMeans() {
    }

    public MahoutColumnMeans(Configuration conf) {
        // Fix: forward the configuration to Configured instead of dropping it
        // (the original called super() and ignored conf entirely).
        super(conf);
    }

    /** Entry point: delegates to ToolRunner so generic Hadoop options are parsed. */
    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new MahoutColumnMeans(), args);
        System.exit(exitCode);
    }

    /**
     * Configures and runs the column-means MapReduce job.
     *
     * @param arg0 exactly two arguments: input path and output path
     * @return 0 on success, 1 on job failure
     * @throws IOException if the argument count is wrong
     */
    public int run(String[] arg0) throws Exception {
        String vectorClass = null;
        Configuration conf = getConf();
        Configuration.dumpConfiguration(conf, new PrintWriter(System.out));
        // Default to DenseVector when no vector class was configured.
        conf.set(VECTOR_CLASS,
                vectorClass == null ? DenseVector.class.getName() : vectorClass);
        if (arg0.length != 2) {
            throw new IOException("Must specify input path and output path");
        }
        Path inputPath = new Path(arg0[0]);
        Path outputPath = new Path(arg0[1]);
        Job job = new Job(conf, "MahoutColumnMeans");
        job.setJarByClass(MahoutColumnMeans.class);
        // Remove stale output so the job can write its result.
        outputPath.getFileSystem(job.getConfiguration()).delete(outputPath,
                true);
        // A single reducer lets the mappers key all partial sums with null.
        job.setNumReduceTasks(1);
        FileInputFormat.addInputPath(job, inputPath);
        // Set the output path exactly once (the original set it three times).
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);
        job.setMapperClass(MatrixColumnMeansMapper.class);
        job.setReducerClass(MatrixColumnMeansReducer.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(VectorWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(VectorWritable.class);
        // waitForCompletion submits the job itself; the separate submit()
        // call of the original was redundant.
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Mapper for calculation of column-wise mean.
     */
    public static class MatrixColumnMeansMapper extends
            Mapper<Writable, VectorWritable, NullWritable, VectorWritable> {
        private Vector runningSum;
        private String vectorClass;

        @Override
        public void setup(Context context) {
            vectorClass = context.getConfiguration().get(VECTOR_CLASS);
        }

        /**
         * The mapper computes a running sum of the vectors the task has seen.
         * Element 0 of the running sum vector contains a count of the number of
         * vectors that have been seen. The remaining elements contain the
         * column-wise running sum. Nothing is written at this stage
         */
        @Override
        public void map(Writable r, VectorWritable v, Context context)
                throws IOException {
            if (runningSum == null) {
                /*
                 * If this is the first vector the mapper has seen, instantiate
                 * a new vector using the parameter VECTOR_CLASS
                 */
                runningSum = ClassUtils.instantiateAs(vectorClass,
                        Vector.class, new Class<?>[] { int.class },
                        new Object[] { v.get().size() + 1 });
                runningSum.set(0, 1);
                runningSum.viewPart(1, v.get().size()).assign(v.get());
            } else {
                runningSum.set(0, runningSum.get(0) + 1);
                runningSum.viewPart(1, v.get().size()).assign(v.get(),
                        Functions.PLUS);
            }
        }

        /**
         * The column-wise sum is written at the cleanup stage. A single reducer
         * is forced so null can be used for the key
         */
        @Override
        public void cleanup(Context context) throws InterruptedException,
                IOException {
            if (runningSum != null) {
                context.write(NullWritable.get(),
                        new VectorWritable(runningSum));
            }
        }
    }

    /**
     * The reducer adds the partial column-wise sums from each of the mappers to
     * compute the total column-wise sum. The total sum is then divided by the
     * total count of vectors to determine the column-wise mean.
     */
    public static class MatrixColumnMeansReducer extends
            Reducer<NullWritable, VectorWritable, IntWritable, VectorWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private String vectorClass;
        private Vector outputVector;
        private final VectorWritable outputVectorWritable = new VectorWritable();

        @Override
        public void setup(Context context) {
            vectorClass = context.getConfiguration().get(VECTOR_CLASS);
        }

        @Override
        public void reduce(NullWritable n, Iterable<VectorWritable> vectors,
                Context context) throws IOException, InterruptedException {
            /**
             * Add together partial column-wise sums from mappers
             */
            for (VectorWritable v : vectors) {
                if (outputVector == null) {
                    // Clone defensively: the accumulator must not alias a value
                    // object the framework may reuse across iterations.
                    outputVector = v.get().clone();
                } else {
                    outputVector.assign(v.get(), Functions.PLUS);
                }
            }
            /**
             * Divide total column-wise sum by count of vectors, which
             * corresponds to the number of rows in the DistributedRowMatrix
             */
            if (outputVector != null) {
                outputVectorWritable.set(outputVector.viewPart(1,
                        outputVector.size() - 1).divide(outputVector.get(0)));
                context.write(ONE, outputVectorWritable);
            } else {
                // No input at all: emit an empty vector of the configured class.
                Vector emptyVector = ClassUtils.instantiateAs(vectorClass,
                        Vector.class, new Class<?>[] { int.class },
                        new Object[] { 0 });
                context.write(ONE, new VectorWritable(emptyVector));
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.network.jms;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Iterator;
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.util.Wait;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Verifies that a standalone {@link SimpleJmsTopicConnector} bridges topic
 * traffic between a local and a foreign broker, and that it automatically
 * re-establishes the bridge when the brokers start late or are restarted.
 */
public class TopicBridgeStandaloneReconnectTest {

    private SimpleJmsTopicConnector jmsTopicConnector;

    private BrokerService localBroker;
    private BrokerService foreignBroker;

    private ActiveMQConnectionFactory localConnectionFactory;
    private ActiveMQConnectionFactory foreignConnectionFactory;

    private Destination outbound;
    private Destination inbound;

    // Consumer connections opened during a test; closed in tearDown().
    private final ArrayList<Connection> connections = new ArrayList<Connection>();

    @Test
    public void testSendAndReceiveOverConnectedBridges() throws Exception {
        startLocalBroker();
        startForeignBroker();

        jmsTopicConnector.start();

        final MessageConsumer local = createConsumerForLocalBroker();
        final MessageConsumer foreign = createConsumerForForeignBroker();

        sendMessageToForeignBroker("to.foreign.broker");
        sendMessageToLocalBroker("to.local.broker");

        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(local, "to.local.broker")));
        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(foreign, "to.foreign.broker")));
    }

    @Test
    public void testSendAndReceiveOverBridgeWhenStartedBeforeBrokers() throws Exception {
        // Start the connector first: it must keep retrying until both brokers
        // become available.
        jmsTopicConnector.start();

        startLocalBroker();
        startForeignBroker();

        assertTrue("Should have Connected.", Wait.waitFor(bridgeConnected()));

        final MessageConsumer local = createConsumerForLocalBroker();
        final MessageConsumer foreign = createConsumerForForeignBroker();

        sendMessageToForeignBroker("to.foreign.broker");
        sendMessageToLocalBroker("to.local.broker");

        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(local, "to.local.broker")));
        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(foreign, "to.foreign.broker")));
    }

    @Test
    public void testSendAndReceiveOverBridgeWithRestart() throws Exception {
        startLocalBroker();
        startForeignBroker();

        jmsTopicConnector.start();

        assertTrue("Should have Connected.", Wait.waitFor(bridgeConnected()));

        // Bounce both brokers and confirm the connector notices the outage
        // before verifying it reconnects.
        stopLocalBroker();
        stopForeignBroker();

        assertTrue("Should have detected connection drop.", Wait.waitFor(bridgeDisconnected()));

        startLocalBroker();
        startForeignBroker();

        assertTrue("Should have Re-Connected.", Wait.waitFor(bridgeConnected()));

        final MessageConsumer local = createConsumerForLocalBroker();
        final MessageConsumer foreign = createConsumerForForeignBroker();

        sendMessageToForeignBroker("to.foreign.broker");
        sendMessageToLocalBroker("to.local.broker");

        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(local, "to.local.broker")));
        assertTrue("Should have received a Message.",
                Wait.waitFor(messageReceived(foreign, "to.foreign.broker")));
    }

    @Before
    public void setUp() throws Exception {
        localConnectionFactory = createLocalConnectionFactory();
        foreignConnectionFactory = createForeignConnectionFactory();

        outbound = new ActiveMQTopic("RECONNECT.TEST.OUT.TOPIC");
        inbound = new ActiveMQTopic("RECONNECT.TEST.IN.TOPIC");

        jmsTopicConnector = new SimpleJmsTopicConnector();

        // Wire the bridges.
        jmsTopicConnector.setOutboundTopicBridges(
            new OutboundTopicBridge[] {new OutboundTopicBridge("RECONNECT.TEST.OUT.TOPIC")});
        jmsTopicConnector.setInboundTopicBridges(
            new InboundTopicBridge[] {new InboundTopicBridge("RECONNECT.TEST.IN.TOPIC")});

        // Tell it how to reach the two brokers.
        jmsTopicConnector.setOutboundTopicConnectionFactory(
            new ActiveMQConnectionFactory("tcp://localhost:61617"));
        jmsTopicConnector.setLocalTopicConnectionFactory(
            new ActiveMQConnectionFactory("tcp://localhost:61616"));
    }

    @After
    public void tearDown() throws Exception {
        disposeConsumerConnections();

        // Best-effort teardown: each resource is released independently so a
        // failure in one step does not leak the others.
        try {
            jmsTopicConnector.stop();
            jmsTopicConnector = null;
        } catch (Exception e) {
        }

        try {
            stopLocalBroker();
        } catch (Throwable e) {
        }
        try {
            stopForeignBroker();
        } catch (Throwable e) {
        }
    }

    /** Closes every consumer connection opened by the test, ignoring errors. */
    protected void disposeConsumerConnections() {
        for (Iterator<Connection> iter = connections.iterator(); iter.hasNext();) {
            Connection connection = iter.next();
            try {
                connection.close();
            } catch (Throwable ignore) {
            }
        }
    }

    /**
     * Condition satisfied once {@code consumer} receives a TextMessage whose
     * body equals {@code expected}.
     */
    private Wait.Condition messageReceived(final MessageConsumer consumer, final String expected) {
        return new Wait.Condition() {
            @Override
            public boolean isSatisified() throws Exception {
                Message message = consumer.receive(100);
                return message != null && ((TextMessage) message).getText().equals(expected);
            }
        };
    }

    /** Condition satisfied once the topic connector reports itself connected. */
    private Wait.Condition bridgeConnected() {
        return new Wait.Condition() {
            @Override
            public boolean isSatisified() throws Exception {
                return jmsTopicConnector.isConnected();
            }
        };
    }

    /** Condition satisfied once the topic connector reports the connection lost. */
    private Wait.Condition bridgeDisconnected() {
        return new Wait.Condition() {
            @Override
            public boolean isSatisified() throws Exception {
                return !jmsTopicConnector.isConnected();
            }
        };
    }

    /** Starts the local broker if it is not already running. */
    protected void startLocalBroker() throws Exception {
        if (localBroker == null) {
            localBroker = createFirstBroker();
            localBroker.start();
            localBroker.waitUntilStarted();
        }
    }

    /** Stops the local broker, if running, and waits for full shutdown. */
    protected void stopLocalBroker() throws Exception {
        if (localBroker != null) {
            localBroker.stop();
            localBroker.waitUntilStopped();
            localBroker = null;
        }
    }

    /** Starts the foreign broker if it is not already running. */
    protected void startForeignBroker() throws Exception {
        if (foreignBroker == null) {
            foreignBroker = createSecondBroker();
            foreignBroker.start();
            foreignBroker.waitUntilStarted();
        }
    }

    /** Stops the foreign broker, if running, and waits for full shutdown. */
    protected void stopForeignBroker() throws Exception {
        if (foreignBroker != null) {
            foreignBroker.stop();
            foreignBroker.waitUntilStopped();
            foreignBroker = null;
        }
    }

    /** Creates the (non-persistent) local broker listening on 61616. */
    protected BrokerService createFirstBroker() throws Exception {
        BrokerService broker = new BrokerService();
        broker.setBrokerName("broker1");
        broker.setPersistent(false);
        broker.setUseJmx(false);
        broker.addConnector("tcp://localhost:61616");
        return broker;
    }

    /** Creates the (non-persistent) foreign broker listening on 61617. */
    protected BrokerService createSecondBroker() throws Exception {
        BrokerService broker = new BrokerService();
        broker.setBrokerName("broker2");
        broker.setPersistent(false);
        broker.setUseJmx(false);
        broker.addConnector("tcp://localhost:61617");
        return broker;
    }

    protected ActiveMQConnectionFactory createLocalConnectionFactory() {
        return new ActiveMQConnectionFactory("tcp://localhost:61616");
    }

    protected ActiveMQConnectionFactory createForeignConnectionFactory() {
        return new ActiveMQConnectionFactory("tcp://localhost:61617");
    }

    /**
     * Publishes {@code text} on the outbound topic of the local broker; the
     * outbound bridge is expected to forward it to the foreign broker.
     */
    protected void sendMessageToForeignBroker(String text) throws JMSException {
        Connection connection = null;
        try {
            connection = localConnectionFactory.createConnection();
            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(outbound);
            TextMessage message = session.createTextMessage();
            message.setText(text);
            producer.send(message);
        } finally {
            // Guard against createConnection() having thrown before assignment.
            if (connection != null) {
                try {
                    connection.close();
                } catch (Throwable ignore) {
                }
            }
        }
    }

    /**
     * Publishes {@code text} on the inbound topic of the foreign broker; the
     * inbound bridge is expected to forward it to the local broker.
     */
    protected void sendMessageToLocalBroker(String text) throws JMSException {
        Connection connection = null;
        try {
            connection = foreignConnectionFactory.createConnection();
            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(inbound);
            TextMessage message = session.createTextMessage();
            message.setText(text);
            producer.send(message);
        } finally {
            // Guard against createConnection() having thrown before assignment.
            if (connection != null) {
                try {
                    connection.close();
                } catch (Throwable ignore) {
                }
            }
        }
    }

    /** Subscribes to the inbound topic on the local broker. */
    protected MessageConsumer createConsumerForLocalBroker() throws JMSException {
        Connection connection = localConnectionFactory.createConnection();
        connections.add(connection);
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        return session.createConsumer(inbound);
    }

    /** Subscribes to the outbound topic on the foreign broker. */
    protected MessageConsumer createConsumerForForeignBroker() throws JMSException {
        Connection connection = foreignConnectionFactory.createConnection();
        connections.add(connection);
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        return session.createConsumer(outbound);
    }
}
| |
/*
Copyright 2012 Software Freedom Conservancy
Copyright 2011-2012 Selenium committers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.WaitingConditions.newWindowIsOpened;
import static org.openqa.selenium.WaitingConditions.pageSourceToContain;
import static org.openqa.selenium.support.ui.ExpectedConditions.titleIs;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA_MOBILE;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
import static org.openqa.selenium.testing.TestUtilities.isFirefox;
import static org.openqa.selenium.testing.TestUtilities.isNativeEventsEnabled;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.interactions.MoveTargetOutOfBoundsException;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NoDriverAfterTest;
import java.util.Set;
/**
 * Browser-level tests of clicking: plain links, anchors, frame-updating
 * links, image-map areas, wrapped/overflowing elements, and targets outside
 * the initial viewport. Driver-specific failures or untested combinations
 * are excluded per-test via {@code @Ignore}.
 */
public class ClickTest extends JUnit4TestBase {
  // Every test starts from the shared "clicks" test page.
  @Before
  public void setUp() throws Exception {
    driver.get(pages.clicksPage);
  }
  // Return focus to the top-level document so the next test starts clean.
  @After
  public void tearDown() throws Exception {
    driver.switchTo().defaultContent();
  }
  @Test
  public void testCanClickOnALinkAndFollowIt() {
    driver.findElement(By.id("normal")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Ignore(value = {OPERA, MARIONETTE}, reason = "Not tested.")
  @Test
  public void testCanClickOnALinkThatOverflowsAndFollowIt() {
    driver.findElement(By.id("overflowLink")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @JavascriptEnabled
  @Test
  @Ignore(MARIONETTE)
  public void testCanClickOnAnAnchorAndNotReloadThePage() {
    // Set a JS flag; a page reload would wipe it, so its survival proves the
    // anchor click did not navigate away.
    ((JavascriptExecutor) driver).executeScript("document.latch = true");
    driver.findElement(By.id("anchor")).click();
    Boolean samePage = (Boolean) ((JavascriptExecutor) driver)
        .executeScript("return document.latch");
    assertEquals("Latch was reset", Boolean.TRUE, samePage);
  }
  @Ignore(value = {OPERA, ANDROID, OPERA_MOBILE, MARIONETTE},
      reason = "Opera: Incorrect runtime retrieved, Android: A bug in emulator JSC engine on " +
          "2.2, works on devices.")
  @Test
  public void testCanClickOnALinkThatUpdatesAnotherFrame() {
    driver.switchTo().frame("source");
    driver.findElement(By.id("otherframe")).click();
    driver.switchTo().defaultContent().switchTo().frame("target");
    wait.until(pageSourceToContain("Hello WebDriver"));
  }
  @JavascriptEnabled
  @Ignore(value = {OPERA, ANDROID, OPERA_MOBILE, MARIONETTE},
      reason = "Opera: Incorrect runtime retrieved; " +
          "Android: fails when running with other tests.")
  @Test
  public void testElementsFoundByJsCanLoadUpdatesInAnotherFrame() {
    driver.switchTo().frame("source");
    // Locate the element via JS rather than WebDriver's finders.
    WebElement toClick = (WebElement) ((JavascriptExecutor) driver).executeScript(
        "return document.getElementById('otherframe');"
    );
    toClick.click();
    driver.switchTo().defaultContent().switchTo().frame("target");
    wait.until(pageSourceToContain("Hello WebDriver"));
  }
  @JavascriptEnabled
  @Ignore(value = {OPERA, ANDROID, OPERA_MOBILE, MARIONETTE}, reason =
      "Opera: Incorrect runtime retrieved, Android: fails when running with other tests.")
  @Test
  public void testJsLocatedElementsCanUpdateFramesIfFoundSomehowElse() {
    driver.switchTo().frame("source");
    // Prime the cache of elements
    driver.findElement(By.id("otherframe"));
    // This _should_ return the same element
    WebElement toClick = (WebElement) ((JavascriptExecutor) driver).executeScript(
        "return document.getElementById('otherframe');"
    );
    toClick.click();
    driver.switchTo().defaultContent().switchTo().frame("target");
    wait.until(pageSourceToContain("Hello WebDriver"));
  }
  @JavascriptEnabled
  @Test
  public void testCanClickOnAnElementWithTopSetToANegativeNumber() {
    String page = appServer.whereIs("styledPage.html");
    driver.get(page);
    WebElement searchBox = driver.findElement(By.name("searchBox"));
    searchBox.sendKeys("Cheese");
    driver.findElement(By.name("btn")).click();
    String log = driver.findElement(By.id("log")).getText();
    assertEquals("click", log);
  }
  @Ignore(value = {ANDROID, CHROME, IPHONE, SAFARI, OPERA_MOBILE}, reason = "Not tested")
  @Test
  public void testShouldClickOnFirstBoundingClientRectWithNonZeroSize() {
    driver.findElement(By.id("twoClientRects")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @JavascriptEnabled
  @Ignore(value = {ANDROID, CHROME, OPERA, MARIONETTE}, reason = "Not implemented")
  @Test
  public void testShouldSetRelatedTargetForMouseOver() {
    driver.get(pages.javascriptPage);
    driver.findElement(By.id("movable")).click();
    String log = driver.findElement(By.id("result")).getText();
    // Note: It is not guaranteed that the relatedTarget property of the mouseover
    // event will be the parent, when using native events. Only check that the mouse
    // has moved to this element, not that the parent element was the related target.
    if (isNativeEventsEnabled(driver)) {
      assertTrue("Should have moved to this element.", log.startsWith("parent matches?"));
    } else {
      assertEquals("parent matches? true", log);
    }
  }
  @JavascriptEnabled
  @NoDriverAfterTest
  @Ignore(value = {ANDROID, IPHONE, OPERA, SAFARI, OPERA_MOBILE},
      reason = "Doesn't support multiple windows; Safari: issue 3693")
  @Test
  public void testShouldOnlyFollowHrefOnce() {
    driver.get(pages.clicksPage);
    String current = driver.getWindowHandle();
    Set<String> currentWindowHandles = driver.getWindowHandles();
    try {
      // Clicking should open exactly one new window; close it and make sure
      // focus returns to the original window even if the wait fails.
      driver.findElement(By.id("new-window")).click();
      String newWindowHandle = wait.until(newWindowIsOpened(currentWindowHandles));
      driver.switchTo().window(newWindowHandle);
      driver.close();
    } finally {
      driver.switchTo().window(current);
    }
  }
  // Permanently ignored: mouse-out events are not fired synthetically.
  @Ignore
  public void testShouldSetRelatedTargetForMouseOut() {
    fail("Must. Write. Meaningful. Test (but we don't fire mouse outs synthetically");
  }
  @Test
  public void testClickingLabelShouldSetCheckbox() {
    driver.get(pages.formPage);
    driver.findElement(By.id("label-for-checkbox-with-label")).click();
    assertTrue(
        "Should be selected",
        driver.findElement(By.id("checkbox-with-label")).isSelected());
  }
  @Test
  public void testCanClickOnALinkWithEnclosedImage() {
    driver.findElement(By.id("link-with-enclosed-image")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  public void testCanClickOnAnImageEnclosedInALink() {
    driver.findElement(By.id("link-with-enclosed-image")).findElement(By.tagName("img")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  public void testCanClickOnALinkThatContainsTextWrappedInASpan() {
    driver.findElement(By.id("link-with-enclosed-span")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  @Ignore(MARIONETTE)
  public void testCanClickOnALinkThatContainsEmbeddedBlockElements() {
    driver.findElement(By.id("embeddedBlock")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  public void testCanClickOnAnElementEnclosedInALink() {
    driver.findElement(By.id("link-with-enclosed-span")).findElement(By.tagName("span")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  // See http://code.google.com/p/selenium/issues/attachmentText?id=2700
  @Test
  public void testShouldBeAbleToClickOnAnElementInTheViewport() {
    String url = appServer.whereIs("click_out_of_bounds.html");
    driver.get(url);
    WebElement button = driver.findElement(By.id("button"));
    try {
      button.click();
    } catch (MoveTargetOutOfBoundsException e) {
      fail("Should not be out of bounds: " + e.getMessage());
    }
  }
  @Test
  public void testClicksASurroundingStrongTag() {
    driver.get(appServer.whereIs("ClickTest_testClicksASurroundingStrongTag.html"));
    driver.findElement(By.tagName("a")).click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  @Ignore(value = {IE, OPERA, OPERA_MOBILE, ANDROID, IPHONE, MARIONETTE}, reason
      = "Opera, IE: failed, others: not tested")
  public void testCanClickAnImageMapArea() {
    // Exercise all three <area> shapes (rect, circle, poly) on the map page.
    driver.get(appServer.whereIs("click_tests/google_map.html"));
    driver.findElement(By.id("rectG")).click();
    wait.until(titleIs("Target Page 1"));
    driver.get(appServer.whereIs("click_tests/google_map.html"));
    driver.findElement(By.id("circleO")).click();
    wait.until(titleIs("Target Page 2"));
    driver.get(appServer.whereIs("click_tests/google_map.html"));
    driver.findElement(By.id("polyLE")).click();
    wait.until(titleIs("Target Page 3"));
  }
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE, ANDROID, IPHONE, MARIONETTE}, reason
      = "Not tested against these browsers")
  public void testShouldBeAbleToClickOnAnElementGreaterThanTwoViewports() {
    String url = appServer.whereIs("click_too_big.html");
    driver.get(url);
    WebElement element = driver.findElement(By.id("click"));
    element.click();
    wait.until(titleIs("clicks"));
  }
  @Test
  @Ignore(value = {CHROME, OPERA, OPERA_MOBILE, ANDROID, IPHONE, MARIONETTE}, reason
      = "Chrome: failed, Firefox: failed with native events, others: not tested")
  public void testShouldBeAbleToClickOnAnElementInFrameGreaterThanTwoViewports() {
    assumeFalse(isFirefox(driver) && isNativeEventsEnabled(driver));
    String url = appServer.whereIs("click_too_big_in_frame.html");
    driver.get(url);
    WebElement frame = driver.findElement(By.id("iframe1"));
    driver.switchTo().frame(frame);
    WebElement element = driver.findElement(By.id("click"));
    element.click();
    wait.until(titleIs("clicks"));
  }
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE}, reason = "Opera: failed")
  public void testShouldBeAbleToClickOnRTLLanguageLink() {
    String url = appServer.whereIs("click_rtl.html");
    driver.get(url);
    WebElement element = driver.findElement(By.id("ar_link"));
    element.click();
    wait.until(titleIs("clicks"));
  }
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE, ANDROID, IPHONE}, reason = "not tested")
  public void testShouldBeAbleToClickOnLinkInAbsolutelyPositionedFooter() {
    String url = appServer.whereIs("fixedFooterNoScroll.html");
    driver.get(url);
    WebElement element = driver.findElement(By.id("link"));
    element.click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE, ANDROID, IPHONE}, reason = "not tested")
  public void testShouldBeAbleToClickOnLinkInAbsolutelyPositionedFooterInQuirksMode() {
    String url = appServer.whereIs("fixedFooterNoScrollQuirksMode.html");
    driver.get(url);
    WebElement element = driver.findElement(By.id("link"));
    element.click();
    wait.until(titleIs("XHTML Test Page"));
  }
  @JavascriptEnabled
  @Test
  public void testShouldBeAbleToClickOnLinksWithNoHrefAttribute() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.linkText("No href"));
    element.click();
    wait.until(titleIs("Changed"));
  }
  @JavascriptEnabled
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE, ANDROID, IPHONE, MARIONETTE},
      reason = "Opera: fails, others: not tested")
  public void testShouldBeAbleToClickOnALinkThatWrapsToTheNextLine() {
    driver.get(appServer.whereIs("click_tests/link_that_wraps.html"));
    driver.findElement(By.id("link")).click();
    wait.until(titleIs("Submitted Successfully!"));
  }
  @JavascriptEnabled
  @Test
  @Ignore(value = {OPERA, OPERA_MOBILE, ANDROID, IPHONE, MARIONETTE},
      reason = "Opera: fails, others: not tested")
  public void testShouldBeAbleToClickOnASpanThatWrapsToTheNextLine() {
    assumeFalse(isFirefox(driver) && isNativeEventsEnabled(driver));
    driver.get(appServer.whereIs("click_tests/span_that_wraps.html"));
    driver.findElement(By.id("span")).click();
    wait.until(titleIs("Submitted Successfully!"));
  }
}
| |
package course.labs.todomanager;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.text.ParseException;
import java.util.Date;
import android.app.ListActivity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ListView;
import android.widget.TextView;
import course.labs.todomanager.ToDoItem.Priority;
import course.labs.todomanager.ToDoItem.Status;
/**
 * Displays the list of ToDoItems, lets the user add new ones via
 * AddToDoActivity (footer tap), and persists the list to private storage
 * across pause/resume.
 */
public class ToDoManagerActivity extends ListActivity {

	private static final int ADD_TODO_ITEM_REQUEST = 0;
	private static final String FILE_NAME = "TodoManagerActivityData.txt";
	private static final String TAG = "Lab-UserInterface";

	// IDs for menu items
	private static final int MENU_DELETE = Menu.FIRST;
	private static final int MENU_DUMP = Menu.FIRST + 1;

	// Backing adapter for this ListActivity's ListView.
	ToDoListAdapter mAdapter;

	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		mAdapter = new ToDoListAdapter(getApplicationContext());

		// Put divider between ToDoItems and FooterView
		getListView().setFooterDividersEnabled(true);

		// Inflate the footer and append it below the list items.
		TextView footerView =
				(TextView) getLayoutInflater().inflate(R.layout.footer_view, null);
		getListView().addFooterView(footerView);

		// Tapping the footer launches AddToDoActivity; the result comes back
		// through onActivityResult().
		footerView.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				Intent intent = new Intent(getBaseContext(), AddToDoActivity.class);
				startActivityForResult(intent, ADD_TODO_ITEM_REQUEST);
			}
		});

		getListView().setAdapter(mAdapter);
	}

	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		Log.i(TAG, "Entered onActivityResult()");
		// If the user submitted a new ToDoItem, rebuild it from the result
		// Intent and add it to the adapter.
		if (requestCode == ADD_TODO_ITEM_REQUEST && resultCode == RESULT_OK) {
			mAdapter.add(new ToDoItem(data));
		}
	}

	// Do not modify below here
	@Override
	public void onResume() {
		super.onResume();
		// Load saved ToDoItems, if necessary
		if (mAdapter.getCount() == 0)
			loadItems();
	}

	@Override
	protected void onPause() {
		super.onPause();
		// Save ToDoItems
		saveItems();
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		super.onCreateOptionsMenu(menu);
		menu.add(Menu.NONE, MENU_DELETE, Menu.NONE, "Delete all");
		menu.add(Menu.NONE, MENU_DUMP, Menu.NONE, "Dump to log");
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		switch (item.getItemId()) {
		case MENU_DELETE:
			mAdapter.clear();
			return true;
		case MENU_DUMP:
			dump();
			return true;
		default:
			return super.onOptionsItemSelected(item);
		}
	}

	// Log every item, one line per item, with field separators normalized.
	private void dump() {
		for (int i = 0; i < mAdapter.getCount(); i++) {
			String data = ((ToDoItem) mAdapter.getItem(i)).toLog();
			Log.i(TAG, "Item " + i + ": " + data.replace(ToDoItem.ITEM_SEP, ","));
		}
	}

	// Load stored ToDoItems
	private void loadItems() {
		BufferedReader reader = null;
		try {
			FileInputStream fis = openFileInput(FILE_NAME);
			reader = new BufferedReader(new InputStreamReader(fis));

			String title = null;
			String priority = null;
			String status = null;
			String dateLine = null;

			// Each item is stored as four consecutive lines:
			// title, priority, status, date.
			while (null != (title = reader.readLine())) {
				priority = reader.readLine();
				status = reader.readLine();
				dateLine = reader.readLine();
				if (priority == null || status == null || dateLine == null) {
					// Truncated record (e.g. partial write): stop rather than
					// pass null into FORMAT.parse(), which would throw an
					// uncaught NullPointerException and crash the Activity.
					Log.w(TAG, "Ignoring incomplete ToDoItem record");
					break;
				}
				Date date = ToDoItem.FORMAT.parse(dateLine);
				mAdapter.add(new ToDoItem(title, Priority.valueOf(priority),
						Status.valueOf(status), date));
			}
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (ParseException e) {
			e.printStackTrace();
		} finally {
			if (null != reader) {
				try {
					reader.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	// Save ToDoItems to file
	private void saveItems() {
		PrintWriter writer = null;
		try {
			FileOutputStream fos = openFileOutput(FILE_NAME, MODE_PRIVATE);
			writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(
					fos)));
			// Relies on ToDoItem.toString() emitting the record format that
			// loadItems() reads back — presumably title, priority, status and
			// date on separate lines; verify against ToDoItem.
			for (int idx = 0; idx < mAdapter.getCount(); idx++) {
				writer.println(mAdapter.getItem(idx));
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (null != writer) {
				writer.close();
			}
		}
	}
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/** Interface of <a href="http://schema.org/Role">http://schema.org/Role</a>. */
public interface Role extends Intangible {
  /** Builder interface of <a href="http://schema.org/Role">http://schema.org/Role</a>. */
  public interface Builder extends Intangible.Builder {
    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);
    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
    @Override
    Builder setJsonLdId(@Nullable String value);
    @Override
    Builder setJsonLdReverse(String property, Thing obj);
    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);
    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);
    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);
    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);
    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);
    /** Add a value to property description. */
    Builder addDescription(Text value);
    /** Add a value to property description. */
    Builder addDescription(String value);
    /** Add a value to property endDate. */
    Builder addEndDate(Date value);
    /** Add a value to property endDate. */
    Builder addEndDate(String value);
    /** Add a value to property image. */
    Builder addImage(ImageObject value);
    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);
    /** Add a value to property image. */
    Builder addImage(URL value);
    /** Add a value to property image. */
    Builder addImage(String value);
    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);
    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);
    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);
    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);
    /** Add a value to property name. */
    Builder addName(Text value);
    /** Add a value to property name. */
    Builder addName(String value);
    /** Add a value to property namedPosition. */
    Builder addNamedPosition(Text value);
    /** Add a value to property namedPosition. */
    Builder addNamedPosition(URL value);
    /** Add a value to property namedPosition. */
    Builder addNamedPosition(String value);
    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);
    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);
    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);
    /** Add a value to property roleName. */
    Builder addRoleName(Text value);
    /** Add a value to property roleName. */
    Builder addRoleName(URL value);
    /** Add a value to property roleName. */
    Builder addRoleName(String value);
    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);
    /** Add a value to property sameAs. */
    Builder addSameAs(String value);
    /** Add a value to property startDate. */
    Builder addStartDate(Date value);
    /** Add a value to property startDate. */
    Builder addStartDate(String value);
    /** Add a value to property url. */
    Builder addUrl(URL value);
    /** Add a value to property url. */
    Builder addUrl(String value);
    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);
    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);
    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);
    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);
    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);
    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);
    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);
    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);
    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);
    /** Build a {@link Role} object. */
    Role build();
  }
  /**
   * Returns the value list of property endDate. Empty list is returned if the property not set in
   * current object.
   */
  ImmutableList<SchemaOrgType> getEndDateList();
  /**
   * Returns the value list of property namedPosition. Empty list is returned if the property not
   * set in current object.
   */
  ImmutableList<SchemaOrgType> getNamedPositionList();
  /**
   * Returns the value list of property roleName. Empty list is returned if the property not set in
   * current object.
   */
  ImmutableList<SchemaOrgType> getRoleNameList();
  /**
   * Returns the value list of property startDate. Empty list is returned if the property not set in
   * current object.
   */
  ImmutableList<SchemaOrgType> getStartDateList();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.failover.jobstealing;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.IgniteSpiAdapter;
import org.apache.ignite.spi.IgniteSpiConfiguration;
import org.apache.ignite.spi.IgniteSpiConsistencyChecked;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.IgniteSpiMBeanAdapter;
import org.apache.ignite.spi.IgniteSpiMultipleInstancesSupport;
import org.apache.ignite.spi.failover.FailoverContext;
import org.apache.ignite.spi.failover.FailoverSpi;
import static org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi.THIEF_NODE_ATTR;
/**
 * Job stealing failover SPI needs to always be used in conjunction with
 * {@link org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi} SPI. When {@link org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi}
 * receives a <b>steal</b> request and rejects jobs so they can be routed to the
 * appropriate node, it is the responsibility of this {@code JobStealingFailoverSpi}
 * SPI to make sure that the job is indeed re-routed to the node that has sent the initial
 * request to <b>steal</b> it.
 * <p>
 * {@code JobStealingFailoverSpi} knows where to route a job based on the
 * {@link org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi#THIEF_NODE_ATTR} job context attribute (see {@link org.apache.ignite.compute.ComputeJobContext}).
 * Prior to rejecting a job, {@link org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi} will populate this
 * attribute with the ID of the node that wants to <b>steal</b> this job.
 * Then {@code JobStealingFailoverSpi} will read the value of this attribute and
 * route the job to the node specified.
 * <p>
 * If failure is caused by a node crash, and not by <b>steal</b> request, then this
 * SPI behaves identically to {@link org.apache.ignite.spi.failover.always.AlwaysFailoverSpi}, and tries to find the
 * next balanced node to fail-over a job to.
 * <p>
 * <h1 class="header">Configuration</h1>
 * <h2 class="header">Mandatory</h2>
 * This SPI has no mandatory configuration parameters.
 * <h2 class="header">Optional</h2>
 * This SPI has following optional configuration parameters:
 * <ul>
 * <li>Maximum failover attempts for a single job (see {@link #setMaximumFailoverAttempts(int)}).</li>
 * </ul>
 * Here is a Java example on how to configure grid with {@link JobStealingFailoverSpi}.
 * <pre name="code" class="java">
 * JobStealingFailoverSpi spi = new JobStealingFailoverSpi();
 *
 * // Override maximum failover attempts.
 * spi.setMaximumFailoverAttempts(5);
 *
 * IgniteConfiguration cfg = new IgniteConfiguration();
 *
 * // Override default failover SPI.
 * cfg.setFailoverSpi(spi);
 *
 * // Starts grid.
 * G.start(cfg);
 * </pre>
 * Here is an example of how to configure {@link JobStealingFailoverSpi} from Spring XML configuration file.
 * <pre name="code" class="xml">
 * &lt;property name="failoverSpi"&gt;
 *     &lt;bean class="org.apache.ignite.spi.failover.jobstealing.JobStealingFailoverSpi"&gt;
 *         &lt;property name="maximumFailoverAttempts" value="5"/&gt;
 *     &lt;/bean&gt;
 * &lt;/property&gt;
 * </pre>
 * <p>
 * <img src="http://ignite.apache.org/images/spring-small.png">
 * <br>
 * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
 * @see org.apache.ignite.spi.failover.FailoverSpi
 */
@IgniteSpiMultipleInstancesSupport(true)
@IgniteSpiConsistencyChecked(optional = true)
public class JobStealingFailoverSpi extends IgniteSpiAdapter implements FailoverSpi {
    /** Maximum number of attempts to execute a failed job on another node (default is {@code 5}). */
    public static final int DFLT_MAX_FAILOVER_ATTEMPTS = 5;

    /**
     * Name of job context attribute containing all nodes a job failed on. Note
     * that this list does not include nodes that a job was stolen from.
     *
     * @see org.apache.ignite.compute.ComputeJobContext
     */
    static final String FAILED_NODE_LIST_ATTR = "gg:failover:failednodelist";

    /**
     * Name of job context attribute containing current failover attempt count.
     * This count is incremented every time the same job gets failed over to
     * another node for execution if it was not successfully stolen.
     *
     * @see org.apache.ignite.compute.ComputeJobContext
     */
    static final String FAILOVER_ATTEMPT_COUNT_ATTR = "gg:failover:attemptcount";

    /** Maximum failover attempts job context attribute name. */
    private static final String MAX_FAILOVER_ATTEMPT_ATTR = "gg:failover:maxattempts";

    /** Injected grid logger. */
    @LoggerResource
    private IgniteLogger log;

    /** Maximum number of attempts to execute a failed job on another node. */
    private int maxFailoverAttempts = DFLT_MAX_FAILOVER_ATTEMPTS;

    /** Number of jobs that were failed over. */
    private int totalFailedOverJobs;

    /** Number of jobs that were stolen. */
    private int totalStolenJobs;

    /**
     * See {@link #setMaximumFailoverAttempts(int)}.
     *
     * @return Maximum number of attempts to execute a failed job on another node.
     */
    public int getMaximumFailoverAttempts() {
        return maxFailoverAttempts;
    }

    /**
     * Sets maximum number of attempts to execute a failed job on another node.
     * If job gets stolen and thief node exists then it is not considered as
     * failed job.
     * If not specified, {@link #DFLT_MAX_FAILOVER_ATTEMPTS} value will be used.
     * <p>
     * Note this value must be identical for all grid nodes in the grid.
     *
     * @param maxFailoverAttempts Maximum number of attempts to execute a failed
     *      job on another node.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public JobStealingFailoverSpi setMaximumFailoverAttempts(int maxFailoverAttempts) {
        this.maxFailoverAttempts = maxFailoverAttempts;

        return this;
    }

    /**
     * Get total number of jobs that were failed over including stolen ones.
     *
     * @return Total number of failed over jobs.
     */
    public int getTotalFailedOverJobsCount() {
        return totalFailedOverJobs;
    }

    /**
     * Get total number of jobs that were stolen.
     *
     * @return Total number of stolen jobs.
     */
    public int getTotalStolenJobsCount() {
        return totalStolenJobs;
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> getNodeAttributes() throws IgniteSpiException {
        // Published so the consistency check can verify all nodes use the same limit.
        return F.<String, Object>asMap(createSpiAttributeName(MAX_FAILOVER_ATTEMPT_ATTR), maxFailoverAttempts);
    }

    /** {@inheritDoc} */
    @Override public void spiStart(String igniteInstanceName) throws IgniteSpiException {
        // Start SPI start stopwatch.
        startStopwatch();

        assertParameter(maxFailoverAttempts >= 0, "maximumFailoverAttempts >= 0");

        if (log.isDebugEnabled())
            log.debug(configInfo("maxFailoverAttempts", maxFailoverAttempts));

        registerMBean(igniteInstanceName, new JobStealingFailoverSpiMBeanImpl(this), JobStealingFailoverSpiMBean.class);

        // Ack ok start.
        if (log.isDebugEnabled())
            log.debug(startInfo());
    }

    /** {@inheritDoc} */
    @Override public void spiStop() throws IgniteSpiException {
        unregisterMBean();

        // Ack ok stop.
        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public ClusterNode failover(FailoverContext ctx, List<ClusterNode> top) {
        assert ctx != null;
        assert top != null;

        if (top.isEmpty()) {
            U.warn(log, "Received empty subgrid and is forced to fail.");

            // Nowhere to failover to.
            return null;
        }

        Integer failoverCnt = ctx.getJobResult().getJobContext().getAttribute(FAILOVER_ATTEMPT_COUNT_ATTR);

        if (failoverCnt == null)
            failoverCnt = 0;

        // Counter is already past the limit - terminate failover with an error.
        if (failoverCnt > maxFailoverAttempts) {
            U.error(log, "Failover count exceeded maximum failover attempts parameter [failedJob=" +
                ctx.getJobResult().getJob() + ", maxFailoverAttempts=" + maxFailoverAttempts + ']');

            return null;
        }

        // All allowed attempts have been used up - give up with a warning.
        if (failoverCnt == maxFailoverAttempts) {
            U.warn(log, "Job failover failed because number of maximum failover attempts is exceeded [failedJob=" +
                ctx.getJobResult().getJob() + ", maxFailoverAttempts=" + maxFailoverAttempts + ']');

            return null;
        }

        try {
            ClusterNode thief = null;

            boolean isNodeFailed = false;

            // Thief attribute is set by JobStealingCollisionSpi prior to rejecting the job.
            UUID thiefId = ctx.getJobResult().getJobContext().getAttribute(THIEF_NODE_ATTR);

            if (thiefId != null) {
                // Clear attribute.
                ctx.getJobResult().getJobContext().setAttribute(THIEF_NODE_ATTR, null);

                thief = getSpiContext().node(thiefId);

                if (thief != null) {
                    // If sender != receiver.
                    if (thief.equals(ctx.getJobResult().getNode())) {
                        U.error(log, "Job stealer node is equal to job node (will fail-over using " +
                            "load-balancing): " + thief.id());

                        isNodeFailed = true;

                        thief = null;
                    }
                    else if (!top.contains(thief)) {
                        U.warn(log, "Thief node is not part of task topology (will fail-over using load-balancing) " +
                            "[thief=" + thiefId + ", topSize=" + top.size() + ']');

                        thief = null;
                    }

                    // NOTE(review): this debug line also fires when 'thief' was just reset
                    // to null by the branches above (it will then print "to=null").
                    if (log.isDebugEnabled())
                        log.debug("Failing-over stolen job [from=" + ctx.getJobResult().getNode() + ", to=" +
                            thief + ']');
                }
                else {
                    isNodeFailed = true;

                    U.warn(log, "Thief node left grid (will fail-over using load balancing): " + thiefId);
                }
            }
            else
                isNodeFailed = true;

            // If job was not stolen or stolen node is not part of topology,
            // then failover the regular way.
            if (thief == null) {
                Collection<UUID> failedNodes = ctx.getJobResult().getJobContext().getAttribute(FAILED_NODE_LIST_ATTR);

                if (failedNodes == null)
                    failedNodes = U.newHashSet(1);

                if (isNodeFailed)
                    failedNodes.add(ctx.getJobResult().getNode().id());

                // Set updated failed node set into job context.
                ctx.getJobResult().getJobContext().setAttribute(FAILED_NODE_LIST_ATTR, failedNodes);

                // Copy.
                List<ClusterNode> newTop = new ArrayList<>(top.size());

                for (ClusterNode n : top) {
                    // Add non-failed nodes to topology.
                    if (!failedNodes.contains(n.id()))
                        newTop.add(n);
                }

                if (newTop.isEmpty()) {
                    U.warn(log, "Received topology with only nodes that job had failed on (forced to fail) " +
                        "[failedNodes=" + failedNodes + ']');

                    // Nowhere to failover to.
                    return null;
                }

                thief = ctx.getBalancedNode(newTop);

                if (thief == null)
                    U.warn(log, "Load balancer returned null node for topology: " + newTop);
            }

            if (isNodeFailed)
                // This is a failover, not stealing.
                failoverCnt++;

            // Even if it was stealing and thief node left grid we assume
            // that it is failover because of the fail.
            ctx.getJobResult().getJobContext().setAttribute(FAILOVER_ATTEMPT_COUNT_ATTR, failoverCnt);

            if (thief != null) {
                totalFailedOverJobs++;

                if (isNodeFailed) {
                    U.warn(log, "Failed over job to a new node [newNode=" + thief.id() +
                        ", oldNode=" + ctx.getJobResult().getNode().id() +
                        ", sesId=" + ctx.getTaskSession().getId() +
                        ", job=" + ctx.getJobResult().getJob() +
                        ", jobCtx=" + ctx.getJobResult().getJobContext() +
                        ", task=" + ctx.getTaskSession().getTaskName() + ']');
                }
                else {
                    totalStolenJobs++;

                    if (log.isInfoEnabled())
                        log.info("Stealing job to a new node [newNode=" + thief.id() +
                            ", oldNode=" + ctx.getJobResult().getNode().id() +
                            ", sesId=" + ctx.getTaskSession().getId() +
                            ", job=" + ctx.getJobResult().getJob() +
                            ", jobCtx=" + ctx.getJobResult().getJobContext() +
                            ", task=" + ctx.getTaskSession().getTaskName() + ']');
                }
            }

            return thief;
        }
        catch (IgniteException e) {
            U.error(log, "Failed to get next balanced node for failover: " + ctx, e);

            return null;
        }
    }

    /** {@inheritDoc} */
    @Override protected List<String> getConsistentAttributeNames() {
        return Collections.singletonList(createSpiAttributeName(MAX_FAILOVER_ATTEMPT_ATTR));
    }

    /** {@inheritDoc} */
    @Override public JobStealingFailoverSpi setName(String name) {
        super.setName(name);

        return this;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(JobStealingFailoverSpi.class, this);
    }

    /**
     * MBean implementation for JobStealingFailoverSpi.
     */
    private class JobStealingFailoverSpiMBeanImpl extends IgniteSpiMBeanAdapter implements JobStealingFailoverSpiMBean {
        /**
         * Constructor.
         *
         * @param spiAdapter SPI adapter to expose through the MBean.
         */
        public JobStealingFailoverSpiMBeanImpl(IgniteSpiAdapter spiAdapter) {
            super(spiAdapter);
        }

        /** {@inheritDoc} */
        @Override public int getMaximumFailoverAttempts() {
            return JobStealingFailoverSpi.this.getMaximumFailoverAttempts();
        }

        /** {@inheritDoc} */
        @Override public int getTotalFailedOverJobsCount() {
            return JobStealingFailoverSpi.this.getTotalFailedOverJobsCount();
        }

        /** {@inheritDoc} */
        @Override public int getTotalStolenJobsCount() {
            return JobStealingFailoverSpi.this.getTotalStolenJobsCount();
        }
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.knowledge.qnamaker.implementation;
import com.microsoft.azure.cognitiveservices.knowledge.qnamaker.models.UpdateSettingsOptionalParameter;
import retrofit2.Retrofit;
import com.microsoft.azure.cognitiveservices.knowledge.qnamaker.EndpointSettings;
import com.google.common.base.Joiner;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.cognitiveservices.knowledge.qnamaker.models.EndpointSettingsDTO;
import com.microsoft.azure.cognitiveservices.knowledge.qnamaker.models.EndpointSettingsDTOActiveLearning;
import com.microsoft.azure.cognitiveservices.knowledge.qnamaker.models.ErrorResponseException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.PATCH;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
 * An instance of this class provides access to all the operations defined
 * in EndpointSettings.
 *
 * <p>Generated by the AutoRest code generator (see the file header); manual
 * edits may be overwritten on regeneration.
 */
public class EndpointSettingsImpl implements EndpointSettings {
    /** The Retrofit service to perform REST calls. */
    private EndpointSettingsService service;
    /** The service client containing this operation class. */
    private QnAMakerClientImpl client;
    /**
     * Initializes an instance of EndpointSettingsImpl.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public EndpointSettingsImpl(Retrofit retrofit, QnAMakerClientImpl client) {
        this.service = retrofit.create(EndpointSettingsService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for EndpointSettings to be
     * used by Retrofit to perform actually REST calls.
     */
    interface EndpointSettingsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.cognitiveservices.knowledge.qnamaker.EndpointSettings getSettings" })
        @GET("endpointSettings")
        Observable<Response<ResponseBody>> getSettings(@Header("accept-language") String acceptLanguage, @Header("x-ms-parameterized-host") String parameterizedHost, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.cognitiveservices.knowledge.qnamaker.EndpointSettings updateSettings" })
        @PATCH("endpointSettings")
        Observable<Response<ResponseBody>> updateSettings(@Header("accept-language") String acceptLanguage, @Body EndpointSettingsDTO endpointSettingsPayload, @Header("x-ms-parameterized-host") String parameterizedHost, @Header("User-Agent") String userAgent);
    }
    /**
     * Gets endpoint settings for an endpoint.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the EndpointSettingsDTO object if successful.
     */
    public EndpointSettingsDTO getSettings() {
        // Synchronous variant: blocks on the async call and unwraps the body.
        return getSettingsWithServiceResponseAsync().toBlocking().single().body();
    }
    /**
     * Gets endpoint settings for an endpoint.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<EndpointSettingsDTO> getSettingsAsync(final ServiceCallback<EndpointSettingsDTO> serviceCallback) {
        return ServiceFuture.fromResponse(getSettingsWithServiceResponseAsync(), serviceCallback);
    }
    /**
     * Gets endpoint settings for an endpoint.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the EndpointSettingsDTO object
     */
    public Observable<EndpointSettingsDTO> getSettingsAsync() {
        // Strip the ServiceResponse wrapper, exposing only the DTO body.
        return getSettingsWithServiceResponseAsync().map(new Func1<ServiceResponse<EndpointSettingsDTO>, EndpointSettingsDTO>() {
            @Override
            public EndpointSettingsDTO call(ServiceResponse<EndpointSettingsDTO> response) {
                return response.body();
            }
        });
    }
    /**
     * Gets endpoint settings for an endpoint.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the EndpointSettingsDTO object
     */
    public Observable<ServiceResponse<EndpointSettingsDTO>> getSettingsWithServiceResponseAsync() {
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        // "{placeholder}, value" pairs are the x-ms-parameterized-host convention:
        // the REST client substitutes the value into the service base URL template.
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        return service.getSettings(this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<EndpointSettingsDTO>>>() {
                @Override
                public Observable<ServiceResponse<EndpointSettingsDTO>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<EndpointSettingsDTO> clientResponse = getSettingsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/validation failures surface as onError.
                        return Observable.error(t);
                    }
                }
            });
    }
    // Maps the raw HTTP response: 200 -> EndpointSettingsDTO, anything else -> ErrorResponseException.
    private ServiceResponse<EndpointSettingsDTO> getSettingsDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<EndpointSettingsDTO, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<EndpointSettingsDTO>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }
    /**
     * Updates endpoint settings for an endpoint.
     *
     * @param updateSettingsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void updateSettings(UpdateSettingsOptionalParameter updateSettingsOptionalParameter) {
        updateSettingsWithServiceResponseAsync(updateSettingsOptionalParameter).toBlocking().single().body();
    }
    /**
     * Updates endpoint settings for an endpoint.
     *
     * @param updateSettingsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> updateSettingsAsync(UpdateSettingsOptionalParameter updateSettingsOptionalParameter, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(updateSettingsWithServiceResponseAsync(updateSettingsOptionalParameter), serviceCallback);
    }
    /**
     * Updates endpoint settings for an endpoint.
     *
     * @param updateSettingsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> updateSettingsAsync(UpdateSettingsOptionalParameter updateSettingsOptionalParameter) {
        return updateSettingsWithServiceResponseAsync(updateSettingsOptionalParameter).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
    /**
     * Updates endpoint settings for an endpoint.
     *
     * @param updateSettingsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> updateSettingsWithServiceResponseAsync(UpdateSettingsOptionalParameter updateSettingsOptionalParameter) {
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        // Optional parameter object may be null; treat a missing wrapper as "no settings supplied".
        final EndpointSettingsDTOActiveLearning activeLearning = updateSettingsOptionalParameter != null ? updateSettingsOptionalParameter.activeLearning() : null;
        return updateSettingsWithServiceResponseAsync(activeLearning);
    }
    /**
     * Updates endpoint settings for an endpoint.
     *
     * @param activeLearning Active Learning settings of the endpoint.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> updateSettingsWithServiceResponseAsync(EndpointSettingsDTOActiveLearning activeLearning) {
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        Validator.validate(activeLearning);
        // Wrap the active-learning settings in the payload DTO expected by the PATCH body.
        EndpointSettingsDTO endpointSettingsPayload = new EndpointSettingsDTO();
        endpointSettingsPayload.withActiveLearning(activeLearning);
        // Same x-ms-parameterized-host convention as in getSettingsWithServiceResponseAsync().
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        return service.updateSettings(this.client.acceptLanguage(), endpointSettingsPayload, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = updateSettingsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Maps the raw HTTP response: 204 (no content) -> success, anything else -> ErrorResponseException.
    private ServiceResponse<Void> updateSettingsDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(204, new TypeToken<Void>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }
    @Override
    public EndpointSettingsUpdateSettingsParameters updateSettings() {
        // Entry point for the fluent "builder" style update defined below.
        return new EndpointSettingsUpdateSettingsParameters(this);
    }
    /**
     * Internal class implementing EndpointSettingsUpdateSettingsDefinition.
     *
     * Collects optional parameters fluently and executes the update on demand.
     */
    class EndpointSettingsUpdateSettingsParameters implements EndpointSettingsUpdateSettingsDefinition {
        private EndpointSettingsImpl parent;
        private EndpointSettingsDTOActiveLearning activeLearning;
        /**
         * Constructor.
         * @param parent the parent object.
         */
        EndpointSettingsUpdateSettingsParameters(EndpointSettingsImpl parent) {
            this.parent = parent;
        }
        @Override
        public EndpointSettingsUpdateSettingsParameters withActiveLearning(EndpointSettingsDTOActiveLearning activeLearning) {
            this.activeLearning = activeLearning;
            return this;
        }
        @Override
        public void execute() {
            // Blocking execution of the collected update.
            updateSettingsWithServiceResponseAsync(activeLearning).toBlocking().single().body();
        }
        @Override
        public Observable<Void> executeAsync() {
            return updateSettingsWithServiceResponseAsync(activeLearning).map(new Func1<ServiceResponse<Void>, Void>() {
                @Override
                public Void call(ServiceResponse<Void> response) {
                    return response.body();
                }
            });
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfigUpdate;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfigUpdate;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class InternalInferModelAction extends ActionType<InternalInferModelAction.Response> {
public static final InternalInferModelAction INSTANCE = new InternalInferModelAction();
public static final String NAME = "cluster:internal/xpack/ml/inference/infer";
/**
 * Private constructor: this action type is a singleton exposed via {@link #INSTANCE}.
 */
private InternalInferModelAction() {
    super(NAME, Response::new);
}
public static class Request extends ActionRequest {
private final String modelId;
private final List<Map<String, Object>> objectsToInfer;
private final InferenceConfigUpdate update;
private final boolean previouslyLicensed;
/**
 * Convenience constructor: builds a request with no documents to infer and a
 * default (empty-parameter) regression config update.
 *
 * @param modelId ID of the trained model
 * @param previouslyLicensed whether the model was previously used under a valid license
 */
public Request(String modelId, boolean previouslyLicensed) {
    this(modelId, Collections.emptyList(), RegressionConfigUpdate.EMPTY_PARAMS, previouslyLicensed);
}
/**
 * Main constructor.
 *
 * @param modelId ID of the trained model; must be non-null
 * @param objectsToInfer documents (field maps) to run inference on; must be non-null,
 *        stored as an unmodifiable view
 * @param inferenceConfig config update applied at inference time; must be non-null
 * @param previouslyLicensed whether the model was previously used under a valid license
 */
public Request(String modelId,
               List<Map<String, Object>> objectsToInfer,
               InferenceConfigUpdate inferenceConfig,
               boolean previouslyLicensed) {
    this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID);
    this.objectsToInfer = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(objectsToInfer, "objects_to_infer"));
    this.update = ExceptionsHelper.requireNonNull(inferenceConfig, "inference_config");
    this.previouslyLicensed = previouslyLicensed;
}
public Request(String modelId,
Map<String, Object> objectToInfer,
InferenceConfigUpdate update,
boolean previouslyLicensed) {
this(modelId,
Collections.singletonList(ExceptionsHelper.requireNonNull(objectToInfer, "objects_to_infer")),
update,
previouslyLicensed);
}
public Request(StreamInput in) throws IOException {
super(in);
this.modelId = in.readString();
this.objectsToInfer = Collections.unmodifiableList(in.readList(StreamInput::readMap));
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
this.update = in.readNamedWriteable(InferenceConfigUpdate.class);
} else {
InferenceConfig oldConfig = in.readNamedWriteable(InferenceConfig.class);
if (oldConfig instanceof RegressionConfig) {
this.update = RegressionConfigUpdate.fromConfig((RegressionConfig)oldConfig);
} else if (oldConfig instanceof ClassificationConfig) {
this.update = ClassificationConfigUpdate.fromConfig((ClassificationConfig) oldConfig);
} else {
throw new IOException("Unexpected configuration type [" + oldConfig.getName() + "]");
}
}
this.previouslyLicensed = in.readBoolean();
}
public String getModelId() {
return modelId;
}
public List<Map<String, Object>> getObjectsToInfer() {
return objectsToInfer;
}
public InferenceConfigUpdate getUpdate() {
return update;
}
public boolean isPreviouslyLicensed() {
return previouslyLicensed;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(modelId);
out.writeCollection(objectsToInfer, StreamOutput::writeMap);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
out.writeNamedWriteable(update);
} else {
out.writeNamedWriteable(update.toConfig());
}
out.writeBoolean(previouslyLicensed);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InternalInferModelAction.Request that = (InternalInferModelAction.Request) o;
return Objects.equals(modelId, that.modelId)
&& Objects.equals(update, that.update)
&& Objects.equals(previouslyLicensed, that.previouslyLicensed)
&& Objects.equals(objectsToInfer, that.objectsToInfer);
}
@Override
public int hashCode() {
return Objects.hash(modelId, objectsToInfer, update, previouslyLicensed);
}
}
public static class Response extends ActionResponse {
private final List<InferenceResults> inferenceResults;
private final boolean isLicensed;
public Response(List<InferenceResults> inferenceResults, boolean isLicensed) {
super();
this.inferenceResults = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(inferenceResults, "inferenceResults"));
this.isLicensed = isLicensed;
}
public Response(StreamInput in) throws IOException {
super(in);
this.inferenceResults = Collections.unmodifiableList(in.readNamedWriteableList(InferenceResults.class));
this.isLicensed = in.readBoolean();
}
public List<InferenceResults> getInferenceResults() {
return inferenceResults;
}
public boolean isLicensed() {
return isLicensed;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteableList(inferenceResults);
out.writeBoolean(isLicensed);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InternalInferModelAction.Response that = (InternalInferModelAction.Response) o;
return isLicensed == that.isLicensed && Objects.equals(inferenceResults, that.inferenceResults);
}
@Override
public int hashCode() {
return Objects.hash(inferenceResults, isLicensed);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private List<InferenceResults> inferenceResults;
private boolean isLicensed;
public Builder setInferenceResults(List<InferenceResults> inferenceResults) {
this.inferenceResults = inferenceResults;
return this;
}
public Builder setLicensed(boolean licensed) {
isLicensed = licensed;
return this;
}
public Response build() {
return new Response(inferenceResults, isLicensed);
}
}
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch;
import com.fasterxml.jackson.core.JsonParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
 * Static helpers for converting, unwrapping, grouping, and rethrowing exceptions.
 * All methods are stateless and thread-safe.
 */
public final class ExceptionsHelper {

    private static final Logger logger = LogManager.getLogger(ExceptionsHelper.class);

    /** Returns {@code e} itself if it is already a {@link RuntimeException}, otherwise wraps it. */
    public static RuntimeException convertToRuntime(Exception e) {
        if (e instanceof RuntimeException) {
            return (RuntimeException) e;
        }
        return new ElasticsearchException(e);
    }

    /** Returns {@code e} itself if it is already an {@link ElasticsearchException}, otherwise wraps it. */
    public static ElasticsearchException convertToElastic(Exception e) {
        if (e instanceof ElasticsearchException) {
            return (ElasticsearchException) e;
        }
        return new ElasticsearchException(e);
    }

    /**
     * Maps a throwable to the REST status to report: the exception's own status for
     * {@link ElasticsearchException}, 400 for illegal arguments and JSON parse errors,
     * 429 for rejected executions, and 500 for everything else (including {@code null}).
     */
    public static RestStatus status(Throwable t) {
        if (t != null) {
            if (t instanceof ElasticsearchException) {
                return ((ElasticsearchException) t).status();
            } else if (t instanceof IllegalArgumentException) {
                return RestStatus.BAD_REQUEST;
            } else if (t instanceof JsonParseException) {
                return RestStatus.BAD_REQUEST;
            } else if (t instanceof EsRejectedExecutionException) {
                return RestStatus.TOO_MANY_REQUESTS;
            }
        }
        return RestStatus.INTERNAL_SERVER_ERROR;
    }

    /**
     * Follows the cause chain through {@link ElasticsearchWrapperException} layers and returns
     * the first non-wrapper throwable. Stops early on a null or self-referential cause, and
     * bails out (with a warning) after 10 levels to guard against pathological chains.
     */
    public static Throwable unwrapCause(Throwable t) {
        int counter = 0;
        Throwable result = t;
        while (result instanceof ElasticsearchWrapperException) {
            if (result.getCause() == null) {
                return result;
            }
            if (result.getCause() == result) {
                return result;
            }
            if (counter++ > 10) {
                // dear god, if we got more than 10 levels down, WTF? just bail
                logger.warn("Exception cause unwrapping ran for 10 levels...", t);
                return result;
            }
            result = result.getCause();
        }
        return result;
    }

    /** Renders the full stack trace of {@code e} to a string. */
    public static String stackTrace(Throwable e) {
        StringWriter stackTraceStringWriter = new StringWriter();
        PrintWriter printWriter = new PrintWriter(stackTraceStringWriter);
        e.printStackTrace(printWriter);
        return stackTraceStringWriter.toString();
    }

    /**
     * Formats a stack trace as "\tat frame" lines joined by newlines, skipping the first
     * frame (typically the call to {@code Thread#getStackTrace} itself).
     */
    public static String formatStackTrace(final StackTraceElement[] stackTrace) {
        return Arrays.stream(stackTrace).skip(1).map(e -> "\tat " + e).collect(Collectors.joining("\n"));
    }

    /**
     * Rethrows the first exception in the list and adds all remaining to the suppressed list.
     * If the given list is empty no exception is thrown
     *
     */
    public static <T extends Throwable> void rethrowAndSuppress(List<T> exceptions) throws T {
        T main = null;
        for (T ex : exceptions) {
            main = useOrSuppress(main, ex);
        }
        if (main != null) {
            throw main;
        }
    }

    /**
     * Throws a runtime exception with all given exceptions added as suppressed.
     * If the given list is empty no exception is thrown
     */
    public static <T extends Throwable> void maybeThrowRuntimeAndSuppress(List<T> exceptions) {
        T main = null;
        for (T ex : exceptions) {
            main = useOrSuppress(main, ex);
        }
        if (main != null) {
            throw new ElasticsearchException(main);
        }
    }

    /**
     * Accumulator step: returns {@code second} if {@code first} is null, otherwise attaches
     * {@code second} to {@code first} as a suppressed exception and returns {@code first}.
     */
    public static <T extends Throwable> T useOrSuppress(T first, T second) {
        if (first == null) {
            return second;
        } else {
            first.addSuppressed(second);
        }
        return first;
    }

    // exception types that indicate on-disk index corruption
    private static final List<Class<? extends IOException>> CORRUPTION_EXCEPTIONS =
        List.of(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class);

    /**
     * Looks at the given Throwable's and its cause(s) as well as any suppressed exceptions on the Throwable as well as its causes
     * and returns the first corruption indicating exception (as defined by {@link #CORRUPTION_EXCEPTIONS}) it finds.
     * @param t Throwable
     * @return Corruption indicating exception if one is found, otherwise {@code null}
     */
    public static IOException unwrapCorruption(Throwable t) {
        return t == null ? null : ExceptionsHelper.<IOException>unwrapCausesAndSuppressed(t, cause -> {
            for (Class<?> clazz : CORRUPTION_EXCEPTIONS) {
                if (clazz.isInstance(cause)) {
                    return true;
                }
            }
            return false;
        }).orElse(null);
    }

    /**
     * Looks at the given Throwable and its cause(s) and returns the first Throwable that is of one of the given classes or {@code null}
     * if no matching Throwable is found. Unlike {@link #unwrapCorruption} this method does only check the given Throwable and its causes
     * but does not look at any suppressed exceptions.
     * @param t Throwable
     * @param clazzes Classes to look for
     * @return Matching Throwable if one is found, otherwise {@code null}
     */
    public static Throwable unwrap(Throwable t, Class<?>... clazzes) {
        if (t != null) {
            // identity set detects cause cycles so the walk always terminates
            final Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
            do {
                if (seen.add(t) == false) {
                    return null;
                }
                for (Class<?> clazz : clazzes) {
                    if (clazz.isInstance(t)) {
                        return t;
                    }
                }
            } while ((t = t.getCause()) != null);
        }
        return null;
    }

    /**
     * Rethrows the specified exception (wrapping checked throwables in a {@link RuntimeException}).
     * If {@code null} is specified, nothing is thrown and <code>true</code> is returned.
     */
    public static boolean reThrowIfNotNull(@Nullable Throwable e) {
        if (e != null) {
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            } else {
                throw new RuntimeException(e);
            }
        }
        return true;
    }

    /**
     * Breadth-first search over the cause chain AND suppressed exceptions of {@code cause},
     * returning the first throwable matching {@code predicate}. An identity set guards
     * against cycles. The unchecked cast is safe only if the predicate guarantees the
     * matched throwable is of type {@code T}.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Throwable> Optional<T> unwrapCausesAndSuppressed(Throwable cause, Predicate<Throwable> predicate) {
        if (predicate.test(cause)) {
            return Optional.of((T) cause);
        }
        final Queue<Throwable> queue = new LinkedList<>();
        queue.add(cause);
        final Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
        while (queue.isEmpty() == false) {
            final Throwable current = queue.remove();
            if (seen.add(current) == false) {
                continue;
            }
            if (predicate.test(current)) {
                return Optional.of((T) current);
            }
            Collections.addAll(queue, current.getSuppressed());
            if (current.getCause() != null) {
                queue.add(current.getCause());
            }
        }
        return Optional.empty();
    }

    /**
     * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable.
     *
     * @param cause the root throwable
     * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable
     */
    public static Optional<Error> maybeError(final Throwable cause) {
        return unwrapCausesAndSuppressed(cause, t -> t instanceof Error);
    }

    /**
     * If the specified cause is an unrecoverable error, this method will rethrow the cause on a separate thread so that it can not be
     * caught and bubbles up to the uncaught exception handler. Note that the cause tree is examined for any {@link Error}. See
     * {@link #maybeError(Throwable)} for the semantics.
     *
     * @param throwable the throwable to possibly throw on another thread
     */
    public static void maybeDieOnAnotherThread(final Throwable throwable) {
        ExceptionsHelper.maybeError(throwable).ifPresent(error -> {
            /*
             * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, sometimes the stack
             * contains statements that catch any throwable (e.g., Netty, and the JDK futures framework). This means that a rethrow here
             * will not bubble up to where we want it to. So, we fork a thread and throw the exception from there where we are sure the
             * stack does not contain statements that catch any throwable. We do not wrap the exception so as to not lose the original cause
             * during exit.
             */
            try {
                // try to log the current stack trace
                final String formatted = ExceptionsHelper.formatStackTrace(Thread.currentThread().getStackTrace());
                logger.error("fatal error\n{}", formatted);
            } finally {
                // the throw happens even if logging itself fails
                new Thread(
                        () -> {
                            throw error;
                        })
                        .start();
            }
        });
    }

    /**
     * Deduplicate the failures by exception message and index.
     */
    public static ShardOperationFailedException[] groupBy(ShardOperationFailedException[] failures) {
        List<ShardOperationFailedException> uniqueFailures = new ArrayList<>();
        Set<GroupBy> reasons = new HashSet<>();
        for (ShardOperationFailedException failure : failures) {
            GroupBy reason = new GroupBy(failure);
            if (reasons.contains(reason) == false) {
                reasons.add(reason);
                uniqueFailures.add(failure);
            }
        }
        return uniqueFailures.toArray(new ShardOperationFailedException[0]);
    }

    /** Deduplication key for {@link #groupBy}: cause message, index name, and cause class. */
    private static class GroupBy {
        final String reason;
        final String index;
        final Class<? extends Throwable> causeType;

        GroupBy(ShardOperationFailedException failure) {
            Throwable cause = failure.getCause();
            //the index name from the failure contains the cluster alias when using CCS. Ideally failures should be grouped by
            //index name and cluster alias. That's why the failure index name has the precedence over the one coming from the cause,
            //which does not include the cluster alias.
            String indexName = failure.index();
            if (indexName == null) {
                if (cause instanceof ElasticsearchException) {
                    final Index index = ((ElasticsearchException) cause).getIndex();
                    if (index != null) {
                        indexName = index.getName();
                    }
                }
            }
            this.index = indexName;
            this.reason = cause.getMessage();
            this.causeType = cause.getClass();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            GroupBy groupBy = (GroupBy) o;
            return Objects.equals(reason, groupBy.reason) &&
                Objects.equals(index, groupBy.index) &&
                Objects.equals(causeType, groupBy.causeType);
        }

        @Override
        public int hashCode() {
            return Objects.hash(reason, index, causeType);
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ilm;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ilm.Step.StepKey;
import org.mockito.Mockito;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.IsNull.notNullValue;
/**
 * Tests for {@link WaitForFollowShardTasksStep}: the step's condition is met when every
 * follow-shard task has its follower global checkpoint caught up to the leader's, or when
 * the index is not a follower index at all.
 */
public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase<WaitForFollowShardTasksStep> {

    @Override
    protected WaitForFollowShardTasksStep createRandomInstance() {
        StepKey stepKey = randomStepKey();
        StepKey nextStepKey = randomStepKey();
        return new WaitForFollowShardTasksStep(stepKey, nextStepKey, client);
    }

    @Override
    protected WaitForFollowShardTasksStep mutateInstance(WaitForFollowShardTasksStep instance) {
        StepKey key = instance.getKey();
        StepKey nextKey = instance.getNextStepKey();
        // mutate exactly one of the two keys by appending a random suffix to its name
        if (randomBoolean()) {
            key = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5));
        } else {
            // derive the mutated nextKey from nextKey's own components (not key's)
            nextKey = new StepKey(nextKey.getPhase(), nextKey.getAction(), nextKey.getName() + randomAlphaOfLength(5));
        }
        return new WaitForFollowShardTasksStep(key, nextKey, instance.getClient());
    }

    @Override
    protected WaitForFollowShardTasksStep copyInstance(WaitForFollowShardTasksStep instance) {
        return new WaitForFollowShardTasksStep(instance.getKey(), instance.getNextStepKey(), instance.getClient());
    }

    public void testConditionMet() {
        // follower index with CCR metadata and both shards fully caught up (leader GCP == follower GCP)
        IndexMetadata indexMetadata = IndexMetadata.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(2)
            .numberOfReplicas(0)
            .build();
        List<FollowStatsAction.StatsResponse> statsResponses = Arrays.asList(
            new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)),
            new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 3, 3))
        );
        mockFollowStatsCall(indexMetadata.getIndex().getName(), statsResponses);
        // single-element arrays capture the async listener's results
        final boolean[] conditionMetHolder = new boolean[1];
        final ToXContentObject[] informationContextHolder = new ToXContentObject[1];
        final Exception[] exceptionHolder = new Exception[1];
        createRandomInstance().evaluateCondition(
            Metadata.builder().put(indexMetadata, true).build(),
            indexMetadata.getIndex(),
            new AsyncWaitStep.Listener() {
                @Override
                public void onResponse(boolean conditionMet, ToXContentObject informationContext) {
                    conditionMetHolder[0] = conditionMet;
                    informationContextHolder[0] = informationContext;
                }
                @Override
                public void onFailure(Exception e) {
                    exceptionHolder[0] = e;
                }
            },
            MASTER_TIMEOUT
        );
        assertThat(conditionMetHolder[0], is(true));
        assertThat(informationContextHolder[0], nullValue());
        assertThat(exceptionHolder[0], nullValue());
    }

    public void testConditionNotMetShardsNotInSync() {
        // shard 1 lags: follower GCP (3) behind leader GCP (8), so the condition must not be met
        IndexMetadata indexMetadata = IndexMetadata.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(2)
            .numberOfReplicas(0)
            .build();
        List<FollowStatsAction.StatsResponse> statsResponses = Arrays.asList(
            new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)),
            new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 8, 3))
        );
        mockFollowStatsCall(indexMetadata.getIndex().getName(), statsResponses);
        final boolean[] conditionMetHolder = new boolean[1];
        final ToXContentObject[] informationContextHolder = new ToXContentObject[1];
        final Exception[] exceptionHolder = new Exception[1];
        createRandomInstance().evaluateCondition(
            Metadata.builder().put(indexMetadata, true).build(),
            indexMetadata.getIndex(),
            new AsyncWaitStep.Listener() {
                @Override
                public void onResponse(boolean conditionMet, ToXContentObject informationContext) {
                    conditionMetHolder[0] = conditionMet;
                    informationContextHolder[0] = informationContext;
                }
                @Override
                public void onFailure(Exception e) {
                    exceptionHolder[0] = e;
                }
            },
            MASTER_TIMEOUT
        );
        assertThat(conditionMetHolder[0], is(false));
        assertThat(informationContextHolder[0], notNullValue());
        assertThat(exceptionHolder[0], nullValue());
        // the information context must report only the lagging shard with its checkpoints
        WaitForFollowShardTasksStep.Info info = (WaitForFollowShardTasksStep.Info) informationContextHolder[0];
        assertThat(info.getShardFollowTaskInfos().size(), equalTo(1));
        assertThat(info.getShardFollowTaskInfos().get(0).getShardId(), equalTo(1));
        assertThat(info.getShardFollowTaskInfos().get(0).getLeaderGlobalCheckpoint(), equalTo(8L));
        assertThat(info.getShardFollowTaskInfos().get(0).getFollowerGlobalCheckpoint(), equalTo(3L));
    }

    public void testConditionNotMetNotAFollowerIndex() {
        // no CCR metadata custom -> not a follower index -> condition trivially met, no stats call
        IndexMetadata indexMetadata = IndexMetadata.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .numberOfShards(2)
            .numberOfReplicas(0)
            .build();
        final boolean[] conditionMetHolder = new boolean[1];
        final ToXContentObject[] informationContextHolder = new ToXContentObject[1];
        final Exception[] exceptionHolder = new Exception[1];
        createRandomInstance().evaluateCondition(
            Metadata.builder().put(indexMetadata, true).build(),
            indexMetadata.getIndex(),
            new AsyncWaitStep.Listener() {
                @Override
                public void onResponse(boolean conditionMet, ToXContentObject informationContext) {
                    conditionMetHolder[0] = conditionMet;
                    informationContextHolder[0] = informationContext;
                }
                @Override
                public void onFailure(Exception e) {
                    exceptionHolder[0] = e;
                }
            },
            MASTER_TIMEOUT
        );
        assertThat(conditionMetHolder[0], is(true));
        assertThat(informationContextHolder[0], nullValue());
        assertThat(exceptionHolder[0], nullValue());
        Mockito.verifyNoMoreInteractions(client);
    }

    /** Builds a minimal task status; only shard id and the two global checkpoints matter here. */
    private static ShardFollowNodeTaskStatus createShardFollowTaskStatus(int shardId, long leaderGCP, long followerGCP) {
        return new ShardFollowNodeTaskStatus(
            "remote",
            "leader-index",
            "follower-index",
            shardId,
            leaderGCP,
            -1,
            followerGCP,
            -1,
            -1,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            Collections.emptyNavigableMap(),
            0,
            null
        );
    }

    /** Stubs the client's FollowStats call: asserts the requested index and returns the given responses. */
    private void mockFollowStatsCall(String expectedIndexName, List<FollowStatsAction.StatsResponse> statsResponses) {
        Mockito.doAnswer(invocationOnMock -> {
            FollowStatsAction.StatsRequest request = (FollowStatsAction.StatsRequest) invocationOnMock.getArguments()[1];
            assertThat(request.indices().length, equalTo(1));
            assertThat(request.indices()[0], equalTo(expectedIndexName));
            @SuppressWarnings("unchecked")
            ActionListener<FollowStatsAction.StatsResponses> listener = (ActionListener<FollowStatsAction.StatsResponses>) invocationOnMock
                .getArguments()[2];
            listener.onResponse(new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), statsResponses));
            return null;
        }).when(client).execute(Mockito.eq(FollowStatsAction.INSTANCE), Mockito.any(), Mockito.any());
    }
}
| |
/**
*
* Copyright (c) Microsoft and contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
package com.microsoft.windowsazure.management.compute.models;
import com.microsoft.windowsazure.core.LazyArrayList;
import java.net.URI;
import java.util.ArrayList;
import java.util.Calendar;
/**
* Parameters supplied to the Create Virtual Machine VM Image operation.
*/
/**
 * Parameters supplied to the Create Virtual Machine VM Image operation.
 *
 * Required: name, label, and the OS disk configuration. All other properties
 * are optional metadata describing the image.
 */
public class VirtualMachineVMImageCreateParameters {

    // Optional: data-disk configurations associated with the image (may be empty).
    private ArrayList<DataDiskConfigurationCreateParameters> dataDiskConfigurations;
    // Optional: free-form description of the image.
    private String description;
    // Optional: End User License Agreement; ideally a URL pointing at the EULA text.
    private String eula;
    // Optional: URI of the icon shown for the image in the Management Portal.
    private String iconUri;
    // Optional: grouping value for related VM Images.
    private String imageFamily;
    // Required: identifier (label) for the image.
    private String label;
    // Optional: language of the image.
    private String language;
    // Required: name of the image.
    private String name;
    // Required: operating-system disk configuration for the image.
    private OSDiskConfigurationCreateParameters oSDiskConfiguration;
    // Optional: URI of the privacy-policy document for the image.
    private URI privacyUri;
    // Optional: date the image was added to the image repository.
    private Calendar publishedDate;
    // Optional: recommended VM size for machines created from this image.
    private String recommendedVMSize;
    // Optional: whether the image should be listed in the portal.
    private Boolean showInGui;
    // Optional: URI of the small icon shown in the Management Portal.
    private String smallIconUri;

    /** @return The DataDiskConfigurations value (may be empty if the image has no data disks). */
    public ArrayList<DataDiskConfigurationCreateParameters> getDataDiskConfigurations() {
        return this.dataDiskConfigurations;
    }

    /** @param dataDiskConfigurationsValue The DataDiskConfigurations value. */
    public void setDataDiskConfigurations(final ArrayList<DataDiskConfigurationCreateParameters> dataDiskConfigurationsValue) {
        this.dataDiskConfigurations = dataDiskConfigurationsValue;
    }

    /** @return The Description value. */
    public String getDescription() {
        return this.description;
    }

    /** @param descriptionValue The Description value. */
    public void setDescription(final String descriptionValue) {
        this.description = descriptionValue;
    }

    /** @return The Eula value (recommended to be a URL pointing to a EULA). */
    public String getEula() {
        return this.eula;
    }

    /** @param eulaValue The Eula value. */
    public void setEula(final String eulaValue) {
        this.eula = eulaValue;
    }

    /** @return The IconUri value. */
    public String getIconUri() {
        return this.iconUri;
    }

    /** @param iconUriValue The IconUri value. */
    public void setIconUri(final String iconUriValue) {
        this.iconUri = iconUriValue;
    }

    /** @return The ImageFamily value. */
    public String getImageFamily() {
        return this.imageFamily;
    }

    /** @param imageFamilyValue The ImageFamily value. */
    public void setImageFamily(final String imageFamilyValue) {
        this.imageFamily = imageFamilyValue;
    }

    /** @return The Label value. */
    public String getLabel() {
        return this.label;
    }

    /** @param labelValue The Label value. */
    public void setLabel(final String labelValue) {
        this.label = labelValue;
    }

    /** @return The Language value. */
    public String getLanguage() {
        return this.language;
    }

    /** @param languageValue The Language value. */
    public void setLanguage(final String languageValue) {
        this.language = languageValue;
    }

    /** @return The Name value. */
    public String getName() {
        return this.name;
    }

    /** @param nameValue The Name value. */
    public void setName(final String nameValue) {
        this.name = nameValue;
    }

    /** @return The OSDiskConfiguration value. */
    public OSDiskConfigurationCreateParameters getOSDiskConfiguration() {
        return this.oSDiskConfiguration;
    }

    /** @param oSDiskConfigurationValue The OSDiskConfiguration value. */
    public void setOSDiskConfiguration(final OSDiskConfigurationCreateParameters oSDiskConfigurationValue) {
        this.oSDiskConfiguration = oSDiskConfigurationValue;
    }

    /** @return The PrivacyUri value. */
    public URI getPrivacyUri() {
        return this.privacyUri;
    }

    /** @param privacyUriValue The PrivacyUri value. */
    public void setPrivacyUri(final URI privacyUriValue) {
        this.privacyUri = privacyUriValue;
    }

    /** @return The PublishedDate value. */
    public Calendar getPublishedDate() {
        return this.publishedDate;
    }

    /** @param publishedDateValue The PublishedDate value. */
    public void setPublishedDate(final Calendar publishedDateValue) {
        this.publishedDate = publishedDateValue;
    }

    /** @return The RecommendedVMSize value. */
    public String getRecommendedVMSize() {
        return this.recommendedVMSize;
    }

    /** @param recommendedVMSizeValue The RecommendedVMSize value. */
    public void setRecommendedVMSize(final String recommendedVMSizeValue) {
        this.recommendedVMSize = recommendedVMSizeValue;
    }

    /** @return The ShowInGui value. */
    public Boolean isShowInGui() {
        return this.showInGui;
    }

    /** @param showInGuiValue The ShowInGui value. */
    public void setShowInGui(final Boolean showInGuiValue) {
        this.showInGui = showInGuiValue;
    }

    /** @return The SmallIconUri value. */
    public String getSmallIconUri() {
        return this.smallIconUri;
    }

    /** @param smallIconUriValue The SmallIconUri value. */
    public void setSmallIconUri(final String smallIconUriValue) {
        this.smallIconUri = smallIconUriValue;
    }

    /**
     * Initializes a new instance of the VirtualMachineVMImageCreateParameters class,
     * seeding the data-disk configuration list with an empty lazy list.
     */
    public VirtualMachineVMImageCreateParameters() {
        this.setDataDiskConfigurations(new LazyArrayList<DataDiskConfigurationCreateParameters>());
    }

    /**
     * Initializes a new instance of the VirtualMachineVMImageCreateParameters class
     * with the required arguments, rejecting nulls.
     *
     * @param name Gets or sets the name of the image.
     * @param label Gets or sets an identifier for the image.
     * @param oSDiskConfiguration Gets or sets configuration information for the
     * operating system disk that is associated with the image.
     */
    public VirtualMachineVMImageCreateParameters(String name, String label, OSDiskConfigurationCreateParameters oSDiskConfiguration) {
        this();
        if (name == null) {
            throw new NullPointerException("name");
        }
        if (label == null) {
            throw new NullPointerException("label");
        }
        if (oSDiskConfiguration == null) {
            throw new NullPointerException("oSDiskConfiguration");
        }
        this.setName(name);
        this.setLabel(label);
        this.setOSDiskConfiguration(oSDiskConfiguration);
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.securityinsights.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.securityinsights.fluent.EntitiesRelationsClient;
import com.azure.resourcemanager.securityinsights.fluent.models.RelationInner;
import com.azure.resourcemanager.securityinsights.models.RelationList;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in EntitiesRelationsClient. */
public final class EntitiesRelationsClientImpl implements EntitiesRelationsClient {
    // NOTE(review): this logger is never referenced within this class; it is retained
    // as emitted by the AutoRest code generator.
    private final ClientLogger logger = new ClientLogger(EntitiesRelationsClientImpl.class);
    /** The proxy service used to perform REST calls. */
    private final EntitiesRelationsService service;
    /** The service client containing this operation class. */
    private final SecurityInsightsImpl client;
    /**
     * Initializes an instance of EntitiesRelationsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    EntitiesRelationsClientImpl(SecurityInsightsImpl client) {
        // Build a RestProxy-backed implementation of the service interface using the
        // shared HTTP pipeline and serializer of the owning client.
        this.service =
            RestProxy.create(EntitiesRelationsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }
    /**
     * The interface defining all the services for SecurityInsightsEntitiesRelations to be used by the proxy service to
     * perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "SecurityInsightsEnti")
    private interface EntitiesRelationsService {
        // First-page request: all OData query options ($filter, $orderby, $top,
        // $skipToken) are optional and may be passed as null.
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights"
                + "/workspaces/{workspaceName}/providers/Microsoft.SecurityInsights/entities/{entityId}/relations")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<RelationList>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("workspaceName") String workspaceName,
            @PathParam("entityId") String entityId,
            @QueryParam("$filter") String filter,
            @QueryParam("$orderby") String orderby,
            @QueryParam("$top") Integer top,
            @QueryParam("$skipToken") String skipToken,
            @HeaderParam("Accept") String accept,
            Context context);
        // Continuation request: nextLink is a full, pre-encoded URL returned by the
        // previous page, hence encoded = true.
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<RelationList>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @param filter Filters the results, based on a Boolean condition. Optional.
     * @param orderby Sorts the results. Optional.
     * @param top Returns only the first n results. Optional.
     * @param skipToken Skiptoken is only used if a previous operation returned a partial result. If a previous response
     *     contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
     *     specifies a starting point to use for subsequent calls. Optional.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RelationInner>> listSinglePageAsync(
        String resourceGroupName,
        String workspaceName,
        String entityId,
        String filter,
        String orderby,
        Integer top,
        String skipToken) {
        // Validate required client state and parameters; surface failures as Mono.error
        // rather than throwing, keeping the reactive contract.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (entityId == null) {
            return Mono.error(new IllegalArgumentException("Parameter entityId is required and cannot be null."));
        }
        final String accept = "application/json";
        // Issue the request inside FluxUtil.withContext so the subscriber's reactor
        // context is captured, then adapt RelationList into a PagedResponse. The final
        // contextWrite merges the client's own context into the subscription.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            entityId,
                            filter,
                            orderby,
                            top,
                            skipToken,
                            accept,
                            context))
            .<PagedResponse<RelationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @param filter Filters the results, based on a Boolean condition. Optional.
     * @param orderby Sorts the results. Optional.
     * @param top Returns only the first n results. Optional.
     * @param skipToken Skiptoken is only used if a previous operation returned a partial result. If a previous response
     *     contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
     *     specifies a starting point to use for subsequent calls. Optional.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RelationInner>> listSinglePageAsync(
        String resourceGroupName,
        String workspaceName,
        String entityId,
        String filter,
        String orderby,
        Integer top,
        String skipToken,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (entityId == null) {
            return Mono.error(new IllegalArgumentException("Parameter entityId is required and cannot be null."));
        }
        final String accept = "application/json";
        // Caller supplied an explicit context: merge it with the client's context and
        // pass it straight through instead of capturing the reactor context.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                entityId,
                filter,
                orderby,
                top,
                skipToken,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @param filter Filters the results, based on a Boolean condition. Optional.
     * @param orderby Sorts the results. Optional.
     * @param top Returns only the first n results. Optional.
     * @param skipToken Skiptoken is only used if a previous operation returned a partial result. If a previous response
     *     contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
     *     specifies a starting point to use for subsequent calls. Optional.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<RelationInner> listAsync(
        String resourceGroupName,
        String workspaceName,
        String entityId,
        String filter,
        String orderby,
        Integer top,
        String skipToken) {
        // PagedFlux lazily fetches the first page, then follows nextLink for
        // subsequent pages on demand.
        return new PagedFlux<>(
            () -> listSinglePageAsync(resourceGroupName, workspaceName, entityId, filter, orderby, top, skipToken),
            nextLink -> listNextSinglePageAsync(nextLink));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<RelationInner> listAsync(String resourceGroupName, String workspaceName, String entityId) {
        // Convenience overload: all optional OData query options default to null
        // (no filtering, no ordering, no paging limits).
        final String filter = null;
        final String orderby = null;
        final Integer top = null;
        final String skipToken = null;
        return new PagedFlux<>(
            () -> listSinglePageAsync(resourceGroupName, workspaceName, entityId, filter, orderby, top, skipToken),
            nextLink -> listNextSinglePageAsync(nextLink));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @param filter Filters the results, based on a Boolean condition. Optional.
     * @param orderby Sorts the results. Optional.
     * @param top Returns only the first n results. Optional.
     * @param skipToken Skiptoken is only used if a previous operation returned a partial result. If a previous response
     *     contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
     *     specifies a starting point to use for subsequent calls. Optional.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<RelationInner> listAsync(
        String resourceGroupName,
        String workspaceName,
        String entityId,
        String filter,
        String orderby,
        Integer top,
        String skipToken,
        Context context) {
        // Same as the context-free overload, but threads the caller's Context through
        // both the first-page and continuation requests.
        return new PagedFlux<>(
            () ->
                listSinglePageAsync(
                    resourceGroupName, workspaceName, entityId, filter, orderby, top, skipToken, context),
            nextLink -> listNextSinglePageAsync(nextLink, context));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<RelationInner> list(String resourceGroupName, String workspaceName, String entityId) {
        // Synchronous entry point: wraps the async PagedFlux in a blocking
        // PagedIterable for non-reactive callers.
        final String filter = null;
        final String orderby = null;
        final Integer top = null;
        final String skipToken = null;
        return new PagedIterable<>(
            listAsync(resourceGroupName, workspaceName, entityId, filter, orderby, top, skipToken));
    }
    /**
     * Gets all relations of an entity.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param entityId entity ID.
     * @param filter Filters the results, based on a Boolean condition. Optional.
     * @param orderby Sorts the results. Optional.
     * @param top Returns only the first n results. Optional.
     * @param skipToken Skiptoken is only used if a previous operation returned a partial result. If a previous response
     *     contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
     *     specifies a starting point to use for subsequent calls. Optional.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all relations of an entity.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<RelationInner> list(
        String resourceGroupName,
        String workspaceName,
        String entityId,
        String filter,
        String orderby,
        Integer top,
        String skipToken,
        Context context) {
        return new PagedIterable<>(
            listAsync(resourceGroupName, workspaceName, entityId, filter, orderby, top, skipToken, context));
    }
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of relations along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RelationInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Follow the continuation URL and adapt the response exactly as in
        // listSinglePageAsync, propagating the client's reactor context.
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<RelationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of relations along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RelationInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright 2008-2018 Ivan Dejanovic and Quine Interactive
* www.quineinteractive.com
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package com.quine.javatree;
import java.io.File;
import java.io.FileOutputStream;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JTree;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
/**
 * JavaTreeActionController performs the required actions for Actions from JavaTreeController. This class is used to
 * implement all the logic needed in order to provide the necessary functionality to the Actions of the
 * JavaTreeController class.
 *
 * @author Ivan Dejanovic
 *
 * @version 1.0
 *
 * @since 1.0
 *
 */
public class JavaTreeActionController {
    /**
     * Creates a standard JavaTreeActionController.
     */
    public JavaTreeActionController() {
    }
    /**
     * Creates a default JTree with one root element titled "main".
     *
     * @return tree
     */
    public JTree newAction() {
        DefaultMutableTreeNode root = new DefaultMutableTreeNode(new JavaTreeNodeObject("main"));
        return new JTree(root);
    }
    /**
     * Opens a file user selects, reads it and constructs a tree based on the file content. Returns reference to a tree
     * or null if the selection was cancelled or an error occurred.
     *
     * @return tree
     */
    public JTree openAction() {
        File file = chooseFile(true);
        if (file == null) {
            // chooseFile returns null when the dialog was cancelled as well.
            JOptionPane.showMessageDialog(null, "Wrong type of file selected.\nFile extension needs to be .jtd.");
            return null;
        }
        Document document = loadDocumentFromFile(file);
        if (document == null) {
            JOptionPane.showMessageDialog(null, "Error while parsing document.");
            return null;
        }
        return convertDocumentToJTree(document);
    }
    /**
     * Constructs a DOM object from a tree and saves it to an XML file. Returns true if the method was successful,
     * false if an error occurred.
     *
     * @param tree the tree to persist
     *
     * @return status
     */
    public boolean saveAction(JTree tree) {
        Document document = convertJTreeToXML(tree);
        if (document == null) {
            JOptionPane.showMessageDialog(null, "Error while converting data from tree to document.");
            return false;
        }
        File file = chooseFile(false);
        if (file == null) {
            JOptionPane.showMessageDialog(null, "Wrong type of file selected.\nFile extension needs to be .jtd.");
            return false;
        }
        return saveDocumentToFile(document, file);
    }
    /**
     * Adds a child node to the selected node of a tree. Does nothing if no node is selected.
     *
     * @param tree the tree whose selection receives a new child
     */
    public void addChildAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        if (node == null) {
            return; // no selection: nothing to add a child to
        }
        addChildToGivenNodeAction(tree, node);
    }
    /**
     * Adds a child with a fresh, empty JavaTreeNodeObject to the given node of a tree.
     *
     * @param tree the tree that owns the node
     * @param node the parent for the new child; ignored if null
     */
    public void addChildToGivenNodeAction(JTree tree, DefaultMutableTreeNode node) {
        if (node == null) {
            return;
        }
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        // Append at the end of the parent's current children.
        model.insertNodeInto(new DefaultMutableTreeNode(new JavaTreeNodeObject()), node, model.getChildCount(node));
    }
    /**
     * Deletes the selected node of a tree. Does nothing if no node is selected.
     *
     * @param tree the tree whose selected node is removed
     */
    public void deleteNodeAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        deleteGivenNodeAction(tree, node);
    }
    /**
     * Deletes the given node of a tree. The root node (or a null node) is left untouched, since a node without a
     * parent cannot be removed from the model.
     *
     * @param tree the tree that owns the node
     * @param node the node to remove
     */
    public void deleteGivenNodeAction(JTree tree, DefaultMutableTreeNode node) {
        if (node == null || node.getParent() == null) {
            return; // nothing selected, or node is the root
        }
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        model.removeNodeFromParent(node);
    }
    /**
     * Moves the selected node of a tree one position up among its siblings. Does nothing if no node is selected.
     *
     * @param tree the tree whose selected node is moved
     */
    public void moveUpAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        if (node == null) {
            return;
        }
        moveGivenNodeUpAction(tree, node);
    }
    /**
     * Moves the given node one position up among its siblings. The first child (and the root) stays in place.
     *
     * @param tree the tree that owns the node
     * @param node the node to move
     */
    public void moveGivenNodeUpAction(JTree tree, DefaultMutableTreeNode node) {
        DefaultMutableTreeNode parent = (DefaultMutableTreeNode) node.getParent();
        if (parent == null) {
            return; // root has no siblings to move among
        }
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        int index = model.getIndexOfChild(parent, node);
        if (index > 0) {
            model.removeNodeFromParent(node);
            model.insertNodeInto(node, parent, --index);
        }
    }
    /**
     * Moves the selected node of a tree one position down among its siblings. Does nothing if no node is selected.
     *
     * @param tree the tree whose selected node is moved
     */
    public void moveDownAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        if (node == null) {
            return;
        }
        moveGivenNodeDownAction(tree, node);
    }
    /**
     * Moves the given node one position down among its siblings. The last child (and the root) stays in place.
     *
     * @param tree the tree that owns the node
     * @param node the node to move
     */
    public void moveGivenNodeDownAction(JTree tree, DefaultMutableTreeNode node) {
        DefaultMutableTreeNode parent = (DefaultMutableTreeNode) node.getParent();
        if (parent == null) {
            return; // root has no siblings to move among
        }
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        int index = model.getIndexOfChild(parent, node);
        if (index < (model.getChildCount(parent) - 1)) {
            model.removeNodeFromParent(node);
            model.insertNodeInto(node, parent, ++index);
        }
    }
    /**
     * Moves the selected node of a tree one level up (to its grandparent). Does nothing if no node is selected.
     *
     * @param tree the tree whose selected node is moved
     */
    public void moveLevelUpAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        if (node == null) {
            return;
        }
        moveGivenNodeLevelUpAction(tree, node);
    }
    /**
     * Moves the given node one level up, appending it to its grandparent's children. Children of the root (and the
     * root itself) stay in place.
     *
     * @param tree the tree that owns the node
     * @param node the node to move
     */
    public void moveGivenNodeLevelUpAction(JTree tree, DefaultMutableTreeNode node) {
        DefaultMutableTreeNode parent = (DefaultMutableTreeNode) node.getParent();
        if (parent == null) {
            return; // root cannot move up a level
        }
        DefaultMutableTreeNode superParent = (DefaultMutableTreeNode) parent.getParent();
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        if (!(parent.isRoot())) {
            model.removeNodeFromParent(node);
            model.insertNodeInto(node, superParent, model.getChildCount(superParent));
        }
    }
    /**
     * Moves the selected node of a tree one level down (under its next sibling). Does nothing if no node is selected.
     *
     * @param tree the tree whose selected node is moved
     */
    public void moveLevelDownAction(JTree tree) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
        if (node == null) {
            return;
        }
        moveGivenNodeLevelDownAction(tree, node);
    }
    /**
     * Moves the given node one level down, appending it to the children of its next sibling. The last child (and the
     * root) stays in place.
     *
     * @param tree the tree that owns the node
     * @param node the node to move
     */
    public void moveGivenNodeLevelDownAction(JTree tree, DefaultMutableTreeNode node) {
        DefaultMutableTreeNode parent = (DefaultMutableTreeNode) node.getParent();
        if (parent == null) {
            return; // root has no sibling to move under
        }
        DefaultMutableTreeNode nextNode = (DefaultMutableTreeNode) parent.getChildAfter(node);
        DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
        if (nextNode != null) {
            model.removeNodeFromParent(node);
            model.insertNodeInto(node, nextNode, model.getChildCount(nextNode));
        }
    }
    /**
     * Displays help.
     */
    public void HelpAction() {
        JOptionPane.showMessageDialog(null, "Java Tree data organizer.");
    }
    /**
     * Displays about.
     */
    public void AboutAction() {
        JOptionPane
            .showMessageDialog(null,
                    "JavaTree version 1.0.\nCopyright Quine Interactive 2011.\nwww.quineinteractive.com");
    }
    /**
     * Creates a file chooser dialog, restricted to .jtd files, and returns the selected file.
     *
     * @param loadFlag true to show an open dialog, false to show a save dialog
     *
     * @return the selected file, or null if the user cancelled
     */
    private File chooseFile(boolean loadFlag) {
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setFileFilter(new javax.swing.filechooser.FileFilter() {
            public boolean accept(File f) {
                return f.isDirectory() || f.getName().toLowerCase().endsWith(".jtd");
            }
            public String getDescription() {
                return "Java Tree document";
            }
        });
        int returnStatus;
        if (loadFlag) {
            returnStatus = fileChooser.showOpenDialog(null);
        } else {
            returnStatus = fileChooser.showSaveDialog(null);
        }
        if (returnStatus != JFileChooser.APPROVE_OPTION) {
            return null;
        }
        return fileChooser.getSelectedFile();
    }
    /**
     * Converts the tree it receives as parameter to a document rooted at a "JavaTreeXML" element.
     *
     * @param tree the tree to serialize
     *
     * @return document, or null if an error occurred
     */
    private Document convertJTreeToXML(JTree tree) {
        Document document = null;
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            document = builder.newDocument();
            Element rootElement = document.createElement("JavaTreeXML");
            DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) tree.getModel().getRoot();
            rootElement.appendChild(createNodeElement(document, rootNode));
            document.appendChild(rootElement);
        } catch (Exception e) {
            e.printStackTrace();
            document = null;
        }
        return document;
    }
    /**
     * Converts the document it receives as parameter to a tree.
     *
     * @param document the parsed JavaTree document
     *
     * @return tree
     */
    private JTree convertDocumentToJTree(Document document) {
        Element rootElement = document.getDocumentElement();
        // First child of JavaTreeXML is the root Node element; whitespace text nodes
        // are suppressed by the validating, whitespace-ignoring parser.
        Element node = (Element) rootElement.getFirstChild();
        return new JTree(createJavaTreeNode(node));
    }
    /**
     * Saves a document to a file and returns true if successful. Returns false if an error occurs.
     *
     * @param document the document to write
     * @param file the destination file
     *
     * @return status
     */
    private boolean saveDocumentToFile(Document document, File file) {
        boolean status = true;
        // try-with-resources guarantees the output stream is closed even when the
        // transform fails (the original code leaked the FileOutputStream).
        try (FileOutputStream out = new FileOutputStream(file)) {
            Transformer transformer = TransformerFactory.newInstance().newTransformer();
            transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, "javatree.dtd");
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty(OutputKeys.METHOD, "xml");
            transformer.transform(new DOMSource(document), new StreamResult(out));
        } catch (Exception e) {
            status = false;
            e.printStackTrace();
        }
        return status;
    }
    /**
     * Reads a file and creates a document. Returns the document if successful, or null if an error occurs.
     *
     * @param file the file to parse
     *
     * @return document
     */
    private Document loadDocumentFromFile(File file) {
        Document document = null;
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            // NOTE(review): validation against javatree.dtd enables DTD processing; if
            // .jtd files can come from untrusted sources, external entity resolution
            // should be restricted (XXE) — confirm the threat model before changing.
            factory.setValidating(true);
            factory.setIgnoringElementContentWhitespace(true);
            DocumentBuilder builder = factory.newDocumentBuilder();
            document = builder.parse(file);
        } catch (Exception e) {
            e.printStackTrace();
            document = null;
        }
        return document;
    }
    /**
     * Creates a "Node" element for a document from a tree node, recursing into its children.
     *
     * @param document the owning document
     * @param node the tree node to serialize
     *
     * @return element
     */
    private Element createNodeElement(Document document, DefaultMutableTreeNode node) {
        Element element = document.createElement("Node");
        Element nodeTitleElement = document.createElement("Title");
        Text nodeTitleText = document.createTextNode(((JavaTreeNodeObject) node.getUserObject()).getTitle());
        nodeTitleElement.appendChild(nodeTitleText);
        element.appendChild(nodeTitleElement);
        Element nodeTextElement = document.createElement("Text");
        Text nodeTextText = document.createTextNode(((JavaTreeNodeObject) node.getUserObject()).getText());
        nodeTextElement.appendChild(nodeTextText);
        element.appendChild(nodeTextElement);
        if (!node.isLeaf()) {
            int childrenCount = node.getChildCount();
            for (int index = 0; index < childrenCount; index++) {
                element.appendChild(createNodeElement(document, (DefaultMutableTreeNode) node.getChildAt(index)));
            }
        }
        return element;
    }
    /**
     * Creates a tree node from a "Node" element, recursing into nested "Node" children. Missing Title/Text content is
     * mapped to the empty string.
     *
     * @param element the element to deserialize
     *
     * @return treeNode
     */
    private DefaultMutableTreeNode createJavaTreeNode(Element element) {
        DefaultMutableTreeNode treeNode = new DefaultMutableTreeNode(new JavaTreeNodeObject());
        NodeList nodeList = element.getChildNodes();
        for (int index = 0; index < nodeList.getLength(); index++) {
            Element child = (Element) nodeList.item(index);
            String tagName = child.getTagName();
            if (tagName.equals("Title")) {
                Text titleText = (Text) child.getFirstChild();
                if (titleText != null) {
                    ((JavaTreeNodeObject) treeNode.getUserObject()).setTitle(titleText.getData().trim());
                } else {
                    ((JavaTreeNodeObject) treeNode.getUserObject()).setTitle("");
                }
            } else if (tagName.equals("Text")) {
                Text textText = (Text) child.getFirstChild();
                if (textText != null) {
                    ((JavaTreeNodeObject) treeNode.getUserObject()).setText(textText.getData().trim());
                } else {
                    ((JavaTreeNodeObject) treeNode.getUserObject()).setText("");
                }
            } else if (tagName.equals("Node")) {
                treeNode.add(createJavaTreeNode(child));
            }
        }
        return treeNode;
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.EventListener;
/**
* Manages the relationship between documents and PSI trees.
*/
public abstract class PsiDocumentManager {
/**
* Checks if the PSI tree for the specified document is up-to-date (its state reflects the latest changes made
* to the document content).
*
* @param document the document to check.
* @return true if the PSI tree for the document is up-to-date, false otherwise.
*/
public abstract boolean isCommitted(@NotNull Document document);
/**
* Returns the document manager instance for the specified project.
*
* @param project the project for which the document manager is requested.
* @return the document manager instance.
*/
public static PsiDocumentManager getInstance(@NotNull Project project) {
return project.getService(PsiDocumentManager.class);
}
/**
* Returns the PSI file for the specified document.
*
* @param document the document for which the PSI file is requested.
* @return the PSI file instance.
*/
@Nullable
public abstract PsiFile getPsiFile(@NotNull Document document);
/**
* Returns the cached PSI file for the specified document.
*
* @param document the document for which the PSI file is requested.
* @return the PSI file instance, or {@code null} if there is currently no cached PSI tree for the file.
*/
@Nullable
public abstract PsiFile getCachedPsiFile(@NotNull Document document);
/**
* Returns the document for the specified PSI file.
*
* @param file the file for which the document is requested.
* @return the document instance, or {@code null} if the file is binary or has no associated document.
*/
@Nullable
public abstract Document getDocument(@NotNull PsiFile file);
/**
* Returns the cached document for the specified PSI file.
*
* @param file the file for which the document is requested.
* @return the document instance, or {@code null} if there is currently no cached document for the file.
*/
@Nullable
public abstract Document getCachedDocument(@NotNull PsiFile file);
/**
 * Commits (updates the PSI tree for) all modified but not committed documents.
 * Before a modified document is committed, accessing its PSI may return elements
 * corresponding to the original (unmodified) state of the document.<p/>
 *
 * Should be called in the UI thread in a write-safe context (see {@link com.intellij.openapi.application.TransactionGuard}).
 *
 * @see #commitDocument(Document)
 */
public abstract void commitAllDocuments();
/**
 * Commits all modified but not committed documents under a modal dialog (see {@link PsiDocumentManager#commitAllDocuments()}).
 * Should be called in the UI thread and outside a write action.
 *
 * @return true if the operation completed successfully, false if it was cancelled.
 */
public abstract boolean commitAllDocumentsUnderProgress();
/**
 * If the {@code document} is committed, run {@code action} immediately.
 * Otherwise, schedule the execution of the {@code action} sometime in the future right after the {@code document} is committed.
 *
 * @param document the document whose commit state determines when the action runs.
 * @param action the action to execute once the document is committed.
 */
public abstract void performForCommittedDocument(@NotNull Document document, @NotNull Runnable action);
/**
 * Updates the PSI tree for the specified document.
 * Before a modified document is committed, accessing its PSI may return elements
 * corresponding to the original (unmodified) state of the document.<p/>
 *
 * For documents with event-system-enabled PSI ({@link FileViewProvider#isEventSystemEnabled()}), should be called in UI thread in a write-safe context (see {@link com.intellij.openapi.application.TransactionGuard}).
 * For other documents, can be called in background thread with read access. It's the responsibility of the caller to properly synchronize
 * that PSI and ensure no other threads are reading or modifying it concurrently.
 *
 * @param document the document to commit.
 * @see #commitAllDocuments()
 */
public abstract void commitDocument(@NotNull Document document);
/**
 * Returns the document text that PSI should be based upon.
 *
 * @param document the document whose last committed text is requested.
 * @return the last committed text. For changed documents, it's their old text until the document is committed.
 * This sequence is immutable.
 * @see com.intellij.util.text.ImmutableCharSequence
 */
@NotNull
public abstract CharSequence getLastCommittedText(@NotNull Document document);
/**
 * Returns the modification stamp corresponding to the last committed state of the document.
 *
 * @param document the document whose last committed stamp is requested.
 * @return for uncommitted documents, the last stamp before the document change: the same stamp that current PSI should have.
 * For committed documents, just their stamp.
 *
 * @see Document#getModificationStamp()
 * @see FileViewProvider#getModificationStamp()
 */
public abstract long getLastCommittedStamp(@NotNull Document document);
/**
 * Returns the document for the specified PsiFile intended to be used when working with committed PSI,
 * e.g. outside the dispatch thread.
 *
 * @param file the file for which the document is requested.
 * @return an immutable document corresponding to the current PSI state. For committed documents, the contents and timestamp are equal to
 * the ones of {@link #getDocument(PsiFile)}. For uncommitted documents, the text is {@link #getLastCommittedText(Document)} and
 * the modification stamp is {@link #getLastCommittedStamp(Document)}.
 */
@Nullable
public abstract Document getLastCommittedDocument(@NotNull PsiFile file);
/**
 * Returns the documents which have been modified but not committed.
 *
 * @return an array of uncommitted documents; the array and its elements are non-null.
 * @see #commitDocument(Document)
 */
public abstract @NotNull Document @NotNull [] getUncommittedDocuments();
/**
 * Checks whether the specified document has modifications that have not been committed
 * to its PSI tree yet. (Note: the name misspells "uncommitted" — presumably kept for
 * compatibility with existing callers.)
 *
 * @param document the document to check.
 * @return true if the document was modified but not committed, false otherwise
 * @see #commitDocument(Document)
 */
public abstract boolean isUncommited(@NotNull Document document);
/**
 * Checks if any modified documents have not been committed.
 *
 * @return true if there are uncommitted documents, false otherwise
 * @see #getUncommittedDocuments()
 */
public abstract boolean hasUncommitedDocuments();
/**
 * @return if any modified documents with event-system-enabled PSI have not been committed.
 * @see FileViewProvider#isEventSystemEnabled()
 */
@ApiStatus.Experimental
public boolean hasEventSystemEnabledUncommittedDocuments() {
    // Default implementation: conservatively reports any uncommitted document at all.
    final boolean anyUncommitted = hasUncommitedDocuments();
    return anyUncommitted;
}
/**
 * Commits the documents and runs the specified operation, which does not return a value, in a read action.
 * Can be called from a thread other than the Swing dispatch thread.
 *
 * @param runnable the operation to execute.
 * @see #commitAllDocuments()
 */
public abstract void commitAndRunReadAction(@NotNull Runnable runnable);
/**
 * Commits the documents and runs the specified operation, which returns a value, in a read action.
 * Can be called from a thread other than the Swing dispatch thread.
 *
 * @param computation the operation to execute.
 * @param <T> the type of value produced by the computation.
 * @return the value returned by the operation.
 */
public abstract <T> T commitAndRunReadAction(@NotNull Computable<T> computation);
/**
 * Reparses the specified set of files after an external configuration change that would cause them to be parsed differently
 * (for example, a language level change in the settings).
 *
 * @param files the files to reparse.
 * @param includeOpenFiles if true, the files opened in editor tabs will also be reparsed.
 */
public abstract void reparseFiles(@NotNull final Collection<? extends VirtualFile> files, final boolean includeOpenFiles);
/**
 * Listener for receiving notifications about creation of {@link Document} and {@link PsiFile} instances.
 */
public interface Listener extends EventListener {
    /**
     * Called when a document instance is created for a file.
     *
     * @param document the created document instance.
     * @param psiFile the file for which the document was created, or {@code null} if none.
     * @see PsiDocumentManager#getDocument(PsiFile)
     */
    void documentCreated(@NotNull Document document, @Nullable PsiFile psiFile);

    /**
     * Called when a file instance is created for a document. Default implementation does nothing.
     *
     * @param file the created file instance.
     * @param document the document for which the file was created.
     * @see PsiDocumentManager#getPsiFile(Document)
     */
    default void fileCreated(@NotNull PsiFile file, @NotNull Document document) {
    }
}
/**
 * Registers a listener for document/PSI-file creation events.
 *
 * @param listener the listener to register.
 * @deprecated Use the message bus topic {@link PsiDocumentListener#TOPIC} instead.
 */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
public abstract void addListener(@NotNull Listener listener);
/**
 * Checks if the PSI tree corresponding to the specified document has been modified and the changes have not
 * yet been applied to the document. Documents in that state cannot be modified directly, because such changes
 * would conflict with the pending PSI changes. Changes made through PSI are always applied in the end of a write action,
 * and can be applied in the middle of a write action by calling {@link #doPostponedOperationsAndUnblockDocument}.
 *
 * @param doc the document to check.
 * @return true if the corresponding PSI has changes that haven't been applied to the document.
 */
public abstract boolean isDocumentBlockedByPsi(@NotNull Document doc);
/**
 * Applies pending changes made through the PSI to the specified document.
 *
 * @param doc the document to apply the changes to.
 * @see #isDocumentBlockedByPsi(Document)
 */
public abstract void doPostponedOperationsAndUnblockDocument(@NotNull Document doc);
/**
 * Defers the action until all documents with event-system-enabled PSI are committed.
 * Must be called from the EDT only.
 *
 * @param action to run when all documents are committed
 * @return true if action was run immediately (i.e. all documents are already committed)
 */
public abstract boolean performWhenAllCommitted(@NotNull Runnable action);
/**
 * Same as {@link #performLaterWhenAllCommitted(ModalityState, Runnable)} using {@link ModalityState#defaultModalityState()}.
 *
 * @param runnable the action to run once all documents are committed.
 */
public abstract void performLaterWhenAllCommitted(@NotNull Runnable runnable);
/**
 * Schedule the {@code runnable} to be executed on Swing thread when all documents with event-system-enabled PSI
 * are committed at some later moment in a given modality state.
 * The {@code runnable} is guaranteed to be invoked when no write action is running, and not immediately.
 * If the project is disposed before such moment, the {@code runnable} is not executed.
 *
 * @param modalityState the modality state in which the runnable may be executed.
 * @param runnable the action to run once all documents are committed.
 */
public abstract void performLaterWhenAllCommitted(@NotNull ModalityState modalityState, @NotNull Runnable runnable);
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.javascript.jscomp.base.JSCompDoubles.isExactInt32;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import javax.annotation.Nullable;
/**
* Optimization for functions that have {@code var_args} or access the
* arguments array.
*
* <p>Example:
* <pre>
* function() { alert(arguments[0] + argument[1]) }
* </pre>
* to:
* <pre>
* function(a, b) { alert(a, b) }
* </pre>
*
* Each newly inserted variable name will be unique very much like the output
* of the AST found after the {@link Normalize} pass.
*/
class OptimizeArgumentsArray implements CompilerPass, ScopedCallback {

  // The arguments object as described by ECMAScript version 3 section 10.1.8
  private static final String ARGUMENTS = "arguments";

  // To ensure that the newly introduced parameter names are unique. We will
  // use this string as prefix unless the caller specifies a different prefix.
  private static final String PARAMETER_PREFIX = "JSCompiler_OptimizeArgumentsArray_p";

  // The prefix for the newly introduced parameter name.
  private final String paramPrefix;

  // To make each parameter name unique in the function we append a unique integer.
  private int uniqueId = 0;

  // Reference to the compiler object to notify any changes to source code AST.
  private final AbstractCompiler compiler;

  // A stack of `arguments` access lists, one per enclosing (outer) function.
  private final Deque<List<Node>> argumentsAccessStack = new ArrayDeque<>();

  // The `arguments` accesses in the current scope.
  //
  // The elements are NAME nodes. This initial value is an error-detecting sentinel for the
  // global scope, which is used because `ArrayDeque` is null-hostile.
  private List<Node> currentArgumentsAccesses = ImmutableList.of();

  /**
   * Construct this pass and use {@link #PARAMETER_PREFIX} as the prefix for
   * all parameter names that it introduces.
   */
  OptimizeArgumentsArray(AbstractCompiler compiler) {
    this(compiler, PARAMETER_PREFIX);
  }

  /**
   * @param compiler the compiler, notified of every AST change this pass makes
   * @param paramPrefix the prefix to use for all parameter names that this
   *     pass introduces
   */
  OptimizeArgumentsArray(AbstractCompiler compiler, String paramPrefix) {
    this.compiler = checkNotNull(compiler);
    this.paramPrefix = checkNotNull(paramPrefix);
  }

  @Override
  public void process(Node externs, Node root) {
    NodeTraversal.traverse(compiler, checkNotNull(root), this);
  }

  @Override
  public void enterScope(NodeTraversal traversal) {
    // Only non-arrow functions define their own `arguments` object.
    if (!definesArgumentsVar(traversal.getScopeRoot())) {
      return;
    }
    // Introduces a new access list and stores the access list of the outer scope.
    argumentsAccessStack.push(currentArgumentsAccesses);
    currentArgumentsAccesses = new ArrayList<>();
  }

  @Override
  public void exitScope(NodeTraversal traversal) {
    if (!definesArgumentsVar(traversal.getScopeRoot())) {
      return;
    }
    // Attempt to replace the argument access and if the AST has been changed,
    // report back to the compiler.
    tryReplaceArguments(traversal.getScopeRoot());
    // Restore the access list of the enclosing scope.
    currentArgumentsAccesses = argumentsAccessStack.pop();
  }

  private static boolean definesArgumentsVar(Node root) {
    // Arrow functions inherit `arguments` from the enclosing non-arrow function.
    return root.isFunction() && !root.isArrowFunction();
  }

  @Override
  public boolean shouldTraverse(NodeTraversal unused0, Node unused1, Node unused2) {
    return true;
  }

  @Override
  public void visit(NodeTraversal traversal, Node node, Node parent) {
    if (traversal.inGlobalHoistScope()) {
      return; // Do no rewriting in the global scope.
    }
    if (node.isName() && ARGUMENTS.equals(node.getString())) {
      currentArgumentsAccesses.add(node); // Record all potential references to the arguments array.
    }
  }

  /**
   * Tries to optimize all the arguments array access in this scope by assigning a name to each
   * element.
   *
   * @param scopeRoot the FUNCTION node whose scope is being optimized
   */
  private void tryReplaceArguments(Node scopeRoot) {
    // Find the number of parameters that can be accessed without using `arguments`.
    Node parametersList = NodeUtil.getFunctionParameters(scopeRoot);
    checkState(parametersList.isParamList(), parametersList);
    int numParameters = parametersList.getChildCount();

    // Determine the highest index that is used to make an access on `arguments`. By default, assume
    // that the value is the number of parameters to the function.
    int highestIndex = getHighestIndex(numParameters - 1);
    if (highestIndex < 0) {
      return;
    }

    ImmutableSortedMap<Integer, String> argNames =
        assembleParamNames(parametersList, highestIndex + 1);
    changeMethodSignature(argNames, parametersList);
    changeBody(argNames);
  }

  /**
   * Iterates through all the references to arguments array in the
   * function to determine the real highestIndex. Returns -1 when we should not
   * be replacing any arguments for this scope - we should exit tryReplaceArguments.
   *
   * @param highestIndex highest index that has been accessed from the arguments array
   * @return the highest constant index used on `arguments`, or -1 to abort the optimization
   */
  private int getHighestIndex(int highestIndex) {
    for (Node ref : currentArgumentsAccesses) {
      Node getElem = ref.getParent();

      // Bail on anything but argument[c] access where c is a constant.
      // TODO(user): We might not need to bail out all the time, there might
      // be more cases that we can cover.
      if (!getElem.isGetElem() || ref != getElem.getFirstChild()) {
        return -1;
      }

      Node indexNode = ref.getNext();

      // We have something like arguments[x] where x is not a constant. That
      // means at least one of the access is not known.
      if (!indexNode.isNumber()) {
        // TODO(user): Its possible not to give up just yet. The type
        // inference did a 'semi value propagation'. If we know that string
        // is never a subclass of the type of the index. We'd know that
        // it is never 'callee'.
        return -1; // Give up.
      }

      // We want to bail out if someone tries to access arguments[0.5] for example
      double index = indexNode.getDouble();
      if (!isExactInt32(index)) {
        return -1;
      }

      Node getElemParent = getElem.getParent();
      // When we have argument[0](), replacing it with a() is semantically
      // different if argument[0] is a function call that refers to 'this'
      if (getElemParent.isCall() && getElemParent.getFirstChild() == getElem) {
        // TODO(user): We can consider using .call() if aliasing that
        // argument allows shorter alias for other arguments.
        return -1;
      }

      // Replace the highest index if we see an access that has a higher index
      // than all the one we saw before.
      int indexInt = (int) index;
      if (indexInt > highestIndex) {
        highestIndex = indexInt;
      }
    }
    return highestIndex;
  }

  /**
   * Inserts new formal parameters into the method's signature based on the given set of names.
   *
   * <p>Example: function() --> function(r0, r1, r2)
   *
   * @param argNames maps param index to param name, if the param with that index has a name.
   * @param paramList node representing the function signature
   */
  private void changeMethodSignature(ImmutableSortedMap<Integer, String> argNames, Node paramList) {
    // Only indices beyond the existing formal parameters need new NAME nodes appended.
    ImmutableSortedMap<Integer, String> newParams = argNames.tailMap(paramList.getChildCount());
    for (String name : newParams.values()) {
      paramList.addChildToBack(IR.name(name).srcrefIfMissing(paramList));
    }
    if (!newParams.isEmpty()) {
      compiler.reportChangeToEnclosingScope(paramList);
    }
  }

  /**
   * Performs the replacement of arguments[x] -> a if x is known.
   *
   * @param argNames maps param index to param name, if the param with that index has a name.
   */
  private void changeBody(ImmutableMap<Integer, String> argNames) {
    for (Node ref : currentArgumentsAccesses) {
      Node index = ref.getNext();
      Node parent = ref.getParent();
      int value = (int) index.getDouble(); // This was validated earlier.

      @Nullable String name = argNames.get(value);
      if (name == null) {
        // No substitutable name for this index (e.g. an anonymous/pattern param); leave as-is.
        continue;
      }

      Node newName = IR.name(name).srcrefIfMissing(parent);
      parent.replaceWith(newName);
      // TODO(nickreid): See if we can do this fewer times. The accesses may be in different scopes.
      compiler.reportChangeToEnclosingScope(newName);
    }
  }

  /**
   * Generates a {@link Map} from argument indices to parameter names.
   *
   * <p>A {@link Map} is used because the sequence may be sparse in the case that there is an
   * anonymous param, such as a destructuring param. There may also be fewer returned names than
   * {@code maxCount} if there is a rest param, since no additional params may be synthesized.
   *
   * @param paramList the PARAM_LIST node whose existing names are collected first.
   * @param maxCount The maximum number of argument names in the returned map.
   */
  private ImmutableSortedMap<Integer, String> assembleParamNames(Node paramList, int maxCount) {
    checkArgument(paramList.isParamList(), paramList);

    ImmutableSortedMap.Builder<Integer, String> builder = ImmutableSortedMap.naturalOrder();
    int index = 0;

    // Collect all existing param names first...
    for (Node param = paramList.getFirstChild(); param != null; param = param.getNext()) {
      switch (param.getToken()) {
        case NAME:
          builder.put(index, param.getString());
          break;
        case ITER_REST:
          // No params may be synthesized past a rest param.
          return builder.buildOrThrow();
        case DEFAULT_VALUE:
          // `arguments` doesn't consider default values. It holds exactly the provided args.
        case OBJECT_PATTERN:
        case ARRAY_PATTERN:
          // Patterns have no names to substitute into the body.
          break;
        default:
          throw new IllegalArgumentException(param.toString());
      }
      index++;
    }

    // ... then synthesize any additional param names.
    for (; index < maxCount; index++) {
      builder.put(index, paramPrefix + uniqueId++);
    }

    return builder.buildOrThrow();
  }
}
| |
package org.ovirt.engine.core.common.queries;
import java.io.Serializable;
/**
 * Enumeration of all back-end query types. The {@code ordinal()} of each constant is used as a
 * wire value (see {@link #getValue()} / {@link #forValue(int)}), so the declaration ORDER of the
 * constants must not change and new constants must only be appended before {@code Unknown}.
 */
public enum VdcQueryType implements Serializable {
    // VM queries
    IsVmWithSameNameExist(VdcQueryAuthType.User),
    GetVmByVmId(VdcQueryAuthType.User),
    GetVmByVmNameForDataCenter(VdcQueryAuthType.User),
    GetAllVms(VdcQueryAuthType.User),
    GetAllVmsForUser(VdcQueryAuthType.User),
    GetUnregisteredVms,
    GetUnregisteredVmTemplates,
    GetVmsRunningOnOrMigratingToVds,
    GetVmsByStorageDomain,
    GetVmsByInstanceTypeId,
    GetVmCustomProperties(VdcQueryAuthType.User),
    GetVmConfigurationBySnapshot(VdcQueryAuthType.User),
    GetVmFromConfiguration(VdcQueryAuthType.User),
    GetVmOvfByVmId(VdcQueryAuthType.User),
    GetSnapshotBySnapshotId(VdcQueryAuthType.User),
    GetVmsByDiskGuid,
    GetVmPayload(VdcQueryAuthType.User),
    IsBalloonEnabled(VdcQueryAuthType.User),
    GetSoundDevices(VdcQueryAuthType.User),
    GetVmsByVnicProfileId,
    GetTemplatesByVnicProfileId,
    GetVirtioScsiControllers(VdcQueryAuthType.User),
    GetVmsInit(VdcQueryAuthType.User),
    GetVmNextRunConfiguration(VdcQueryAuthType.User),
    GetVmUpdatesOnNextRunExists(VdcQueryAuthType.User),

    // Vds queries
    GetVdsByVdsId,
    GetVdsByName,
    GetVdsFenceStatus,
    GetNewVdsFenceStatus,
    GetAgentFenceOptions,
    GetAllChildVlanInterfaces,
    GetAllSiblingVlanInterfaces,
    GetVlanParent,
    GetVdsHooksById,
    GetAllHosts,
    GetHostsByClusterId(VdcQueryAuthType.User),
    IsDisplayAddressConsistentInCluster,
    GetAllVdsByStoragePool(VdcQueryAuthType.User),
    GetHostListFromExternalProvider(),
    GetHostGroupsFromExternalProvider(),
    GetComputeResourceFromExternalProvider(),
    GetDiscoveredHostListFromExternalProvider(),
    GetProviderCertificateChain(),
    GetHostsForStorageOperation,
    GetServerSSHPublicKey,
    GetServerSSHKeyFingerprint,
    GetCpuStatisticsByVdsId,

    // VdsStatic Queries
    GetVdsStaticByName,

    // Vds Networks
    GetVdsInterfacesByVdsId(VdcQueryAuthType.User),
    GetVdsFreeBondsByVdsId,
    GetAllNetworks(VdcQueryAuthType.User),
    GetAllNetworksByClusterId(VdcQueryAuthType.User),
    GetNetworksByDataCenterId(VdcQueryAuthType.User),
    GetManagementInterfaceAddressByVmId(VdcQueryAuthType.User),
    GetInterfacesByLabelForNetwork,

    // Vm Network
    GetVmInterfacesByVmId(VdcQueryAuthType.User),
    GetVmGuestAgentInterfacesByVmId(VdcQueryAuthType.User),

    // Vnic Profiles
    GetAllVnicProfiles(VdcQueryAuthType.User),
    GetVnicProfileById(VdcQueryAuthType.User),
    GetVnicProfilesByNetworkId(VdcQueryAuthType.User),
    GetVnicProfilesByDataCenterId(VdcQueryAuthType.User),
    GetVnicProfilesByNetworkQosId,

    // Template Network
    GetTemplateInterfacesByTemplateId(VdcQueryAuthType.User),

    // Networks
    GetVdsGroupsAndNetworksByNetworkId,
    GetVdsAndNetworkInterfacesByNetworkId,
    GetVdsWithoutNetwork,
    GetVmsAndNetworkInterfacesByNetworkId,
    GetVmTemplatesAndNetworkInterfacesByNetworkId,
    GetNetworkById(VdcQueryAuthType.User),

    // External network providers
    GetAllExternalNetworksOnProvider,
    GetExternalSubnetsOnProviderByNetwork,

    // Network labels
    GetNetworkLabelsByNetworkId,
    GetNetworkLabelsByDataCenterId,
    GetNetworkLabelsByHostNicId,

    // NUMA
    GetVdsNumaNodesByVdsId(VdcQueryAuthType.User),
    GetVmNumaNodesByVmId(VdcQueryAuthType.User),
    GetAllVmsWithNumaByVdsGroupId(VdcQueryAuthType.User),

    // VdsGroups
    GetVdsCertificateSubjectByVdsId(VdcQueryAuthType.User),
    GetVdsCertificateSubjectByVmId(VdcQueryAuthType.User),
    GetAllVdsGroups(VdcQueryAuthType.User),
    GetVdsGroupByVdsGroupId(VdcQueryAuthType.User), // needed when updating VM
    GetVdsGroupById(VdcQueryAuthType.User),
    GetVdsGroupByName(VdcQueryAuthType.User),
    GetVdsGroupsByStoragePoolId(VdcQueryAuthType.User),
    GetNumberOfActiveVmsInVdsGroupByVdsGroupId,
    GetNumberOfVmsInVdsGroupByVdsGroupId,

    // Certificate
    GetCACertificate(VdcQueryAuthType.User),
    SignString(VdcQueryAuthType.User),

    // VM Template based entities queries
    IsVmTemlateWithSameNameExist(VdcQueryAuthType.User),
    GetVmTemplate(VdcQueryAuthType.User),
    GetAllVmTemplates(VdcQueryAuthType.User),
    GetAllInstanceTypes(VdcQueryAuthType.User),
    GetAllImageTypes(VdcQueryAuthType.User),
    GetVmTemplatesDisks(VdcQueryAuthType.User),
    GetVmTemplatesByStoragePoolId,
    GetVmTemplatesByImageGuid,
    GetSystemPermissions,

    // VM Snapshot queries
    GetAllVmSnapshotsByVmId(VdcQueryAuthType.User),
    GetAllVmSnapshotsFromConfigurationByVmId(VdcQueryAuthType.User),

    // Images queries
    GetImageById(VdcQueryAuthType.User),
    GetImagesList(VdcQueryAuthType.User),
    GetImagesListByStoragePoolId(VdcQueryAuthType.User),
    GetAllDisksByVmId(VdcQueryAuthType.User),
    GetAllAttachableDisks(VdcQueryAuthType.User),
    GetAllDisksByStorageDomainId,
    GetAllDisks(VdcQueryAuthType.User),
    GetAllDiskSnapshotsByStorageDomainId,
    GetUnregisteredDisks,
    GetUnregisteredDisk,
    GetDiskByDiskId(VdcQueryAuthType.User),
    GetDiskSnapshotByImageId,

    // Users queries
    GetUserVmsByUserIdAndGroups(VdcQueryAuthType.User),
    GetAllDbUsers(VdcQueryAuthType.User),
    GetDbUserByUserId(VdcQueryAuthType.User),
    GetDbUserByUserNameAndDomain(VdcQueryAuthType.User),
    GetUserBySessionId(VdcQueryAuthType.User),

    // Directory queries:
    GetDirectoryUserById(VdcQueryAuthType.User),
    GetDirectoryGroupById(VdcQueryAuthType.User),
    GetAvailableNamespaces(VdcQueryAuthType.User),

    // Groups queries:
    GetAllDbGroups(VdcQueryAuthType.User),
    GetDbGroupById,

    // VM pools queries
    GetVmPoolById(VdcQueryAuthType.User),
    GetAllVmPoolsAttachedToUser(VdcQueryAuthType.User),
    GetAllVmsAndVmPools(VdcQueryAuthType.User),
    IsVmPoolWithSameNameExists,
    GetVmDataByPoolId(VdcQueryAuthType.User),
    GetVmDataByPoolName(VdcQueryAuthType.User),

    // Tags queries
    GetAllTags,
    GetRootTag,
    GetTagByTagId,
    GetTagByTagName,
    GetTagsByUserGroupId,
    GetTagsByUserId,
    GetTagsByVmId,
    GetTagsByTemplateId,
    GetTagsByVdsId,

    // System
    GetSystemStatistics,

    // Bookmarks
    GetAllBookmarks,
    GetBookmarkByBookmarkId,
    GetBookmarkByBookmarkName,

    // Configuration values
    GetConfigurationValue(VdcQueryAuthType.User),
    GetConfigurationValues(VdcQueryAuthType.User),
    GetFenceConfigurationValue(VdcQueryAuthType.User),
    GetDefaultTimeZone(VdcQueryAuthType.User),
    GetAvailableStoragePoolVersions(VdcQueryAuthType.User),
    GetAvailableClusterVersionsByStoragePool,

    // AuditLog
    GetAllEventMessages(VdcQueryAuthType.User),
    GetAllAuditLogsByVMId(VdcQueryAuthType.User),
    GetAllAuditLogsByVMTemplateId(VdcQueryAuthType.User),
    GetAuditLogById,

    // Search queries
    Search,

    // Public services
    GetDomainList(VdcQueryAuthType.User),
    GetAAAProfileList(VdcQueryAuthType.User),
    RegisterVds(VdcQueryAuthType.User),
    CheckDBConnection(VdcQueryAuthType.User),
    ValidateSession(VdcQueryAuthType.User),
    GetValueBySession,

    // Auxiliary queries used by architecture compatibility
    IsClusterEmpty(VdcQueryAuthType.User),
    GetHostArchitecture(VdcQueryAuthType.User),

    // License queries
    GetAllServerCpuList,

    // Multi Level Administration queries
    GetAllRoles(VdcQueryAuthType.User),
    GetRoleById(VdcQueryAuthType.User),
    GetRoleByName,
    GetPermissionById(VdcQueryAuthType.User),
    GetPermissionByRoleId,
    HasAdElementReconnectPermission(VdcQueryAuthType.User),
    GetPermissionsByAdElementId(VdcQueryAuthType.User),
    GetRoleActionGroupsByRoleId(VdcQueryAuthType.User),
    GetPermissionsForObject(VdcQueryAuthType.User),
    GetAllStoragePools(VdcQueryAuthType.User),
    GetDataCentersWithPermittedActionOnClusters(VdcQueryAuthType.User),
    GetClustersWithPermittedAction(VdcQueryAuthType.User),
    GetVmTemplatesWithPermittedAction(VdcQueryAuthType.User),

    // Storage
    GetStorageDomainById(VdcQueryAuthType.User),
    GetStorageDomainByName(VdcQueryAuthType.User),
    GetStorageServerConnectionById,
    GetAllStorageServerConnections,
    GetStorageServerConnectionsForDomain,
    GetStoragePoolById(VdcQueryAuthType.User),
    GetStoragePoolByDatacenterName(VdcQueryAuthType.User),
    GetStorageDomainsByConnection,
    GetConnectionsByDataCenterAndStorageType,
    GetStorageDomainsByStoragePoolId(VdcQueryAuthType.User),
    GetStorageDomainsByImageId,
    GetUnregisteredBlockStorageDomains,
    GetVgList,
    GetDeviceList,
    DiscoverSendTargets,
    GetStorageDomainsByVmTemplateId(VdcQueryAuthType.User),
    GetVmsFromExportDomain("org.ovirt.engine.core.bll.storage"),
    GetTemplatesFromExportDomain,
    GetVmTemplatesFromStorageDomain(VdcQueryAuthType.User),
    GetAllStorageDomains(VdcQueryAuthType.User),
    GetExistingStorageDomainList,
    GetStorageDomainByIdAndStoragePoolId,
    GetStoragePoolsByStorageDomainId,
    GetStoragePoolsByClusterService(VdcQueryAuthType.User),
    GetStorageDomainListById,
    GetLunsByVgId,
    GetPermittedStorageDomainsByStoragePoolId(VdcQueryAuthType.User),
    GetIscsiBondsByStoragePoolId,
    GetStorageTypesInPoolByPoolId,

    // Event Notification
    GetEventSubscribersBySubscriberIdGrouped,

    // oVirt
    GetoVirtISOs,

    // Async Tasks
    GetTasksStatusesByTasksIDs,

    // Quota
    GetQuotaByStoragePoolId,
    GetQuotaByQuotaId,
    GetQuotaVdsGroupByQuotaId,
    GetQuotaStorageByQuotaId,
    GetVmsRelatedToQuotaId,
    GetTemplatesRelatedToQuotaId,
    GetPermissionsToConsumeQuotaByQuotaId,
    GetQuotasByAdElementId,
    GetQuotasConsumptionForCurrentUser(VdcQueryAuthType.User),
    GetAllRelevantQuotasForStorage(VdcQueryAuthType.User),
    GetAllRelevantQuotasForVdsGroup(VdcQueryAuthType.User),

    // Jobs
    GetJobByJobId,
    GetJobsByCorrelationId,
    GetJobsByOffset,
    GetAllJobs,
    GetAllSteps,
    GetStepByStepId,
    GetStepsByJobId,

    // Commands
    GetCommandsCompatibilityVersions(VdcQueryAuthType.User),

    // Disks
    GetNextAvailableDiskAliasNameByVMId(VdcQueryAuthType.User),

    // Gluster
    GetGlusterVolumeById,
    GetGlusterVolumeOptionsInfo,
    GetGlusterVolumeBricks,
    GetGlusterVolumeBricksByServerId,
    GetGlusterVolumeBricksByTaskId,
    GetGlusterBrickById,
    GetGlusterServersForImport,
    GetAddedGlusterServers,
    GetGlusterVolumeAdvancedDetails,
    GetGlusterVolumeProfileInfo,
    GetGlusterHooks,
    GetGlusterHookContent,
    GetGlusterHookById,
    GetGlusterServerServicesByClusterId,
    GetGlusterServerServicesByServerId,
    GetGlusterClusterServiceByClusterId,
    GetGlusterVolumeRebalanceStatus,
    GetGlusterVolumeRemoveBricksStatus,
    GetGlusterVolumeByTaskId,

    GetDefaultConfigurationVersion(VdcQueryAuthType.User),
    OsRepository(VdcQueryAuthType.User),
    GetArchitectureCapabilities(VdcQueryAuthType.User),

    // Providers
    GetAllProviders,
    GetAllNetworksForProvider,

    //Network QoS
    GetAllNetworkQosByStoragePoolId,

    GetWatchdog(VdcQueryAuthType.User),
    GetConsoleDevices(VdcQueryAuthType.User),
    GetRngDevice(VdcQueryAuthType.User),
    GetDeviceCustomProperties(VdcQueryAuthType.User),

    // Scheduling
    GetClusterPolicies,
    GetClusterPolicyById,
    GetAllPolicyUnits,
    GetPolicyUnitById,
    GetAttachedClustersByClusterPolicyId,

    GetAffinityGroupById,
    GetAffinityGroupsByClusterId,
    GetAffinityGroupsByVmId,

    GetAllDisksPartialDataByVmId(VdcQueryAuthType.User),
    GetVmTemplateCount,

    // Default type instead of having to null check
    Unknown(VdcQueryAuthType.User);

    /**
     * What kind of authorization the query requires. Although this is essentially a <code>boolean</code>, it's
     * implemented as an enum for future extendability.
     */
    public enum VdcQueryAuthType {
        Admin,
        User
    }

    private static final String DEFAULT_PACKAGE_NAME = "org.ovirt.engine.core.bll";

    // Package containing the query handler implementation; immutable per constant.
    private final String packageName;

    // Authorization level required to run the query; immutable per constant.
    private final VdcQueryAuthType authType;

    private VdcQueryType() {
        this(DEFAULT_PACKAGE_NAME, VdcQueryAuthType.Admin);
    }

    private VdcQueryType(String packageName) {
        this(packageName, VdcQueryAuthType.Admin);
    }

    private VdcQueryType(VdcQueryAuthType authType) {
        this(DEFAULT_PACKAGE_NAME, authType);
    }

    /** Canonical constructor; all other constructors delegate here. */
    private VdcQueryType(String packageName, VdcQueryAuthType authType) {
        this.packageName = packageName;
        this.authType = authType;
    }

    /** @return the wire value of this query type (its ordinal). */
    public int getValue() {
        return this.ordinal();
    }

    /**
     * @param value wire value previously obtained from {@link #getValue()}.
     * @return the query type with the given ordinal value.
     */
    public static VdcQueryType forValue(int value) {
        return values()[value];
    }

    /** @return the package in which this query's handler class lives. */
    public String getPackageName() {
        return packageName;
    }

    /** @return the authorization level required to run this query. */
    public VdcQueryAuthType getAuthType() {
        return authType;
    }

    /** @return true if this query requires admin authorization. */
    public boolean isAdmin() {
        return authType == VdcQueryAuthType.Admin;
    }
}
| |
package com.company.eshop.repository;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;
import com.company.eshop.domain.Category;
import com.company.eshop.domain.Product;
import com.company.eshop.domain.ProductPK;
@Repository
public class ProductDAO {
// Generic CRUD delegate for Product entities, wired to the "productCRUD" bean.
@Autowired
@Qualifier(value="productCRUD")
private GenericDAOImpl<Product,ProductPK> crud;

// Used to expand a category into the names of its sub-categories when filtering.
@Autowired
private CategoryDAO categoryDAO;

/** @return the generic CRUD delegate used by this DAO. */
public GenericDAOImpl<Product,ProductPK> getCrud() {
    return crud;
}

/** @param crud the generic CRUD delegate this DAO should use. */
public void setCrud(GenericDAOImpl<Product,ProductPK> crud) {
    this.crud = crud;
}
/**
 * Looks up a single product by its exact name.
 *
 * @param name the exact product name to match.
 * @return the matching product, or {@code null} if no product has that name.
 */
public Product findProductByName(String name) {
    Query query = crud.getEntityManager()
            .createQuery("SELECT p FROM Product p WHERE p.name=:name");
    query.setParameter("name", name);
    try {
        return (Product) query.getSingleResult();
    } catch (NoResultException noMatch) {
        // No product with this name exists.
        return null;
    }
}
/**
 * Finds products filtered by an optional category (including its sub-categories),
 * an optional manufacturer and an optional name fragment, ordered by price.
 *
 * <p>Fixes a misleading dead assignment in the previous version: {@code nameString}
 * was initialized to an equality condition ({@code p.name=:name}) that was
 * unconditionally overwritten by a LIKE condition in every branch.
 *
 * @param category optional category filter; sub-categories are included. May be {@code null}.
 * @param manufacturerId optional manufacturer filter. May be {@code null}.
 * @param priceAscending true to sort by price ascending, false for descending.
 * @param name optional name fragment, matched with LIKE (%name%). May be {@code null}.
 * @return the matching products, never {@code null}.
 */
public List<Product> findProducts(Category category,Integer manufacturerId,boolean priceAscending,String name){
    // ORDER BY clause shared by every branch.
    String priceOrderString = priceAscending ? " ORDER BY p.price" : " ORDER BY p.price DESC";
    // Name filter as an additional AND condition (used by the three filtered branches).
    String nameString = (name != null) ? " AND p.name LIKE :name " : " ";
    Query query;
    if ((category != null) && (manufacturerId != null)) {
        List<String> categoryList = categoryDAO.getSubCategoryNames(category);
        String categoryString = categoryList.isEmpty() ? "" : " OR p.categoryId IN (:categoryList) ";
        query = crud.getEntityManager().createQuery("SELECT p FROM Product p WHERE" +
                " (p.categoryId=:categoryId "+categoryString+") AND p.manufacturerId=:manufacturerId "+nameString+" "+priceOrderString);
        query.setParameter("categoryId", category.getId().getCategoryId());
        if (!categoryList.isEmpty()) {
            query.setParameter("categoryList", categoryList);
        }
        query.setParameter("manufacturerId", manufacturerId);
    } else if (category != null) {
        List<String> categoryList = categoryDAO.getSubCategoryNames(category);
        String categoryString = categoryList.isEmpty() ? "" : " OR p.categoryId IN (:categoryList) ";
        query = crud.getEntityManager().createQuery("SELECT p FROM Product p WHERE" +
                " (p.categoryId=:categoryId "+categoryString+") "+nameString+" "+priceOrderString);
        query.setParameter("categoryId", category.getId().getCategoryId());
        if (!categoryList.isEmpty()) {
            query.setParameter("categoryList", categoryList);
        }
    } else if (manufacturerId != null) {
        query = crud.getEntityManager().createQuery("SELECT p FROM Product p WHERE" +
                " p.manufacturerId=:manufacturerId "+nameString+" "+priceOrderString);
        query.setParameter("manufacturerId", manufacturerId);
    } else {
        // Unfiltered case: the name condition (if any) must open the WHERE clause itself.
        String whereString = (name != null) ? " WHERE p.name LIKE :name " : " ";
        query = crud.getEntityManager().createQuery("SELECT p FROM Product p "+whereString+" "+priceOrderString);
    }
    // The :name parameter is present in the query exactly when name != null, in every branch.
    if (name != null) {
        query.setParameter("name", "%"+name+"%");
    }
    @SuppressWarnings("unchecked")
    List<Product> products = query.getResultList();
    // Touch each product's images collection before returning; appears intended to
    // initialize the lazy association while the persistence context is open —
    // TODO confirm iterator() actually triggers loading for this provider.
    for (Product product : products) {
        product.getImages().iterator();
    }
    return products;
}
/**
 * Returns the top five products by how often they appear in finished orders
 * (orders with dbStatusId 'F'), most frequently ordered first.
 *
 * @return up to five {@code Product} entities, best seller first
 */
public List<Product> findBestsellers() {
    Query query = crud.getEntityManager().createQuery(
            "SELECT p,count(oi) FROM Product p JOIN p.productInstances pi" +
            " JOIN pi.orderedItems oi JOIN oi.order o WHERE o.dbStatusId='F' GROUP BY p ORDER BY count(oi) DESC");
    query.setMaxResults(5);
    @SuppressWarnings("unchecked")
    List<Object[]> rows = query.getResultList();
    // Each result row is [Product, count]; only the product is returned.
    List<Product> bestsellers = new ArrayList<Product>();
    for (Object[] row : rows) {
        bestsellers.add((Product) row[0]);
    }
    return bestsellers;
}
/**
 * Counts all objects that depend on the given product: images, opinions,
 * product instances and features. Used to decide whether the product can be
 * safely deleted.
 *
 * @param productPK primary key of the product to inspect
 * @return total number of dependent rows across all four relations
 */
public Long getDependentObjectsNumber(ProductPK productPK) {
    int productId = productPK.getProductId();
    // The original code repeated the identical count query four times;
    // the shared pattern is factored into countDependents().
    return countDependents("images im", productId)
            + countDependents("opinions o", productId)
            + countDependents("productInstances pi", productId)
            + countDependents("features f", productId);
}

/**
 * Counts rows of one dependent relation of a product.
 *
 * @param joinPath  JPQL join path with alias, e.g. {@code "images im"}
 * @param productId id of the product whose dependents are counted
 * @return number of rows in the joined relation
 */
private Long countDependents(String joinPath, int productId) {
    Query query = getCrud().getEntityManager().createQuery(
            "SELECT count(*) FROM Product p JOIN " +
            " p." + joinPath + " WHERE p.productId=:productId");
    query.setParameter("productId", productId);
    return (Long) query.getSingleResult();
}
/**
 * Lists the products contained in the given order together with how many
 * ordered items reference each product; most frequent product first.
 *
 * @param orderId id of the order to inspect
 * @return rows of {@code [Product, Long count]}, ordered by count descending
 */
public List<Object[]> findOrderedProducts(int orderId) {
    Query query = crud.getEntityManager().createQuery(
            "SELECT p,count(p) FROM Order o JOIN o.orderedItems oi " +
            " JOIN oi.productInstance pi JOIN pi.product p WHERE o.orderId=:orderId GROUP BY p ORDER BY count(p) DESC");
    query.setParameter("orderId", orderId);
    @SuppressWarnings("unchecked")
    List<Object[]> rows = query.getResultList();
    return rows;
}
}
| |
/**
* Copyright (C) 2013 by Raphael Michel under the MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package de.geeksfactory.opacclient.storage;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.objects.AccountData;
import de.geeksfactory.opacclient.objects.AccountItem;
import de.geeksfactory.opacclient.objects.LentItem;
import de.geeksfactory.opacclient.objects.ReservedItem;
import de.geeksfactory.opacclient.objects.SearchResult;
import de.geeksfactory.opacclient.reminder.Alarm;
/**
 * Data access layer for the local SQLite account database. Manages library
 * accounts, the cached account data (lent and reserved items, fees, validity)
 * and reminder alarms.
 *
 * NOTE(review): most cursor-to-object mappers below read columns by positional
 * index; these indices must stay in sync with the column arrays declared in
 * {@code AccountDatabase} — verify against that class when changing schemas.
 */
public class AccountDataSource {
    // Database fields
    private SQLiteDatabase database;
    // Column list of the "accounts" table, shared with AccountDatabase.
    private String[] allColumns = AccountDatabase.COLUMNS;

    public AccountDataSource(Context context) {
        AccountDatabase dbHelper = AccountDatabase.getInstance(context);
        database = dbHelper.getWritableDatabase();
        // we do not need to close the database, as only one instance is created
        // see e.g. http://stackoverflow
        // .com/questions/4547461/closing-the-database-in-a-contentprovider/12715032#12715032
    }

    /** Inserts the given account and returns the new row id. */
    public long addAccount(Account acc) {
        ContentValues values = new ContentValues();
        values.put("bib", acc.getLibrary());
        values.put("label", acc.getLabel());
        values.put("name", acc.getName());
        values.put("password", acc.getPassword());
        return database.insert("accounts", null, values);
    }

    /** Persists changes to an existing account, including the passwordValid flag. */
    public void update(Account acc) {
        ContentValues values = new ContentValues();
        values.put("bib", acc.getLibrary());
        values.put("label", acc.getLabel());
        values.put("name", acc.getName());
        values.put("password", acc.getPassword());
        values.put("passwordValid", acc.isPasswordKnownValid() ? 1 : 0);
        database.update("accounts", values, "id = ?", new String[]{acc.getId() + ""});
    }

    /** Inserts an account from raw field values and returns the new row id. */
    public long addAccount(String bib, String label, String name, String password) {
        ContentValues values = new ContentValues();
        values.put("bib", bib);
        values.put("label", label);
        values.put("name", name);
        values.put("password", password);
        return database.insert("accounts", null, values);
    }

    /** Returns every stored account. */
    public List<Account> getAllAccounts() {
        List<Account> accs = new ArrayList<>();
        Cursor cursor = database.query("accounts", allColumns, null, null, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            Account acc = cursorToAccount(cursor);
            accs.add(acc);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return accs;
    }

    /** Returns all accounts belonging to the given library identifier. */
    public List<Account> getAllAccounts(String bib) {
        List<Account> accs = new ArrayList<>();
        String[] selA = {bib};
        Cursor cursor = database.query("accounts", allColumns, "bib = ?", selA, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            Account acc = cursorToAccount(cursor);
            accs.add(acc);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return accs;
    }

    /**
     * Returns accounts that have a non-empty user name and a non-null password.
     * NOTE(review): unlike the name check, the password is only tested for
     * null, not for the empty string — confirm whether '' passwords should
     * also be excluded.
     */
    public List<Account> getAccountsWithPassword() {
        List<Account> accs = new ArrayList<>();
        Cursor cursor = database.query("accounts", allColumns,
                "name is not null AND name != '' AND password is not null", null, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            Account acc = cursorToAccount(cursor);
            accs.add(acc);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return accs;
    }

    /** Loads a single account by row id, or returns null if it does not exist. */
    public Account getAccount(long id) {
        String[] selA = {"" + id};
        Cursor cursor = database.query("accounts", allColumns, "id = ?", selA, null, null, null);
        Account acc = null;
        cursor.moveToFirst();
        if (!cursor.isAfterLast()) {
            acc = cursorToAccount(cursor);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return acc;
    }

    // Maps one "accounts" row to an Account. Columns are read positionally;
    // index 9 holds the passwordValid flag (> 0 means known valid).
    private Account cursorToAccount(Cursor cursor) {
        Account acc = new Account();
        acc.setId(cursor.getLong(0));
        acc.setLibrary(cursor.getString(1));
        acc.setLabel(cursor.getString(2));
        acc.setName(cursor.getString(3));
        acc.setPassword(cursor.getString(4));
        acc.setCached(cursor.getLong(5));
        acc.setPasswordKnownValid(cursor.getLong(9) > 0);
        return acc;
    }

    /** Deletes the account together with its cached lent/reservation data. */
    public void remove(Account acc) {
        deleteAccountData(acc);
        String[] selA = {"" + acc.getId()};
        database.delete("accounts", "id=?", selA);
    }

    /**
     * Counts the account's lent items whose deadline lies before
     * (now - tolerance days).
     * NOTE(review): the SQL uses '-N days', i.e. it matches deadlines more
     * than {@code tolerance} days in the PAST; if the intent is "due within
     * the next N days", this should probably be '+N days' — confirm.
     */
    public int getExpiring(Account account, int tolerance) {
        String[] selA = {String.valueOf(account.getId())};
        Cursor cursor = database.query(AccountDatabase.TABLENAME_LENT, new String[]{"COUNT(*)"},
                "account = ? AND date(deadline) < date('now','-" + tolerance + " days')", selA,
                null, null, null);
        cursor.moveToFirst();
        int result = cursor.getInt(0);
        cursor.close();
        return result;
    }

    /**
     * Assembles the cached AccountData for an account: its lent items, its
     * reservations and the pendingFees/validUntil/warning columns stored on
     * the accounts row itself.
     */
    public AccountData getCachedAccountData(Account account) {
        AccountData adata = new AccountData(account.getId());
        List<LentItem> lent = new ArrayList<>();
        String[] selectionArgs = {"" + account.getId()};
        Cursor cursor = database.query(AccountDatabase.TABLENAME_LENT, AccountDatabase.COLUMNS_LENT,
                "account = ?", selectionArgs, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            LentItem entry = cursorToLentItem(cursor);
            lent.add(entry);
            cursor.moveToNext();
        }
        cursor.close();
        adata.setLent(lent);
        List<ReservedItem> res = new ArrayList<>();
        cursor = database.query(AccountDatabase.TABLENAME_RESERVATION,
                AccountDatabase.COLUMNS_RESERVATIONS, "account = ?", selectionArgs, null, null,
                null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            ReservedItem entry = cursorToReservedItem(cursor);
            res.add(entry);
            cursor.moveToNext();
        }
        cursor.close();
        adata.setReservations(res);
        String[] selA = {"" + account.getId()};
        cursor = database
                .query("accounts", new String[]{"pendingFees", "validUntil", "warning"}, "id = ?",
                        selA, null, null, null);
        cursor.moveToFirst();
        if (!cursor.isAfterLast()) {
            adata.setPendingFees(cursor.getString(0));
            adata.setValidUntil(cursor.getString(1));
            adata.setWarning(cursor.getString(2));
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return adata;
    }

    // Maps one lent-items row to a LentItem. Shared columns 0-6 are handled by
    // setAccountItemAttributes; lent-specific columns start at index 7.
    private LentItem cursorToLentItem(Cursor cursor) {
        LentItem item = new LentItem();
        setAccountItemAttributes(cursor, item);
        item.setBarcode(cursor.getString(7));
        item.setDeadline(cursor.getString(8));
        item.setHomeBranch(cursor.getString(9));
        item.setLendingBranch(cursor.getString(10));
        item.setProlongData(cursor.getString(11));
        item.setRenewable(cursor.getInt(12) == 1);
        item.setDownloadData(cursor.getString(13));
        item.setEbook(cursor.getInt(14) == 1);
        return item;
    }

    // Maps one reservations row to a ReservedItem; reservation-specific
    // columns start at index 7.
    private ReservedItem cursorToReservedItem(Cursor cursor) {
        ReservedItem item = new ReservedItem();
        setAccountItemAttributes(cursor, item);
        item.setReadyDate(cursor.getString(7));
        item.setExpirationDate(cursor.getString(8));
        item.setBranch(cursor.getString(9));
        item.setCancelData(cursor.getString(10));
        item.setBookingData(cursor.getString(11));
        return item;
    }

    // Reads the attributes shared by lent and reserved items (columns 0-6 plus
    // the named "mediatype" and "cover" columns).
    private void setAccountItemAttributes(Cursor cursor, AccountItem item) {
        item.setDbId(cursor.getLong(0));
        item.setAccount(cursor.getLong(1));
        item.setTitle(cursor.getString(2));
        item.setAuthor(cursor.getString(3));
        item.setFormat(cursor.getString(4));
        item.setId(cursor.getString(5));
        item.setStatus(cursor.getString(6));
        String mediatype = cursor.getString(cursor.getColumnIndex("mediatype"));
        item.setMediaType(mediatype != null ? SearchResult.MediaType.valueOf(mediatype) : null);
        item.setCover(cursor.getString(cursor.getColumnIndex("cover")));
    }

    // Serializes a LentItem into ContentValues for insertion; absent values
    // are stored as explicit NULLs via putOrNull.
    private ContentValues lentItemToContentValues(LentItem item, long accountId) {
        ContentValues cv = new ContentValues();
        setAccountItemAttributes(item, cv, accountId);
        putOrNull(cv, "barcode", item.getBarcode());
        putOrNull(cv, "deadline", item.getDeadline());
        putOrNull(cv, "homebranch", item.getHomeBranch());
        putOrNull(cv, "lending_branch", item.getLendingBranch());
        putOrNull(cv, "prolong_data", item.getProlongData());
        cv.put("renewable", item.isRenewable() ? 1 : 0);
        putOrNull(cv, "download_data", item.getDownloadData());
        cv.put("ebook", item.isEbook() ? 1 : 0);
        return cv;
    }

    // Serializes a ReservedItem into ContentValues for insertion.
    private ContentValues reservedItemToContentValues(ReservedItem item, long accountId) {
        ContentValues cv = new ContentValues();
        setAccountItemAttributes(item, cv, accountId);
        putOrNull(cv, "ready", item.getReadyDate());
        putOrNull(cv, "expiration", item.getExpirationDate());
        putOrNull(cv, "branch", item.getBranch());
        putOrNull(cv, "cancel_data", item.getCancelData());
        putOrNull(cv, "booking_data", item.getBookingData());
        return cv;
    }

    // Writes the attributes shared by lent and reserved items into cv; the db
    // id is only set when the item already has one (updates vs. inserts).
    private void setAccountItemAttributes(AccountItem item, ContentValues cv, long accountId) {
        if (item.getDbId() != null) cv.put("id", item.getDbId());
        cv.put("account", accountId);
        putOrNull(cv, "title", item.getTitle());
        putOrNull(cv, "author", item.getAuthor());
        putOrNull(cv, "format", item.getFormat());
        putOrNull(cv, "itemid", item.getId());
        putOrNull(cv, "status", item.getStatus());
        putOrNull(cv, "cover", item.getCover());
        putOrNull(cv, "mediatype",
                item.getMediaType() != null ? item.getMediaType().toString() : null);
    }

    // Stores the ISO string form of the date, or NULL when the value is absent.
    private void putOrNull(ContentValues cv, String key, LocalDate value) {
        if (value != null) {
            cv.put(key, value.toString());
        } else {
            cv.putNull(key);
        }
    }

    // Stores the string, or NULL when the value is absent.
    private void putOrNull(ContentValues cv, String key, String value) {
        if (value != null) {
            cv.put(key, value);
        } else {
            cv.putNull(key);
        }
    }

    /**
     * Drops all cached data (lent items, reservations, alarms) for every
     * account and resets the per-account cache columns.
     */
    public void invalidateCachedData() {
        database.delete(AccountDatabase.TABLENAME_LENT, null, null);
        database.delete(AccountDatabase.TABLENAME_RESERVATION, null, null);
        database.delete(AccountDatabase.TABLENAME_ALARMS, null, null);
        ContentValues update = new ContentValues();
        update.put("cached", 0);
        update.put("pendingFees", (String) null);
        update.put("validUntil", (String) null);
        update.put("warning", (String) null);
        database.update(AccountDatabase.TABLENAME_ACCOUNTS, update, null, null);
    }

    /** Removes the cached lent items and reservations of one account. */
    public void deleteAccountData(Account account) {
        database.delete(AccountDatabase.TABLENAME_LENT, "account = ?",
                new String[]{"" + account.getId()});
        database.delete(AccountDatabase.TABLENAME_RESERVATION, "account = ?",
                new String[]{"" + account.getId()});
    }

    /** Marks one account's cached data as stale by resetting its timestamp. */
    public void invalidateCachedAccountData(Account account) {
        ContentValues update = new ContentValues();
        update.put("cached", 0);
        database.update(AccountDatabase.TABLENAME_ACCOUNTS, update, "id = ?",
                new String[]{"" + account.getId()});
    }

    /**
     * Returns the timestamp of the account's cached data.
     * NOTE(review): throws a NullPointerException when the account row no
     * longer exists, since getAccount() then returns null — confirm callers
     * guarantee existence.
     */
    public long getCachedAccountDataTime(Account account) {
        return getAccount(account.getId()).getCached();
    }

    /**
     * Replaces the cached data of an account: updates the cache timestamp and
     * summary columns, then rewrites the lent and reservation tables from the
     * given AccountData. A null adata is silently ignored.
     */
    public void storeCachedAccountData(Account account, AccountData adata) {
        if (adata == null) {
            return;
        }
        long time = System.currentTimeMillis();
        ContentValues update = new ContentValues();
        update.put("cached", time);
        update.put("pendingFees", adata.getPendingFees());
        update.put("validUntil", adata.getValidUntil());
        update.put("warning", adata.getWarning());
        database.update(AccountDatabase.TABLENAME_ACCOUNTS, update, "id = ?",
                new String[]{"" + account.getId()});
        database.delete(AccountDatabase.TABLENAME_LENT, "account = ?",
                new String[]{"" + account.getId()});
        for (LentItem entry : adata.getLent()) {
            ContentValues insertmapping = lentItemToContentValues(entry, account.getId());
            database.insert(AccountDatabase.TABLENAME_LENT, null, insertmapping);
        }
        database.delete(AccountDatabase.TABLENAME_RESERVATION, "account = ?",
                new String[]{"" + account.getId()});
        for (ReservedItem entry : adata.getReservations()) {
            ContentValues insertmapping = reservedItemToContentValues(entry, account.getId());
            database.insert(AccountDatabase.TABLENAME_RESERVATION, null, insertmapping);
        }
    }

    /** Returns every cached lent item across all accounts. */
    public List<LentItem> getAllLentItems() {
        List<LentItem> items = new ArrayList<>();
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_LENT, AccountDatabase.COLUMNS_LENT, null, null,
                        null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            LentItem item = cursorToLentItem(cursor);
            items.add(item);
            cursor.moveToNext();
        }
        cursor.close();
        return items;
    }

    /** Loads one lent item by row id, or returns null if it does not exist. */
    public LentItem getLentItem(long id) {
        String[] selA = {"" + id};
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_LENT, AccountDatabase.COLUMNS_LENT, "id = ?", selA,
                        null, null, null);
        LentItem item = null;
        cursor.moveToFirst();
        if (!cursor.isAfterLast()) {
            item = cursorToLentItem(cursor);
        }
        // Make sure to close the cursor
        cursor.close();
        return item;
    }

    /** Loads the lent items whose row ids are contained in the given array. */
    public List<LentItem> getLentItems(long[] ids) {
        List<LentItem> items = new ArrayList<>();
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_LENT, AccountDatabase.COLUMNS_LENT, "id IN(" +
                        joinLongs(ids, ",") + ")", null, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            LentItem item = cursorToLentItem(cursor);
            items.add(item);
            cursor.moveToNext();
        }
        cursor.close();
        return items;
    }

    /**
     * Returns the accounts of one library that have a non-empty user name and
     * a non-null password.
     */
    public List<Account> getAccountsWithPassword(String ident) {
        List<Account> accs = new ArrayList<>();
        Cursor cursor = database.query("accounts", allColumns,
                "name is not null AND name != '' AND password is not null AND bib = ?",
                new String[]{ident}, null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            Account acc = cursorToAccount(cursor);
            accs.add(acc);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return accs;
    }

    /**
     * Creates an alarm for a deadline covering the given lent-item row ids.
     * Every referenced media id must exist, otherwise a DataIntegrityException
     * is thrown. Returns the new alarm's row id.
     */
    public long addAlarm(LocalDate deadline, long[] media, DateTime alarmTime) {
        for (long mid : media) {
            if (getLentItem(mid) == null) {
                throw new DataIntegrityException(
                        "Cannot add alarm with deadline " + deadline.toString() +
                                " that has dependency on the non-existing media item " + mid);
            }
        }
        ContentValues values = new ContentValues();
        values.put("deadline", deadline.toString());
        values.put("media", joinLongs(media, ","));
        values.put("alarm", alarmTime.toString());
        values.put("notified", 0);
        values.put("finished", 0);
        return database.insert(AccountDatabase.TABLENAME_ALARMS, null, values);
    }

    /**
     * Persists changes to an existing alarm after verifying all referenced
     * media items still exist.
     */
    public void updateAlarm(Alarm alarm) {
        for (long mid : alarm.media) {
            if (getLentItem(mid) == null) {
                throw new DataIntegrityException(
                        "Cannot update alarm with deadline " + alarm.deadline.toString() +
                                " that has dependency on the non-existing media item " + mid);
            }
        }
        ContentValues values = new ContentValues();
        values.put("deadline", alarm.deadline.toString());
        values.put("media", joinLongs(alarm.media, ","));
        values.put("alarm", alarm.notificationTime.toString());
        values.put("notified", alarm.notified ? 1 : 0);
        values.put("finished", alarm.finished ? 1 : 0);
        database.update(AccountDatabase.TABLENAME_ALARMS, values, "id = ?",
                new String[]{alarm.id + ""});
    }

    /** Clears the notified flag on all unfinished, already-notified alarms. */
    public void resetNotifiedOnAllAlarams() {
        ContentValues values = new ContentValues();
        values.put("notified", 0);
        database.update(AccountDatabase.TABLENAME_ALARMS, values, "finished = 0 AND notified = 1", null);
    }

    /** Finds an alarm by its deadline date, or returns null if none exists. */
    public Alarm getAlarmByDeadline(LocalDate deadline) {
        String[] selA = {deadline.toString()};
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_ALARMS, AccountDatabase.COLUMNS_ALARMS,
                        "deadline = ?", selA, null, null, null);
        Alarm item = null;
        cursor.moveToFirst();
        if (!cursor.isAfterLast()) {
            item = cursorToAlarm(cursor);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return item;
    }

    /** Loads one alarm by row id, or returns null if it does not exist. */
    public Alarm getAlarm(long id) {
        String[] selA = {"" + id};
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_ALARMS, AccountDatabase.COLUMNS_ALARMS, "id = ?",
                        selA, null, null, null);
        Alarm item = null;
        cursor.moveToFirst();
        if (!cursor.isAfterLast()) {
            item = cursorToAlarm(cursor);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return item;
    }

    /** Returns every stored alarm. */
    public List<Alarm> getAllAlarms() {
        List<Alarm> alarms = new ArrayList<>();
        Cursor cursor = database
                .query(AccountDatabase.TABLENAME_ALARMS, AccountDatabase.COLUMNS_ALARMS, null, null,
                        null, null, null);
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            Alarm alarm = cursorToAlarm(cursor);
            alarms.add(alarm);
            cursor.moveToNext();
        }
        // Make sure to close the cursor
        cursor.close();
        return alarms;
    }

    /** Deletes every stored alarm. */
    public void clearAlarms() {
        database.delete(AccountDatabase.TABLENAME_ALARMS, null, null);
    }

    // Maps one alarms row (positional columns 0-5) to an Alarm; the media
    // column stores a comma-separated list of lent-item row ids.
    private Alarm cursorToAlarm(Cursor cursor) {
        Alarm alarm = new Alarm();
        alarm.id = cursor.getLong(0);
        alarm.deadline = new LocalDate(cursor.getString(1));
        alarm.media = splitLongs(cursor.getString(2), ",");
        alarm.notificationTime = new DateTime(cursor.getString(3));
        alarm.notified = cursor.getInt(4) == 1;
        alarm.finished = cursor.getInt(5) == 1;
        return alarm;
    }

    /** Deletes the given alarm by its row id. */
    public void removeAlarm(Alarm alarm) {
        String[] selA = {"" + alarm.id};
        database.delete(AccountDatabase.TABLENAME_ALARMS, "id=?", selA);
    }

    // Joins the longs into a single separator-delimited string (inverse of
    // splitLongs); an empty array yields the empty string.
    private String joinLongs(long[] longs, String separator) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (long l : longs) {
            if (first) {
                first = false;
            } else {
                sb.append(separator);
            }
            sb.append(l);
        }
        return sb.toString();
    }

    // Parses a separator-delimited string back into a long[]. The separator is
    // used as a regex by String.split.
    // NOTE(review): an empty input string yields [""], so Long.valueOf throws
    // a NumberFormatException — confirm the media column is never empty.
    private long[] splitLongs(String string, String separator) {
        String[] strings = string.split(separator);
        long[] longs = new long[strings.length];
        for (int i = 0; i < strings.length; i++) {
            longs[i] = Long.valueOf(strings[i]);
        }
        return longs;
    }
}
| |
/*
Copyright (C) 2001, 2008 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.examples;
import gov.nasa.worldwind.WorldWind;
import gov.nasa.worldwind.avlist.AVKey;
import gov.nasa.worldwind.cache.FileStore;
import gov.nasa.worldwind.data.DataDescriptor;
import javax.swing.*;
import javax.swing.border.CompoundBorder;
import javax.swing.border.TitledBorder;
import javax.swing.event.EventListenerList;
import javax.swing.event.TableModelEvent;
import javax.swing.table.AbstractTableModel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
/**
* @author dcollins
* @version $Id: InstalledDataPanel.java 7393 2008-11-07 00:12:17Z dcollins $
*/
public class InstalledDataPanel extends JPanel
{
    // Action commands fired to registered ActionListeners by the toolbar buttons.
    public static final String REFRESH_ACTION_COMMAND = "InstalledDataPanel.RefreshActionCommand";
    public static final String INSTALL_ACTION_COMMAND = "InstalledDataPanel.InstallActionCommand";
    public static final String UNINSTALL_ACTION_COMMAND = "InstalledDataPanel.UninstallActionCommand";
    public static final String PROPERTIES_ACTION_COMMAND = "InstalledDataPanel.PropertiesActionCommand";
    public static final String ZOOM_ACTION_COMMAND = "InstalledDataPanel.ZoomActionCommand";
    // Column titles of the installed-data table.
    protected static final String COLUMN_INSTALLED = "Installed";
    protected static final String COLUMN_NAME = "Name";
    protected static final String COLUMN_DATA_STORE_LOCATION = "Data store location";
    protected static final String COLUMN_DATA_STORE_PATH = "Path";
    // Sentinel column index understood by DataDescriptorTableModel.getValueAt:
    // requesting it returns the row's DataDescriptor object itself.
    protected static final int ROW_OBJECT = -1;
    private EventListenerList listenerList;
    private ActionListener actionListenerDelegate;
    // ToolBar components.
    private JToolBar toolBar;
    private JButton refreshButton;
    private JButton installButton;
    private JButton uninstallButton;
    private JButton propertiesButton;
    private JButton zoomButton;
    // Table components.
    private DataDescriptorTableModel tableModel;
    private JTable table;
    private JScrollPane scrollPane;

    public InstalledDataPanel()
    {
        this.listenerList = new EventListenerList();
        // Single internal listener that re-dispatches button events to all
        // externally registered ActionListeners.
        this.actionListenerDelegate = new ActionListenerDelegate(this);
        this.makeComponents();
        this.layoutComponents();
    }

    /**
     * Reloads the table contents from the data descriptors found under the
     * given install path and repaints the panel.
     */
    public void update(String installPath)
    {
        this.fill(installPath);
        this.revalidate();
        this.repaint();
    }

    public JButton getRefreshButton()
    {
        return this.refreshButton;
    }

    public JButton getInstallButton()
    {
        return this.installButton;
    }

    public JButton getUninstallButton()
    {
        return this.uninstallButton;
    }

    public JButton getPropertiesButton()
    {
        return this.propertiesButton;
    }

    public JButton getZoomButton()
    {
        return this.zoomButton;
    }

    /** Returns the DataDescriptors of all rows currently in the table. */
    public Iterable<DataDescriptor> getDataDescriptors()
    {
        java.util.List<DataDescriptor> objects = new java.util.ArrayList<DataDescriptor>();
        Object o;
        int rowCount = this.table.getRowCount();
        for (int row = 0; row < rowCount; row++)
            // ROW_OBJECT asks the model for the row's backing object.
            if ((o = this.table.getModel().getValueAt(row, ROW_OBJECT)) != null)
                if (o instanceof DataDescriptor)
                    objects.add((DataDescriptor) o);
        return objects;
    }

    /** Returns the DataDescriptors of the rows currently selected in the table. */
    public Iterable<DataDescriptor> getSelectedDataDescriptors()
    {
        java.util.List<DataDescriptor> selectedObjects = new java.util.ArrayList<DataDescriptor>();
        int[] selectedRows = this.table.getSelectedRows();
        if (selectedRows != null)
        {
            Object o;
            for (int row : selectedRows)
                if ((o = this.table.getModel().getValueAt(row, ROW_OBJECT)) != null)
                    if (o instanceof DataDescriptor)
                        selectedObjects.add((DataDescriptor) o);
        }
        return selectedObjects;
    }

    public ActionListener[] getActionListeners()
    {
        return this.listenerList.getListeners(ActionListener.class);
    }

    /** Registers a listener for the toolbar button action commands. */
    public void addActionListener(ActionListener listener)
    {
        this.listenerList.add(ActionListener.class, listener);
    }

    public void removeActionListener(ActionListener listener)
    {
        this.listenerList.remove(ActionListener.class, listener);
    }

    protected JTable getTable()
    {
        return this.table;
    }

    protected JToolBar getToolBar()
    {
        return this.toolBar;
    }

    // Repopulates the table model from the file store's descriptors; an
    // unavailable file store simply leaves the table empty.
    protected void fill(String installPath)
    {
        // Clear any DataDescriptors from the table.
        this.tableModel.setRowValues(null);
        FileStore fileStore = WorldWind.getDataFileStore();
        if (fileStore == null)
            return;
        java.util.List<? extends DataDescriptor> dataDescriptors = fileStore.findDataDescriptors(installPath);
        this.tableModel.setRowValues(dataDescriptors);
    }

    // Creates toolbar buttons, table model, table and scroll pane, and wires
    // all buttons through the shared delegate listener.
    protected void makeComponents()
    {
        this.toolBar = new JToolBar();
        this.refreshButton = new JButton("Refresh");
        this.installButton = new JButton("Install new data");
        this.uninstallButton = new JButton("Uninstall selected");
        this.propertiesButton = new JButton("Properties");
        this.zoomButton = new JButton("Zoom to selected");
        this.refreshButton.setActionCommand(REFRESH_ACTION_COMMAND);
        this.installButton.setActionCommand(INSTALL_ACTION_COMMAND);
        this.uninstallButton.setActionCommand(UNINSTALL_ACTION_COMMAND);
        this.propertiesButton.setActionCommand(PROPERTIES_ACTION_COMMAND);
        this.zoomButton.setActionCommand(ZOOM_ACTION_COMMAND);
        this.refreshButton.addActionListener(this.actionListenerDelegate);
        this.installButton.addActionListener(this.actionListenerDelegate);
        this.uninstallButton.addActionListener(this.actionListenerDelegate);
        this.propertiesButton.addActionListener(this.actionListenerDelegate);
        this.zoomButton.addActionListener(this.actionListenerDelegate);
        this.toolBar.add(this.refreshButton);
        this.toolBar.add(this.installButton);
        this.toolBar.add(this.uninstallButton);
        this.toolBar.add(this.propertiesButton);
        this.toolBar.add(this.zoomButton);
        this.tableModel = new DataDescriptorTableModel();
        this.setupTableModel(this.tableModel);
        this.table = new JTable(this.tableModel);
        this.table.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
        this.table.setColumnSelectionAllowed(false);
        this.scrollPane = new JScrollPane(this.table);
    }

    // Lays the toolbar out at the top and the scrollable table in the center.
    protected void layoutComponents()
    {
        this.setLayout(new BorderLayout());
        this.setBorder(
                new CompoundBorder(BorderFactory.createEmptyBorder(9, 9, 9, 9), new TitledBorder("Installed Data")));
        this.toolBar.setFloatable(false);
        this.toolBar.setRollover(true);
        this.toolBar.setBackground(this.toolBar.getBackground().darker());
        this.add(this.toolBar, BorderLayout.NORTH);
        this.table.getColumn(COLUMN_INSTALLED).setMaxWidth(60);
        this.table.getColumn(COLUMN_NAME).setPreferredWidth(200);
        this.table.getColumn(COLUMN_DATA_STORE_LOCATION).setPreferredWidth(100);
        this.table.getColumn(COLUMN_DATA_STORE_PATH).setPreferredWidth(100);
        this.scrollPane.setPreferredSize(new Dimension(this.scrollPane.getPreferredSize().width, 160));
        this.add(this.scrollPane, BorderLayout.CENTER);
    }

    // Configures the four table columns: each column shows the DataDescriptor
    // value keyed by the corresponding AVKey parameter.
    private void setupTableModel(DataDescriptorTableModel tableModel)
    {
        java.util.List<String> columnNames = new ArrayList<String>();
        java.util.List<Class<?>> columnTypes = new ArrayList<Class<?>>();
        java.util.List<String> columnParams = new ArrayList<String>();
        columnNames.add(COLUMN_INSTALLED);
        columnTypes.add(Boolean.class);
        columnParams.add(AVKey.INSTALLED);
        columnNames.add(COLUMN_NAME);
        columnTypes.add(String.class);
        columnParams.add(AVKey.DATASET_NAME);
        columnNames.add(COLUMN_DATA_STORE_LOCATION);
        columnTypes.add(java.io.File.class);
        columnParams.add(AVKey.FILE_STORE_LOCATION);
        columnNames.add(COLUMN_DATA_STORE_PATH);
        columnTypes.add(String.class);
        columnParams.add(AVKey.DATA_CACHE_NAME);
        tableModel.setColumnNames(columnNames);
        tableModel.setColumnTypes(columnTypes);
        tableModel.setColumnParameters(columnParams);
    }

    // Forwards a button's ActionEvent to every ActionListener registered on
    // the panel's EventListenerList.
    private static class ActionListenerDelegate implements ActionListener
    {
        private InstalledDataPanel panel;

        public ActionListenerDelegate(InstalledDataPanel panel)
        {
            this.panel = panel;
        }

        public void actionPerformed(ActionEvent e)
        {
            // Guaranteed to return a non-null array
            Object[] listeners = this.panel.listenerList.getListenerList();
            // Process the listeners last to first, notifying
            // those that are interested in this event
            for (int i = listeners.length - 2; i >= 0; i -= 2)
            {
                if (listeners[i] == ActionListener.class)
                {
                    ((ActionListener) listeners[i + 1]).actionPerformed(e);
                }
            }
        }
    }

    /**
     * Table model whose rows are DataDescriptors and whose columns are driven
     * by parallel name/type/parameter lists; cell values come from
     * DataDescriptor.getValue(parameter).
     */
    protected static class DataDescriptorTableModel extends AbstractTableModel
    {
        private java.util.List<String> columnNames = new java.util.ArrayList<String>();
        private java.util.List<Class> columnTypes = new java.util.ArrayList<Class>();
        private java.util.List<String> columnParams = new java.util.ArrayList<String>();
        private java.util.List<DataDescriptor> rowValues = new java.util.ArrayList<DataDescriptor>();

        public DataDescriptorTableModel()
        {
        }

        public int getColumnCount()
        {
            return this.columnParams.size();
        }

        public int getRowCount()
        {
            return this.rowValues.size();
        }

        public String getColumnName(int columnIndex)
        {
            if (columnIndex < 0 || columnIndex >= this.columnNames.size())
                return super.getColumnName(columnIndex);
            return this.columnNames.get(columnIndex);
        }

        public Class<?> getColumnClass(int columnIndex)
        {
            if (columnIndex < 0 || columnIndex >= this.columnTypes.size())
                return Object.class;
            return this.columnTypes.get(columnIndex);
        }

        // For ROW_OBJECT (-1) returns the row's DataDescriptor itself;
        // otherwise looks the cell value up via the column's AVKey parameter.
        public Object getValueAt(int rowIndex, int columnIndex)
        {
            if (rowIndex < 0 || rowIndex >= this.rowValues.size())
                return null;
            DataDescriptor descriptor = this.rowValues.get(rowIndex);
            if (descriptor == null)
                return null;
            if (columnIndex == ROW_OBJECT)
                return descriptor;
            else if (columnIndex < 0 || columnIndex >= this.columnParams.size())
                return null;
            String parameter = this.columnParams.get(columnIndex);
            if (parameter == null)
                return null;
            return descriptor.getValue(parameter);
        }

        public java.util.List<String> getColumnNames()
        {
            return java.util.Collections.unmodifiableList(this.columnNames);
        }

        // NOTE(review): the sibling setters fire fireTableStructureChanged();
        // firing only a HEADER_ROW rows-updated event here may leave the
        // JTable's visible column headers stale — confirm this is intended.
        public void setColumnNames(Iterable<? extends String> newColumnNames)
        {
            this.columnNames.clear();
            if (newColumnNames != null)
                for (String name : newColumnNames)
                    this.columnNames.add(name);
            this.fireTableRowsUpdated(TableModelEvent.HEADER_ROW, TableModelEvent.HEADER_ROW);
        }

        public java.util.List<Class> getColumnTypes()
        {
            return java.util.Collections.unmodifiableList(this.columnTypes);
        }

        public void setColumnTypes(Iterable<Class<?>> newColumnTypes)
        {
            this.columnTypes.clear();
            if (newColumnTypes != null)
                for (Class cls : newColumnTypes)
                    this.columnTypes.add(cls);
            this.fireTableStructureChanged();
        }

        public java.util.List<String> getColumnParameters()
        {
            return java.util.Collections.unmodifiableList(this.columnParams);
        }

        public void setColumnParameters(Iterable<? extends String> newColumnParameters)
        {
            this.columnParams.clear();
            if (newColumnParameters != null)
                for (String parameter : newColumnParameters)
                    this.columnParams.add(parameter);
            this.fireTableStructureChanged();
        }

        public java.util.List<DataDescriptor> getRowValues()
        {
            return java.util.Collections.unmodifiableList(this.rowValues);
        }

        // Replaces all rows; a null iterable clears the table.
        public void setRowValues(Iterable<? extends DataDescriptor> newRowValues)
        {
            this.rowValues.clear();
            if (newRowValues != null)
                for (DataDescriptor descriptor : newRowValues)
                    this.rowValues.add(descriptor);
            this.fireTableDataChanged();
        }
    }
}
| |
package org.broadinstitute.hellbender.tools.picard.vcf;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.liftover.LiftOver;
import htsjdk.samtools.reference.ReferenceSequenceFileWalker;
import htsjdk.samtools.util.CloserUtil;
import htsjdk.samtools.util.CollectionUtil;
import htsjdk.samtools.util.IOUtil;
import htsjdk.samtools.util.Interval;
import htsjdk.samtools.util.Log;
import htsjdk.samtools.util.ProgressLogger;
import htsjdk.samtools.util.SequenceUtil;
import htsjdk.samtools.util.SortingCollection;
import htsjdk.samtools.util.StringUtil;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFFilterHeaderLine;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFRecordCodec;
import org.broadinstitute.hellbender.cmdline.Argument;
import org.broadinstitute.hellbender.cmdline.CommandLineProgramProperties;
import org.broadinstitute.hellbender.cmdline.PicardCommandLineProgram;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.cmdline.programgroups.VariantProgramGroup;
import java.io.File;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.*;
/**
 * Tool for lifting over a VCF to another genome build and producing a properly header'd,
 * sorted and indexed VCF in one go.
 *
 * @author Tim Fennell
 */
@CommandLineProgramProperties(
        usage = "Lifts a VCF over from one genome build to another using UCSC liftover. The output file will be sorted " +
                "and indexed. Records may be rejected because they cannot be lifted over or because post-liftover the " +
                "reference allele mismatches the target genome build. Rejected records will be emitted with filters " +
                "to the REJECT file, on the source genome.",
        usageShort = "Lifts a VCF between genome builds.",
        programGroup = VariantProgramGroup.class
)
public class LiftoverVcf extends PicardCommandLineProgram {
    @Argument(shortName = StandardArgumentDefinitions.INPUT_SHORT_NAME, doc = "The input VCF/BCF file to be lifted over.")
    public File INPUT;

    @Argument(shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME, doc = "The output location to write the lifted over VCF/BCF to.")
    public File OUTPUT;

    @Argument(shortName = "C", doc = "The liftover chain file. See https://genome.ucsc.edu/goldenPath/help/chain.html for a description" +
            " of chain files. See http://hgdownload.soe.ucsc.edu/downloads.html#terms for where to download chain files.")
    public File CHAIN;

    @Argument(doc = "File to which to write rejected records.")
    public File REJECT;

    /** Filter name to use when a target cannot be lifted over. */
    public static final String FILTER_CANNOT_LIFTOVER = "FailedLiftover";

    /** Filter name to use when a target is lifted over, but the reference allele doesn't match the new reference. */
    public static final String FILTER_MISMATCHING_REF_ALLELE = "MismatchedRefAllele";

    /** Filters to be added to the REJECT file. */
    private static final List<VCFFilterHeaderLine> FILTERS = CollectionUtil.makeList(
            new VCFFilterHeaderLine(FILTER_CANNOT_LIFTOVER, "Variant could not be lifted between genome builds."),
            new VCFFilterHeaderLine(FILTER_MISMATCHING_REF_ALLELE, "Reference allele does not match reference genome sequence after liftover.")
    );

    private final Log log = Log.getInstance(LiftoverVcf.class);

    /**
     * Lifts each input variant over the chain file, reverse-complementing alleles for
     * negative-strand targets, rejecting failures to the REJECT file, and writing a
     * sorted, indexed VCF of the successes.
     *
     * @return always {@code null}; results are written to OUTPUT and REJECT
     */
    @Override
    protected Object doWork() {
        IOUtil.assertFileIsReadable(INPUT);
        IOUtil.assertFileIsReadable(REFERENCE_SEQUENCE);
        IOUtil.assertFileIsReadable(CHAIN);
        IOUtil.assertFileIsWritable(OUTPUT);
        IOUtil.assertFileIsWritable(REJECT);

        ////////////////////////////////////////////////////////////////////////
        // Setup the inputs
        ////////////////////////////////////////////////////////////////////////
        final LiftOver liftOver = new LiftOver(CHAIN);
        final VCFFileReader in = new VCFFileReader(INPUT, false);

        log.info("Loading up the target reference genome.");
        final ReferenceSequenceFileWalker walker = new ReferenceSequenceFileWalker(REFERENCE_SEQUENCE);
        final Map<String, byte[]> refSeqs = new HashMap<String, byte[]>();
        for (final SAMSequenceRecord rec : walker.getSequenceDictionary().getSequences()) {
            refSeqs.put(rec.getSequenceName(), walker.get(rec.getSequenceIndex()).getBases());
        }

        ////////////////////////////////////////////////////////////////////////
        // Setup the outputs
        ////////////////////////////////////////////////////////////////////////
        final VCFHeader inHeader = in.getFileHeader();
        final VCFHeader outHeader = new VCFHeader(inHeader);
        outHeader.setSequenceDictionary(walker.getSequenceDictionary());
        final VariantContextWriter out = new VariantContextWriterBuilder().setOption(Options.INDEX_ON_THE_FLY)
                .setOutputFile(OUTPUT).setReferenceDictionary(walker.getSequenceDictionary()).build();
        out.writeHeader(outHeader);

        // BUG FIX: the walker used to be closed *before* its sequence dictionary was
        // consumed to build the output header and writer above; close it only now
        // that all uses of it are complete.
        CloserUtil.close(walker);

        final VariantContextWriter rejects = new VariantContextWriterBuilder().setOutputFile(REJECT).unsetOption(Options.INDEX_ON_THE_FLY).build();
        final VCFHeader rejectHeader = new VCFHeader(in.getFileHeader());
        for (final VCFFilterHeaderLine line : FILTERS) rejectHeader.addMetaDataLine(line);
        rejects.writeHeader(rejectHeader);

        ////////////////////////////////////////////////////////////////////////
        // Read the input VCF, lift the records over and write to the sorting
        // collection.
        ////////////////////////////////////////////////////////////////////////
        long failedLiftover = 0, failedAlleleCheck = 0, total = 0;
        log.info("Lifting variants over and sorting.");

        final SortingCollection<VariantContext> sorter = SortingCollection.newInstance(VariantContext.class,
                new VCFRecordCodec(outHeader),
                outHeader.getVCFRecordComparator(),
                MAX_RECORDS_IN_RAM,
                TMP_DIR);

        ProgressLogger progress = new ProgressLogger(log, 1000000, "read");

        for (final VariantContext ctx : in) {
            ++total;
            final Interval source = new Interval(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false, ctx.getContig() + ":" + ctx.getStart() + "-" + ctx.getEnd());
            // Require the full interval to be lifted over (min-match fraction 1.0).
            final Interval target = liftOver.liftOver(source, 1.0);

            if (target == null) {
                rejects.add(new VariantContextBuilder(ctx).filter(FILTER_CANNOT_LIFTOVER).make());
                failedLiftover++;
            }
            else {
                // Fix the alleles if we went from positive to negative strand
                final List<Allele> alleles = new ArrayList<Allele>();
                for (final Allele oldAllele : ctx.getAlleles()) {
                    if (target.isPositiveStrand() || oldAllele.isSymbolic()) {
                        alleles.add(oldAllele);
                    }
                    else {
                        alleles.add(Allele.create(SequenceUtil.reverseComplement(oldAllele.getBaseString()), oldAllele.isReference()));
                    }
                }

                // Build the new variant context
                final VariantContextBuilder builder = new VariantContextBuilder(
                        ctx.getSource(),
                        target.getContig(),
                        target.getStart(),
                        target.getEnd(),
                        alleles);

                builder.id(ctx.getID());
                builder.attributes(ctx.getAttributes());
                builder.genotypes(ctx.getGenotypes());
                builder.filters(ctx.getFilters());
                builder.log10PError(ctx.getLog10PError());

                // Check that the reference allele still agrees with the reference sequence
                boolean mismatchesReference = false;
                for (final Allele allele : builder.getAlleles()) {
                    if (allele.isReference()) {
                        final byte[] ref = refSeqs.get(target.getContig());
                        // ROBUSTNESS: a target contig missing from the reference map used
                        // to NPE below; treat it as a reference mismatch and reject.
                        if (ref == null) {
                            mismatchesReference = true;
                        }
                        else {
                            final String refString = StringUtil.bytesToString(ref, target.getStart() - 1, target.length());
                            if (!refString.equalsIgnoreCase(allele.getBaseString())) {
                                mismatchesReference = true;
                            }
                        }
                        break;
                    }
                }

                if (mismatchesReference) {
                    rejects.add(new VariantContextBuilder(ctx).filter(FILTER_MISMATCHING_REF_ALLELE).make());
                    failedAlleleCheck++;
                }
                else {
                    sorter.add(builder.make());
                }
            }

            progress.record(ctx.getContig(), ctx.getStart());
        }

        final NumberFormat pfmt = new DecimalFormat("0.0000%");
        // BUG FIX: guard the percentage against division by zero (printed NaN for empty inputs).
        final String pct = pfmt.format(total == 0 ? 0 : (failedLiftover + failedAlleleCheck) / (double) total);
        log.info("Processed ", total, " variants.");
        log.info(failedLiftover, " variants failed to liftover.");
        log.info(failedAlleleCheck, " variants lifted over but had mismatching reference alleles after lift over.");
        log.info(pct, " of variants were not successfully lifted over and written to the output.");

        rejects.close();
        in.close();

        ////////////////////////////////////////////////////////////////////////
        // Write the sorted outputs to the final output file
        ////////////////////////////////////////////////////////////////////////
        sorter.doneAdding();
        progress = new ProgressLogger(log, 1000000, "written");
        log.info("Writing out sorted records to final VCF.");

        for (final VariantContext ctx : sorter) {
            out.add(ctx);
            progress.record(ctx.getContig(), ctx.getStart());
        }
        out.close();
        sorter.cleanup();

        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
* applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.apache.phoenix.hbase.index.covered;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.hbase.index.ValueGetter;
import org.apache.phoenix.hbase.index.covered.data.IndexMemStore;
import org.apache.phoenix.hbase.index.covered.data.LocalHBaseState;
import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
import org.apache.phoenix.hbase.index.covered.update.ColumnTracker;
import org.apache.phoenix.hbase.index.covered.update.IndexedColumnGroup;
import org.apache.phoenix.hbase.index.scanner.Scanner;
import org.apache.phoenix.hbase.index.scanner.ScannerBuilder;
import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
/**
 * Manage the state of the HRegion's view of the table, for the single row.
 * <p>
 * Currently, this is a single-use object - you need to create a new one for each row that you need to manage. In the
 * future, we could make this object reusable, but for the moment its easier to manage as a throw-away object.
 * <p>
 * This class is <b>not</b> thread-safe - it requires external synchronization if accessed concurrently.
 */
public class LocalTableState implements TableState {

    // Timestamp up to which reads of the underlying table state are allowed (see setCurrentTimestamp).
    private long ts;
    private RegionCoprocessorEnvironment env;
    // In-memory view of the row: pending update KeyValues plus lazily-loaded current state.
    private KeyValueStore memstore;
    // Source of the row's current persisted state.
    private LocalHBaseState table;
    // The mutation for which index updates are being built.
    private Mutation update;
    // Column trackers handed out so far; guarded by synchronized blocks below.
    private Set<ColumnTracker> trackedColumns = new HashSet<ColumnTracker>();
    private ScannerBuilder scannerBuilder;
    // KeyValues of the pending (not-yet-applied) update; see setPendingUpdates/applyPendingUpdates.
    private List<KeyValue> kvs = new ArrayList<KeyValue>();
    private List<? extends IndexedColumnGroup> hints;
    // Records which column references have already been loaded into the memstore.
    private CoveredColumns columnSet;

    /**
     * Creates single-row state backed by the given region environment, row-state source and mutation.
     */
    public LocalTableState(RegionCoprocessorEnvironment environment, LocalHBaseState table, Mutation update) {
        this.env = environment;
        this.table = table;
        this.update = update;
        this.memstore = new IndexMemStore();
        this.scannerBuilder = new ScannerBuilder(memstore, update);
        this.columnSet = new CoveredColumns();
    }

    /** Varargs convenience for {@link #addPendingUpdates(List)}; null is a no-op. */
    public void addPendingUpdates(KeyValue... kvs) {
        if (kvs == null) return;
        addPendingUpdates(Arrays.asList(kvs));
    }

    /** Records the given KeyValues as the pending update and applies them to the memstore. */
    public void addPendingUpdates(List<KeyValue> kvs) {
        if (kvs == null) return;
        setPendingUpdates(kvs);
        addUpdate(kvs);
    }

    private void addUpdate(List<KeyValue> list) {
        addUpdate(list, true);
    }

    // Applies the KeyValues to the memstore; overwrite=false is used when loading the
    // current row state so pending updates are not clobbered.
    private void addUpdate(List<KeyValue> list, boolean overwrite) {
        if (list == null) return;
        for (KeyValue kv : list) {
            this.memstore.add(kv, overwrite);
        }
    }

    @Override
    public RegionCoprocessorEnvironment getEnvironment() {
        return this.env;
    }

    @Override
    public long getCurrentTimestamp() {
        return this.ts;
    }

    /**
     * Set the current timestamp up to which the table should allow access to the underlying table.
     * This overrides the timestamp view provided by the indexer - use with care!
     * @param timestamp timestamp up to which the table should allow access.
     */
    public void setCurrentTimestamp(long timestamp) {
        this.ts = timestamp;
    }

    /** Discards all column trackers handed out so far. */
    public void resetTrackedColumns() {
        this.trackedColumns.clear();
    }

    /** Returns the live (mutable) set of column trackers handed out so far. */
    public Set<ColumnTracker> getTrackedColumns() {
        return this.trackedColumns;
    }

    /**
     * Get a scanner on the columns that are needed by the index.
     * <p>
     * The returned scanner is already pre-seeked to the first {@link KeyValue} that matches the given
     * columns with a timestamp earlier than the timestamp to which the table is currently set (the
     * current state of the table for which we need to build an update).
     * <p>
     * If none of the passed columns matches any of the columns in the pending update (as determined
     * by {@link ColumnReference#matchesFamily(byte[])} and
     * {@link ColumnReference#matchesQualifier(byte[])}, then an empty scanner will be returned. This
     * is because it doesn't make sense to build index updates when there is no change in the table
     * state for any of the columns you are indexing.
     * <p>
     * <i>NOTE:</i> This method should <b>not</b> be used during
     * {@link IndexCodec#getIndexDeletes(TableState, BatchState)} as the pending update will not yet have been
     * applied - you are merely attempting to cleanup the current state and therefore do <i>not</i>
     * need to track the indexed columns.
     * <p>
     * As a side-effect, we update a timestamp for the next-most-recent timestamp for the columns you
     * request - you will never see a column with the timestamp we are tracking, but the next oldest
     * timestamp for that column.
     * @param indexedColumns the columns to that will be indexed
     * @param ignoreNewerMutations ignore mutations newer than m when determining current state. Useful
     *     when replaying mutation state for partial index rebuild where writes succeeded to the data
     *     table, but not to the index table.
     * @return an iterator over the columns and the {@link IndexUpdate} that should be passed back to
     *     the builder. Even if no update is necessary for the requested columns, you still need
     *     to return the {@link IndexUpdate}, just don't set the update for the
     *     {@link IndexUpdate}.
     * @throws IOException
     */
    public Pair<Scanner, IndexUpdate> getIndexedColumnsTableState(
        Collection<? extends ColumnReference> indexedColumns, boolean ignoreNewerMutations) throws IOException {
        ensureLocalStateInitialized(indexedColumns, ignoreNewerMutations);
        // filter out things with a newer timestamp and track the column references to which it applies
        ColumnTracker tracker = new ColumnTracker(indexedColumns);
        synchronized (this.trackedColumns) {
            // we haven't seen this set of columns before, so we need to create a new tracker
            if (!this.trackedColumns.contains(tracker)) {
                this.trackedColumns.add(tracker);
            }
        }
        Scanner scanner = this.scannerBuilder.buildIndexedColumnScanner(indexedColumns, tracker, ts);
        return new Pair<Scanner, IndexUpdate>(scanner, new IndexUpdate(tracker));
    }

    /**
     * Initialize the managed local state. Generally, this will only be called by
     * {@link #getIndexedColumnsTableState(Collection, boolean)}, which is unlikely to be called concurrently from the
     * outside. Even then, there is still fairly low contention as each new Put/Delete will have its own table state.
     */
    private synchronized void ensureLocalStateInitialized(Collection<? extends ColumnReference> columns, boolean ignoreNewerMutations)
            throws IOException {
        // check to see if we haven't initialized any columns yet
        Collection<? extends ColumnReference> toCover = this.columnSet.findNonCoveredColumns(columns);
        // we have all the columns loaded, so we are good to go.
        if (toCover.isEmpty()) { return; }
        // add the current state of the row (without overwriting the pending update)
        this.addUpdate(this.table.getCurrentRowState(update, toCover, ignoreNewerMutations).list(), false);
        // add the covered columns to the set
        for (ColumnReference ref : toCover) {
            this.columnSet.addColumn(ref);
        }
    }

    @Override
    public Map<String, byte[]> getUpdateAttributes() {
        return this.update.getAttributesMap();
    }

    @Override
    public byte[] getCurrentRowKey() {
        return this.update.getRow();
    }

    /**
     * Stores the index column hints exposed via {@link #getIndexColumnHints()}.
     * @param hints
     */
    public void setHints(List<? extends IndexedColumnGroup> hints) {
        this.hints = hints;
    }

    @Override
    public List<? extends IndexedColumnGroup> getIndexColumnHints() {
        return this.hints;
    }

    @Override
    public Collection<KeyValue> getPendingUpdate() {
        return this.kvs;
    }

    /**
     * Set the {@link KeyValue}s in the update for which we are currently building an index update, but don't actually
     * apply them.
     *
     * @param update
     *            pending {@link KeyValue}s
     */
    public void setPendingUpdates(Collection<KeyValue> update) {
        this.kvs.clear();
        this.kvs.addAll(update);
    }

    /**
     * Apply the {@link KeyValue}s set in {@link #setPendingUpdates(Collection)}.
     */
    public void applyPendingUpdates() {
        this.addUpdate(kvs);
    }

    /**
     * Rollback all the given values from the underlying state.
     *
     * @param values
     */
    public void rollback(Collection<KeyValue> values) {
        for (KeyValue kv : values) {
            this.memstore.rollback(kv);
        }
    }

    /** Builds a {@link ValueGetter} view over the indexed-column scanner for this row. */
    @Override
    public Pair<ValueGetter, IndexUpdate> getIndexUpdateState(Collection<? extends ColumnReference> indexedColumns, boolean ignoreNewerMutations)
            throws IOException {
        Pair<Scanner, IndexUpdate> pair = getIndexedColumnsTableState(indexedColumns, ignoreNewerMutations);
        ValueGetter valueGetter = IndexManagementUtil.createGetterFromScanner(pair.getFirst(), getCurrentRowKey());
        return new Pair<ValueGetter, IndexUpdate>(valueGetter, pair.getSecond());
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.common;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.marshalling.impl.MarshallerReaderContext;
import org.drools.core.marshalling.impl.MarshallerWriteContext;
import org.drools.core.marshalling.impl.ProtobufMessages;
import org.drools.core.phreak.PropagationEntry;
import org.drools.core.spi.Activation;
import org.drools.core.spi.PropagationContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * Implementation of a <code>RuleFlowGroup</code> that collects activations
 * of rules of this ruleflow-group.
 * If this group is activated, all its activations are added to the agenda.
 * As long as this group is active, its activations are added to the agenda.
 * Deactivating the group removes all its activations from the agenda and
 * collects them until it is activated again.
 * By default, <code>RuleFlowGroups</code> are automatically deactivated when there are no more
 * activations in the <code>RuleFlowGroup</code>. However, this can be configured.
 */
public class RuleFlowGroupImpl
    implements
    InternalRuleFlowGroup,
    InternalAgendaGroup {

    private static final long serialVersionUID = 510l;

    private InternalWorkingMemory workingMemory;
    private boolean autoDeactivate = true;
    private List<RuleFlowGroupListener> listeners;
    // Maps process instance id -> node instance id for instances attached to this group.
    private Map<Long, String> nodeInstances = new HashMap<Long, String>();
    // All queue and recency behavior is delegated to this agenda group.
    private final InternalAgendaGroup agendaGroup;

    public RuleFlowGroupImpl() {
        agendaGroup = null;
    }

    /**
     * Construct a <code>RuleFlowGroupImpl</code> with the given name.
     *
     * @param name
     *      The RuleFlowGroup name.
     */
    public RuleFlowGroupImpl(final String name, InternalKnowledgeBase kBase) {
        agendaGroup = new AgendaGroupQueueImpl(name, kBase);
    }

    public RuleFlowGroupImpl(final String name,
                             final boolean active,
                             final boolean autoDeactivate) {
        agendaGroup = null;
    }

    public String getName() {
        return agendaGroup.getName();
    }

    public void setWorkingMemory(InternalWorkingMemory workingMemory) {
        this.workingMemory = workingMemory;
    }

    public InternalWorkingMemory getWorkingMemory() {
        return this.workingMemory;
    }

    @Override
    public void hasRuleFlowListener(boolean hasRuleFlowLister) {
        // intentionally a no-op in this implementation
    }

    @Override
    public boolean isRuleFlowListener() {
        return false;
    }

    public Activation remove() {
        return agendaGroup.remove();
    }

    public Activation peek() {
        return agendaGroup.peek();
    }

    public void setActive(final boolean active) {
        this.agendaGroup.setActive( active );
    }

    public boolean isActive() {
        return agendaGroup.isActive();
    }

    @Override
    public void setAutoFocusActivator(PropagationContext ctx) {
        // intentionally a no-op in this implementation
    }

    @Override
    public PropagationContext getAutoFocusActivator() {
        return null;
    }

    public boolean isAutoDeactivate() {
        return this.autoDeactivate;
    }

    /**
     * Enables/disables auto-deactivation; if enabling while the group is active
     * and already empty, the group is deactivated immediately.
     */
    public void setAutoDeactivate(final boolean autoDeactivate) {
        this.autoDeactivate = autoDeactivate;
        synchronized ( agendaGroup ) {
            if ( autoDeactivate && agendaGroup.isActive() && agendaGroup.isEmpty() ) {
                this.agendaGroup.setActive( false );
            }
        }
    }

    public void clear() {
        synchronized ( agendaGroup ) {
            agendaGroup.clear();
        }
    }

    public void reset() {
        synchronized ( agendaGroup ) {
            agendaGroup.reset();
        }
    }

    @Override
    public void setFocus() {
        // intentionally a no-op in this implementation
    }

    @Override
    public Activation[] getAndClear() {
        return new Activation[0];
    }

    @Override
    public void add(Activation activation) {
        addActivation( activation );
    }

    public void addActivation(final Activation activation) {
        synchronized ( agendaGroup ) {
            agendaGroup.add(activation);
        }
    }

    @Override
    public void remove(Activation activation) {
        removeActivation( activation );
    }

    public void removeActivation(final Activation activation) {
        synchronized ( agendaGroup ) {
            agendaGroup.remove(activation);
        }
    }

    /**
     * Checks if this ruleflow group is active and should automatically deactivate.
     * If the queue is empty, it deactivates the group.
     * (Currently a no-op in this delegating implementation.)
     */
    public void deactivateIfEmpty() {
    }

    public void addRuleFlowGroupListener(RuleFlowGroupListener listener) {
        if ( listeners == null ) {
            listeners = new CopyOnWriteArrayList<RuleFlowGroupListener>();
        }
        listeners.add( listener );
    }

    public void removeRuleFlowGroupListener(RuleFlowGroupListener listener) {
        if ( listeners != null ) {
            listeners.remove( listener );
        }
    }

    /** Notifies all registered listeners that this group was deactivated. */
    public void notifyRuleFlowGroupListeners() {
        if ( listeners != null ) {
            for ( RuleFlowGroupListener listener : listeners ) {
                listener.ruleFlowGroupDeactivated();
            }
        }
    }

    public boolean isEmpty() {
        synchronized ( agendaGroup ) {
            return agendaGroup.isEmpty();
        }
    }

    public Activation[] getActivations() {
        synchronized ( agendaGroup ) {
            // NOTE(review): delegation is disabled here; callers currently receive null.
            return null;
        }
    }

    public java.util.Iterator iterator() {
        // NOTE(review): delegation is disabled here; callers currently receive null.
        return null;
    }

    public void addNodeInstance(Long processInstanceId,
                                String nodeInstanceId) {
        nodeInstances.put( processInstanceId,
                           nodeInstanceId );
    }

    public void removeNodeInstance(Long processInstanceId,
                                   String nodeInstanceId) {
        // BUG FIX: this previously called nodeInstances.put(...), (re-)adding the
        // entry instead of removing it.
        nodeInstances.remove( processInstanceId );
    }

    public Map<Long, String> getNodeInstances() {
        return nodeInstances;
    }

    public void setActivatedForRecency(long recency) {
        agendaGroup.setActivatedForRecency( recency );
    }

    public long getActivatedForRecency() {
        return agendaGroup.getActivatedForRecency();
    }

    public void setClearedForRecency(long recency) {
        agendaGroup.setClearedForRecency( recency );
    }

    public long getClearedForRecency() {
        return agendaGroup.getClearedForRecency();
    }

    public String toString() {
        // BUG FIX: this previously called agendaGroup.remove(), which dequeued (and
        // discarded) an activation as a side effect of printing. Use the name instead.
        return "RuleFlowGroup '" + getName() + "'";
    }

    @Override
    public void visited() {
        agendaGroup.visited();
    }

    public int size() {
        synchronized ( agendaGroup ) {
            return agendaGroup.size();
        }
    }

    public boolean equals(final Object object) {
        if ( this == object ) {
            return true;
        }
        // instanceof already rejects null, so no separate null check is needed
        if ( !(object instanceof RuleFlowGroupImpl) ) {
            return false;
        }
        return ((RuleFlowGroupImpl) object).getName().equals( getName() );
    }

    public int hashCode() {
        return getName().hashCode();
    }

    /**
     * Working-memory action that deactivates a ruleflow group once it has
     * become empty; serializable for session marshalling.
     */
    public static class DeactivateCallback
        extends PropagationEntry.AbstractPropagationEntry
        implements WorkingMemoryAction {

        private static final long serialVersionUID = 510l;

        private InternalRuleFlowGroup ruleFlowGroup;

        public DeactivateCallback(InternalRuleFlowGroup ruleFlowGroup) {
            this.ruleFlowGroup = ruleFlowGroup;
        }

        public DeactivateCallback(MarshallerReaderContext context) throws IOException {
            this.ruleFlowGroup = (InternalRuleFlowGroup) context.wm.getAgenda().getRuleFlowGroup( context.readUTF() );
        }

        public DeactivateCallback(MarshallerReaderContext context,
                                  ProtobufMessages.ActionQueue.Action _action) {
            this.ruleFlowGroup = (InternalRuleFlowGroup) context.wm.getAgenda().getRuleFlowGroup( _action.getDeactivateCallback().getRuleflowGroup() );
        }

        public ProtobufMessages.ActionQueue.Action serialize(MarshallerWriteContext context) {
            return ProtobufMessages.ActionQueue.Action.newBuilder()
                    .setType( ProtobufMessages.ActionQueue.ActionType.DEACTIVATE_CALLBACK )
                    .setDeactivateCallback( ProtobufMessages.ActionQueue.DeactivateCallback.newBuilder()
                                                    .setRuleflowGroup( ruleFlowGroup.getName() )
                                                    .build() )
                    .build();
        }

        public void execute(InternalWorkingMemory workingMemory) {
            // check whether ruleflow group is still empty first
            if ( this.ruleFlowGroup.isEmpty() ) {
                // deactivate ruleflow group
                this.ruleFlowGroup.setActive( false );
            }
        }
    }

    @Override
    public boolean isSequential() {
        return agendaGroup.isSequential();
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.converters.collections.AbstractCollectionConverter;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import com.thoughtworks.xstream.mapper.Mapper;
import hudson.model.Describable;
import hudson.model.Saveable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
* Collection whose change is notified to the parent object for persistence.
*
* @author Kohsuke Kawaguchi
* @since 1.MULTISOURCE
*/
public class PersistedList<T> implements Iterable<T> {
protected final CopyOnWriteList<T> data = new CopyOnWriteList<T>();
protected Saveable owner = Saveable.NOOP;
/**
 * Creates an empty list with the no-op owner; assign a real owner via
 * {@link #setOwner(Saveable)} before mutations should be persisted.
 */
protected PersistedList() {
}

/**
 * Creates a list seeded with the given items. No save notification is fired.
 *
 * @param initialList initial contents, copied into the backing list
 */
protected PersistedList(Collection<? extends T> initialList) {
    data.replaceBy(initialList);
}

/**
 * Creates an empty list whose mutations are persisted through {@code owner}.
 */
public PersistedList(Saveable owner) {
    setOwner(owner);
}

/**
 * Sets the object notified (via {@link Saveable#save()}) whenever this list is mutated.
 */
public void setOwner(Saveable owner) {
    this.owner = owner;
}

/**
 * Appends an item and persists the change.
 *
 * @throws IOException if the owner fails to save
 */
public void add(T item) throws IOException {
    data.add(item);
    onModified();
}

/**
 * Appends all of the given items and persists the change once.
 *
 * @throws IOException if the owner fails to save
 */
public void addAll(Collection<? extends T> items) throws IOException {
    data.addAll(items);
    onModified();
}

/**
 * Replaces the entire contents with {@code col} and persists the change.
 *
 * @throws IOException if the owner fails to save
 */
public void replaceBy(Collection<? extends T> col) throws IOException {
    data.replaceBy(col);
    onModified();
}

/** Returns the element at the given index of the current snapshot. */
public T get(int index) {
    return data.get(index);
}

/**
 * Returns the first element that is an instance of {@code type}, or
 * {@code null} if there is none.
 */
public <U extends T> U get(Class<U> type) {
    for (T t : data)
        if(type.isInstance(t))
            return type.cast(t);
    return null;
}
/**
 * Gets all instances that matches the given type.
 *
 * @return a new mutable list; never null
 */
public <U extends T> List<U> getAll(Class<U> type) {
    List<U> r = new ArrayList<U>();
    for (T t : data)
        if(type.isInstance(t))
            r.add(type.cast(t));
    return r;
}

/** Returns the number of elements currently in the list. */
public int size() {
    return data.size();
}

/**
 * Removes an instance by its type.
 * Only the first element whose class is exactly {@code type} is removed
 * (subtypes do not match); the change is then persisted.
 *
 * @throws IOException if the owner fails to save
 */
public void remove(Class<? extends T> type) throws IOException {
    for (T t : data) {
        if(t.getClass()==type) {
            data.remove(t);
            onModified();
            return;
        }
    }
}
/**
 * A convenience method to replace a single item.
 *
 * This method shouldn't be used when you are replacing a lot of stuff
 * as copy-on-write semantics make this rather slow.
 *
 * @param from the item to replace (matched by {@code equals}); every occurrence is replaced
 * @param to the replacement item
 * @throws IOException if the owner fails to save
 */
public void replace(T from, T to) throws IOException {
    List<T> copy = new ArrayList<T>(data.getView());
    boolean changed = false;
    for (int i=0; i<copy.size(); i++) {
        if (copy.get(i).equals(from)) {
            copy.set(i,to);
            changed = true;
        }
    }
    data.replaceBy(copy);
    // BUG FIX: every other mutator persists its change via onModified(), and this
    // method already declared IOException, yet replacements were never saved and
    // would silently revert on restart.
    if (changed)
        onModified();
}
public boolean remove(T o) throws IOException {
boolean b = data.remove(o);
if (b) onModified();
return b;
}
public void removeAll(Class<? extends T> type) throws IOException {
boolean modified=false;
for (T t : data) {
if(t.getClass()==type) {
data.remove(t);
modified=true;
}
}
if(modified)
onModified();
}
public void clear() {
data.clear();
}
public Iterator<T> iterator() {
return data.iterator();
}
/**
* Called when a list is mutated.
*/
protected void onModified() throws IOException {
owner.save();
}
/**
* Returns the snapshot view of instances as list.
*/
public List<T> toList() {
return data.getView();
}
/**
* Gets all the {@link Describable}s in an array.
*/
public T[] toArray(T[] array) {
return data.toArray(array);
}
public void addAllTo(Collection<? super T> dst) {
data.addAllTo(dst);
}
public boolean isEmpty() {
return data.isEmpty();
}
public boolean contains(Object item) {
return data.contains(item);
}
@Override public String toString() {
return toList().toString();
}
/**
* {@link Converter} implementation for XStream.
*
* Serializaion form is compatible with plain {@link List}.
*/
public static class ConverterImpl extends AbstractCollectionConverter {
CopyOnWriteList.ConverterImpl copyOnWriteListConverter;
public ConverterImpl(Mapper mapper) {
super(mapper);
copyOnWriteListConverter = new CopyOnWriteList.ConverterImpl(mapper());
}
public boolean canConvert(Class type) {
// handle subtypes in case the onModified method is overridden.
return PersistedList.class.isAssignableFrom(type);
}
public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
for (Object o : (PersistedList) source)
writeItem(o, context, writer);
}
public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
CopyOnWriteList core = copyOnWriteListConverter.unmarshal(reader, context);
try {
PersistedList r = (PersistedList)context.getRequiredType().newInstance();
r.data.replaceBy(core);
return r;
} catch (InstantiationException e) {
InstantiationError x = new InstantiationError();
x.initCause(e);
throw x;
} catch (IllegalAccessException e) {
IllegalAccessError x = new IllegalAccessError();
x.initCause(e);
throw x;
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache30;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.core.Is.is;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.awaitility.Awaitility;
import org.awaitility.core.ConditionFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheListener;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.ExpirationAction;
import org.apache.geode.cache.ExpirationAttributes;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionEvent;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.internal.cache.CacheDistributionAdvisor;
import org.apache.geode.internal.cache.DistributedRegion;
import org.apache.geode.internal.cache.InitialImageOperation;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.rules.DistributedRestoreSystemProperties;
import org.apache.geode.test.junit.categories.DistributedTest;
import org.apache.geode.test.junit.rules.serializable.SerializableErrorCollector;
/**
* Make sure entry expiration does not happen during gii for bug 35214
*
* <p>
* TRAC #35214: hang during getInitialImage due to entry expiration
*
* <p>
* Entries should not expire during GII
*
* @since GemFire 5.0
*/
@Category(DistributedTest.class)
public class EntriesDoNotExpireDuringGIIRegressionTest extends CacheTestCase {

  private static final int ENTRY_COUNT = 100;
  private static final String REGION_NAME = "r1";

  // TODO: value of expirationCount is not validated
  // Incremented by the afterInvalidate listener; currently only used for its
  // side effect of existing while the listener validates initialization state.
  private AtomicInteger expirationCount;
  // Set by afterRegionCreate; the listener asserts it fired before any
  // invalidation is delivered.
  private AtomicBoolean afterRegionCreateInvoked;
  // The remote VM that seeds the region and performs updates during GII.
  private VM otherVM;

  @Rule
  public DistributedRestoreSystemProperties restoreSystemProperties =
      new DistributedRestoreSystemProperties();

  @Rule
  public SerializableErrorCollector errorCollector = new SerializableErrorCollector();

  @Before
  public void setUp() throws Exception {
    this.expirationCount = new AtomicInteger(0);
    this.afterRegionCreateInvoked = new AtomicBoolean(false);
    this.otherVM = Host.getHost(0).getVM(0);
    initOtherVm(this.otherVM);
    // NOTE(review): presumably switches expiry timing to millisecond units so
    // the 1-unit idle timeout below can fire within the test — confirm.
    System.setProperty(LocalRegion.EXPIRY_MS_PROPERTY, "true");
    // Throttle image processing so the other VM's updates overlap the GII.
    InitialImageOperation.slowImageProcessing = 30;
  }

  @After
  public void tearDown() throws Exception {
    // Undo the throttle so later tests run at full speed.
    InitialImageOperation.slowImageProcessing = 0;
  }

  /**
   * make sure entries do not expire during a GII
   */
  @Test
  public void entriesShouldNotExpireDuringGII() throws Exception {
    // Start concurrent updates in the other VM before creating the replicate
    // region here, so the updates race with our getInitialImage.
    AsyncInvocation updater = updateOtherVm(this.otherVM);
    AttributesFactory factory = new AttributesFactory();
    factory.setDataPolicy(DataPolicy.REPLICATE);
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setStatisticsEnabled(true);
    // Short idle timeout: entries would expire mid-GII if the bug regressed.
    factory.setEntryIdleTimeout(new ExpirationAttributes(1, ExpirationAction.INVALIDATE));
    factory.addCacheListener(createCacheListener());
    Region region = createRootRegion(REGION_NAME, factory.create());
    updater.await();
    // After GII completes, expiration invalidates the values but the keys
    // must all still be present.
    await().until(() -> region.values().size() == 0);
    assertThat(region.values().size()).isEqualTo(0);
    assertThat(region.keySet().size()).isEqualTo(ENTRY_COUNT);
  }

  // Seeds the region in the remote VM with ENTRY_COUNT entries.
  private void initOtherVm(final VM otherVM) {
    otherVM.invoke(new CacheSerializableRunnable("init") {
      @Override
      public void run2() throws CacheException {
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        Region region = createRootRegion(REGION_NAME, factory.create());
        for (int i = 1; i <= ENTRY_COUNT; i++) {
          region.put("key" + i, "value" + i);
        }
      }
    });
  }

  // Asynchronously hammers the region with destroys/puts while the local VM
  // is performing its GII, trying to provoke the deadlock from bug 35214.
  private AsyncInvocation updateOtherVm(final VM otherVM) {
    return otherVM.invokeAsync(new CacheSerializableRunnable("update") {
      @Override
      public void run2() throws CacheException {
        Region region = getRootRegion(REGION_NAME);
        // let the main guys gii get started; we want to do updates during his gii
        // wait for profile of getInitialImage cache to show up
        CacheDistributionAdvisor advisor =
            ((DistributedRegion) region).getCacheDistributionAdvisor();
        int expectedProfiles = 1;
        await().until(
            () -> assertThat(numberProfiles(advisor)).isGreaterThanOrEqualTo(expectedProfiles));
        // start doing updates of the keys to see if we can get deadlocked
        int updateCount = 1;
        do {
          for (int i = 1; i <= ENTRY_COUNT; i++) {
            String key = "key" + i;
            if (region.containsKey(key)) {
              region.destroy(key);
            } else {
              region.put(key, "value" + i + "uc" + updateCount);
            }
          }
        } while (updateCount++ < 20);
        // do one more loop with no destroys
        // ensures every key exists when the updater finishes, which the main
        // test's keySet assertion depends on
        for (int i = 1; i <= ENTRY_COUNT; i++) {
          String key = "key" + i;
          if (!region.containsKey(key)) {
            region.put(key, "value" + i + "uc" + updateCount);
          }
        }
      }
    });
  }

  // Number of replicate members visible to the GII advisor.
  private int numberProfiles(final CacheDistributionAdvisor advisor) {
    return advisor.adviseInitialImage(null).getReplicates().size();
  }

  // Listener that records region creation and verifies no invalidation is
  // delivered before the region is fully initialized (i.e. GII finished).
  private CacheListener createCacheListener() {
    return new CacheListenerAdapter() {
      @Override
      public void afterRegionCreate(final RegionEvent event) {
        afterRegionCreateInvoked.set(true);
      }

      @Override
      public void afterInvalidate(final EntryEvent event) {
        errorCollector.checkThat("afterRegionCreate should have been seen",
            afterRegionCreateInvoked.get(), is(true));
        errorCollector.checkThat("Region should have been initialized",
            ((LocalRegion) event.getRegion()).isInitialized(), is(true));
        expirationCount.incrementAndGet();
        // First expiration means GII is done; stop throttling.
        InitialImageOperation.slowImageProcessing = 0;
      }
    };
  }

  // Common wait policy for this test.
  private ConditionFactory await() {
    return Awaitility.await().atMost(2, MINUTES);
  }
}
| |
/*---------------------------------------------------------------
* Copyright 2005 by the Radiological Society of North America
*
* This source software is released under the terms of the
* RSNA Public License (http://mirc.rsna.org/rsnapubliclicense)
*----------------------------------------------------------------*/
package org.rsna.servlets;
import org.apache.log4j.Logger;
import org.rsna.server.HttpRequest;
import org.rsna.server.HttpResponse;
import org.rsna.server.User;
import org.rsna.server.Users;
import org.rsna.server.UsersXmlFileImpl;
import org.rsna.util.StringUtil;
import java.io.File;
import java.util.Hashtable;
/**
* The User Manager Servlet.
* This servlet provides a browser-accessible user interface for
* editing the users.xml file.
*/
public class UserManagerServlet extends Servlet {

    static final Logger logger = Logger.getLogger(UserManagerServlet.class);

    // NOTE(review): mutable instance state shared across requests — once any
    // request supplies the "suppress" parameter this stays "" for all later
    // requests. Confirm that sticky suppression is intended.
    String home = "/";

    /**
     * Construct a UserManagerServlet.
     * @param root the root directory of the server.
     * @param context the path identifying the servlet.
     */
    public UserManagerServlet(File root, String context) {
        super(root, context);
    }

    /**
     * The servlet method that responds to an HTTP GET.
     * This method returns an HTML page containing a form for
     * adding, removing, and changing users, roles and their
     * relationships.
     * @param req the request object.
     * @param res the response object.
     */
    @Override
    public void doGet(HttpRequest req, HttpResponse res) {
        //Get the Users object.
        Users users = Users.getInstance();

        //Make sure the user is authorized to do this; the page can only be
        //served when users are backed by the XML file implementation.
        if (!req.userHasRole("admin") || !(users instanceof UsersXmlFileImpl)) {
            res.setResponseCode(HttpResponse.forbidden);
            res.send();
            return;
        }

        //Make the page and return it.
        if (req.hasParameter("suppress")) home = "";
        res.write( getPage( (UsersXmlFileImpl)users ) );
        res.setContentType("html");
        res.disableCaching();
        res.send();
    }

    /**
     * The servlet method that responds to an HTTP POST.
     * This method interprets the posted parameters as a new set
     * of users and roles and constructs a new users.xml
     * file. It then returns an HTML page containing a new form
     * constructed from the new contents of the file.
     * @param req the request object.
     * @param res the response object.
     */
    @Override
    public void doPost(HttpRequest req, HttpResponse res) {
        if (logger.isDebugEnabled()) {
            String username = null;
            if (req.isFromAuthenticatedUser()) {
                username = req.getUser().getUsername();
            }
            logger.debug("POST received from "+username+" at "+req.getRemoteAddress()+"\n"+req.toString()+"\n");
            logger.debug("Headers:\n"+req.listHeaders(""));
            logger.debug("Cookies:\n"+req.listCookies(""));
            logger.debug("User has shutdown role: "+req.userHasRole("shutdown"));
            logger.debug("Request is from localhost: "+req.isFromLocalHost());
            logger.debug("Request is referred from context \""+context+"\": "+req.isReferredFrom(context));
            logger.debug("Request contains suppress parameter: "+req.hasParameter("suppress"));
        }

        //Make sure the user is authorized to do this. The referer check
        //provides basic CSRF protection for this state-changing request.
        if (!req.userHasRole("admin") || !req.isReferredFrom(context)) {
            res.setResponseCode(HttpResponse.forbidden);
            res.send();
            return;
        }
        boolean canShutdown = req.userHasRole("shutdown") || req.isFromLocalHost();

        //Get the Users object.
        Users users = Users.getInstance();

        //Make sure that this system is using the XML implementation.
        if (!(users instanceof UsersXmlFileImpl)) {
            res.setResponseCode(HttpResponse.notfound);
            res.send();
            return;
        }
        UsersXmlFileImpl usersXmlFileImpl = (UsersXmlFileImpl)users;

        //Get the roles known to the system
        java.util.HashSet<java.lang.String> systemRoles = usersXmlFileImpl.getRoles();

        //Get the parameter names and values
        String[] params = req.getParameterNames();
        String[] values = new String[params.length];
        for (int i=0; i<params.length; i++) {
            values[i] = req.getParameter(params[i]);
        }

        //Get the number of users and the number of roles
        int nUsers = getMaxIndex(params,"u") + 1;
        int nRoles = getMaxIndex(params,"r") + 1;

        //Get the names in a convenient array.
        String[] roleNames = new String[nRoles];
        for (int i=0; i<nRoles; i++) {
            roleNames[i] = getValue(params,values,"r",i);
        }

        //Make a new table to store the users we are now creating.
        Hashtable<String,User> newUserTable = new Hashtable<String,User>();

        //If the current user does not have the shutdown role, then he
        //cannot modify users with the shutdown role, so copy all the
        //shutdown users into the newUserTable in order to prevent the
        //current user from deleting them.
        if (!canShutdown) {
            String[] usernames = usersXmlFileImpl.getUsernames();
            for (int i=0; i<usernames.length; i++) {
                User user = usersXmlFileImpl.getUser(usernames[i]);
                if (user.hasRole("shutdown")) newUserTable.put(usernames[i],user);
            }
        }

        //Process all the input.
        for (int i=0; i<nUsers; i++) {
            String username = getValue(params,values,"u",i);
            if (!username.equals("")) {

                //Get the old user or create a new one if the old one doesn't exist.
                User user = usersXmlFileImpl.getUser(username);
                if (user == null) user = new User(username, "");

                //(Only process existing users with the shutdown
                //role if the current user has the shutdown role.)
                if (canShutdown || !user.hasRole("shutdown")) {

                    //Update the password, if present.
                    String pw = getValue(params,values,"p",i).trim();
                    if (!pw.equals("")) user.setPassword( usersXmlFileImpl.convertPassword(pw) );

                    //Update the roles
                    for (int j=0; j<nRoles; j++) {
                        String roleName = roleNames[j];
                        boolean roleEnabled = !getValue(params,values,"cb",i,j).equals("");
                        if (canShutdown || !roleName.equals("shutdown")) {
                            //Only assign roles that are known to the system.
                            //This prevents an attack that creates roles.
                            //Such an attack doesn't do any harm, but the
                            //IBM security suite complains about it.
                            if (roleEnabled && systemRoles.contains(roleName)) user.addRole(roleName);
                            else user.removeRole(roleName);
                        }
                    }
                    newUserTable.put(username,user);
                }
            }
        }

        //Reset the users database from the hashtable.
        usersXmlFileImpl.resetUsers(newUserTable);

        //Make a new page from the new data and return it.
        if (req.hasParameter("suppress")) home = "";
        res.write(getPage(usersXmlFileImpl));
        res.setContentType("html");
        res.disableCaching();
        res.send();
    }

    //Get the value of named parameter [i]
    private String getValue(String[] params, String[] values, String prefix, int i) {
        String name = prefix+i;
        return getValueFromName(params,values,name);
    }

    //Get the value of named parameter [i,j] (checkbox naming: prefix + "u" + i + "r" + j)
    private String getValue(String[] params, String[] values, String prefix, int i, int j) {
        String name = prefix + "u" + i + "r" + j;
        return getValueFromName(params,values,name);
    }

    //Get the value of the named parameter, trimmed and XSS-filtered;
    //returns the empty string if the parameter is absent or null.
    private String getValueFromName(String[] params, String[] values, String name) {
        for (int i=0; i<params.length; i++) {
            if (params[i].equals(name)) {
                String value = values[i];
                if (value == null) return "";
                return StringUtil.filterXSS(filter(value.trim()));
            }
        }
        return "";
    }

    //Find the maximum index value of a named parameter.
    //Parameters whose suffix is not an integer are skipped.
    private int getMaxIndex(String[] params, String prefix) {
        int max = 0;
        int v;
        for (int i=0; i<params.length; i++) {
            if (params[i].startsWith(prefix)) {
                try {
                    String rest = params[i].substring(prefix.length());
                    v = Integer.parseInt(rest);
                    if (v > max) max = v;
                }
                catch (Exception skip) {
                    logger.debug("Unparsable param value: \""+params[i]+"\"");
                }
            }
        }
        return max;
    }

    //Create an HTML page containing the form for managing
    //the users and roles.
    private String getPage(UsersXmlFileImpl users) {
        String[] usernames = users.getUsernames();
        String[] rolenames = users.getRoleNames();
        StringBuffer sb = new StringBuffer();
        responseHead(sb);
        makeTableHeader(sb, rolenames);
        makeTableRows(sb, users, usernames, rolenames);
        responseTail(sb);
        return sb.toString();
    }

    //Emit the table header: one column per role plus username/password columns.
    private void makeTableHeader(StringBuffer sb, String[] rolenames) {
        sb.append("<thead>\n");
        sb.append(" <tr>\n");
        sb.append(" <th class=\"thl\">Username</th>\n" );
        for (int i=0; i<rolenames.length; i++) {
            sb.append("<th class=\"thv\"><nobr>");
            sb.append("<input type=\"checkbox\" onclick=\"toggleRoles("+i+",event)\"/>&nbsp;"+rolenames[i]+"</nobr>");
            sb.append("<input name=\"r"+i+"\" type=\"hidden\" value=\""+rolenames[i]+"\"/></th>\n" );
        }
        sb.append(" <th class=\"thl\">Password</th>\n" );
        sb.append(" </tr>\n" );
        sb.append("</thead>\n" );
    }

    //Emit one row per existing user, plus a blank row for adding a new user.
    private void makeTableRows(
                    StringBuffer sb,
                    UsersXmlFileImpl users,
                    String[] usernames,
                    String[] rolenames) {
        for (int i=0; i<usernames.length; i++) {
            sb.append( "<tr>\n" );
            sb.append( " <td class=\"tdu\">"
                            + "<input name=\"u"+i+"\" value=\""+usernames[i]+"\"/>"
                            + "</td>\n" );
            for (int j=0; j<rolenames.length; j++) {
                sb.append( "<td><input name=\"cbu"+i+"r"+j+"\" type=\"checkbox\"" );
                if ((users.getUser(usernames[i]).hasRole(rolenames[j]))) sb.append( " checked=\"true\"" );
                sb.append( "/></td>\n" );
            }
            sb.append( " <td class=\"tdp\">"
                            + "<input name=\"p"+i+"\" type=\"password\" value=\"\"/>"
                            + "</td>\n" );
            sb.append( " </tr>\n" );
        }
        sb.append( "<tr>\n" );
        sb.append( "<td class=\"tdu\"><input name=\"u"+usernames.length+"\"/></td>\n" );
        for (int j=0; j<rolenames.length; j++) {
            sb.append( "<td><input name=\"cbu"+usernames.length+"r"+j+"\" type=\"checkbox\"/></td>\n" );
        }
        sb.append( " <td class=\"tdp\"><input name=\"p"+usernames.length+"\"/></td>\n" );
        sb.append( " </tr>\n" );
    }

    //Emit everything before the user table: head, scripts, icons, form open.
    private void responseHead(StringBuffer sb) {
        sb.append(
                "<html>\n"
            +	"	<head>\n"
            +	"		<title>User Manager</title>\n"
            +	"		<link rel=\"Stylesheet\" type=\"text/css\" media=\"all\" href=\"/BaseStyles.css\"></link>\n"
            +	"		<link rel=\"Stylesheet\" type=\"text/css\" media=\"all\" href=\"/JSPopup.css\"></link>\n"
            +	"		<link rel=\"Stylesheet\" type=\"text/css\" media=\"all\" href=\"/UserManagerServlet.css\"></link>\n"
            +	"		<script> var home = \""+home+"\";</script>\n"
            +	"		<script language=\"JavaScript\" type=\"text/javascript\" src=\"/JSUtil.js\">;</script>\n"
            +	"		<script language=\"JavaScript\" type=\"text/javascript\" src=\"/JSPopup.js\">;</script>\n"
            +	"		<script language=\"JavaScript\" type=\"text/javascript\" src=\"/UserManagerServlet.js\">;</script>\n"
            +	"	</head>\n"
            +	"	<body>\n"
            +	"		<div style=\"float:right;\">\n"
        );
        if (!home.equals("")) {
            sb.append(
                    "		<img src=\"/icons/home.png\"\n"
                +	"			onclick=\"window.open('"+home+"','_self');\"\n"
                +	"			style=\"margin:2px;\"\n"
                +	"			title=\"Return to the home page\"/>\n"
                +	"		<br>\n"
            );
        }
        sb.append(
                "		<img src=\"/icons/save.png\"\n"
            +	"			onclick=\"save();\"\n"
            +	"			style=\"margin:2px;\"\n"
            +	"			title=\"Save\"/>\n"
            +	"		</div>\n"
            +	"		<center>\n"
            +	"			<h1>User Manager</h1>\n"
            +	"			<p class=\"buttons\">\n"
            +	"				<input type=\"button\" onclick=\"showHideColumns()\" id=\"shRoles\" value=\"Hide Unused Roles\"/>\n"
            +	"				&nbsp;&nbsp;&nbsp;&nbsp;\n"
            +	"				<input type=\"button\" onclick=\"showRolesPopup()\" value=\"Show Role Definitions\"/>\n"
            +	"			</p>\n"
            //FIX: the form tag previously carried a duplicate action attribute
            //(action="/users" ... action=""), which is invalid HTML and could
            //make some browsers post to the wrong URL.
            +	"			<form id=\"formID\" action=\"/users\" method=\"post\" accept-charset=\"UTF-8\">\n"
        );
        if (home.equals("")) {
            sb.append(
                    "			<input type=\"hidden\" name=\"suppress\" value=\"\"/>\n"
            );
        }
        sb.append(
                "			<table id=\"userTable\" border=\"1\">\n"
        );
    }

    //Emit everything after the user table: form close and document close.
    private void responseTail(StringBuffer sb) {
        sb.append(
                "			</table>\n"
            +	"		</form>\n"
            +	"		</center>\n"
            +	"	</body>\n"
            +	"</html>\n"
        );
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.net;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
public class TestNetworkTopology {
private static final Logger LOG =
LoggerFactory.getLogger(TestNetworkTopology.class);
private final static NetworkTopology cluster =
NetworkTopology.getInstance(new Configuration());
private DatanodeDescriptor dataNodes[];
@Rule
public Timeout testTimeout = new Timeout(30000);
  @Before
  public void setupDatanodes() {
    // Topology: /d1/r1 x2, /d1/r2 x3, /d2/r3 x3, /d3/r1 x3, /d3/r2 x2,
    // /d4/r1 x7 — six racks across four data centers; several tests assert
    // against these exact counts and positions.
    dataNodes = new DatanodeDescriptor[] {
        DFSTestUtil.getDatanodeDescriptor("1.1.1.1", "/d1/r1"),
        DFSTestUtil.getDatanodeDescriptor("2.2.2.2", "/d1/r1"),
        DFSTestUtil.getDatanodeDescriptor("3.3.3.3", "/d1/r2"),
        DFSTestUtil.getDatanodeDescriptor("4.4.4.4", "/d1/r2"),
        DFSTestUtil.getDatanodeDescriptor("5.5.5.5", "/d1/r2"),
        DFSTestUtil.getDatanodeDescriptor("6.6.6.6", "/d2/r3"),
        DFSTestUtil.getDatanodeDescriptor("7.7.7.7", "/d2/r3"),
        DFSTestUtil.getDatanodeDescriptor("8.8.8.8", "/d2/r3"),
        DFSTestUtil.getDatanodeDescriptor("9.9.9.9", "/d3/r1"),
        DFSTestUtil.getDatanodeDescriptor("10.10.10.10", "/d3/r1"),
        DFSTestUtil.getDatanodeDescriptor("11.11.11.11", "/d3/r1"),
        DFSTestUtil.getDatanodeDescriptor("12.12.12.12", "/d3/r2"),
        DFSTestUtil.getDatanodeDescriptor("13.13.13.13", "/d3/r2"),
        DFSTestUtil.getDatanodeDescriptor("14.14.14.14", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("15.15.15.15", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("16.16.16.16", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("17.17.17.17", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("18.18.18.18", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("19.19.19.19", "/d4/r1"),
        DFSTestUtil.getDatanodeDescriptor("20.20.20.20", "/d4/r1"),
    };
    // Populate the shared static cluster; testRemove relies on re-adding
    // these same descriptors to restore this state.
    for (int i = 0; i < dataNodes.length; i++) {
      cluster.add(dataNodes[i]);
    }
    // Mark two /d3/r1 nodes decommissioned; testSortByDistance expects them
    // to be ordered after live nodes.
    dataNodes[9].setDecommissioned();
    dataNodes[10].setDecommissioned();
    GenericTestUtils.setLogLevel(NetworkTopology.LOG, Level.TRACE);
  }
@Test
public void testContains() throws Exception {
DatanodeDescriptor nodeNotInMap =
DFSTestUtil.getDatanodeDescriptor("8.8.8.8", "/d2/r4");
for (int i=0; i < dataNodes.length; i++) {
assertTrue(cluster.contains(dataNodes[i]));
}
assertFalse(cluster.contains(nodeNotInMap));
}
@Test
public void testNumOfChildren() throws Exception {
assertEquals(cluster.getNumOfLeaves(), dataNodes.length);
}
@Test
public void testCreateInvalidTopology() throws Exception {
NetworkTopology invalCluster =
NetworkTopology.getInstance(new Configuration());
DatanodeDescriptor invalDataNodes[] = new DatanodeDescriptor[] {
DFSTestUtil.getDatanodeDescriptor("1.1.1.1", "/d1/r1"),
DFSTestUtil.getDatanodeDescriptor("2.2.2.2", "/d1/r1"),
DFSTestUtil.getDatanodeDescriptor("3.3.3.3", "/d1")
};
invalCluster.add(invalDataNodes[0]);
invalCluster.add(invalDataNodes[1]);
try {
invalCluster.add(invalDataNodes[2]);
fail("expected InvalidTopologyException");
} catch (NetworkTopology.InvalidTopologyException e) {
assertTrue(e.getMessage().startsWith("Failed to add "));
assertTrue(e.getMessage().contains(
"You cannot have a rack and a non-rack node at the same " +
"level of the network topology."));
}
}
@Test
public void testRacks() throws Exception {
assertEquals(cluster.getNumOfRacks(), 6);
assertTrue(cluster.isOnSameRack(dataNodes[0], dataNodes[1]));
assertFalse(cluster.isOnSameRack(dataNodes[1], dataNodes[2]));
assertTrue(cluster.isOnSameRack(dataNodes[2], dataNodes[3]));
assertTrue(cluster.isOnSameRack(dataNodes[3], dataNodes[4]));
assertFalse(cluster.isOnSameRack(dataNodes[4], dataNodes[5]));
assertTrue(cluster.isOnSameRack(dataNodes[5], dataNodes[6]));
}
@Test
public void testGetDistance() throws Exception {
assertEquals(cluster.getDistance(dataNodes[0], dataNodes[0]), 0);
assertEquals(cluster.getDistance(dataNodes[0], dataNodes[1]), 2);
assertEquals(cluster.getDistance(dataNodes[0], dataNodes[3]), 4);
assertEquals(cluster.getDistance(dataNodes[0], dataNodes[6]), 6);
// verify the distance is zero as long as two nodes have the same path.
// They don't need to refer to the same object.
NodeBase node1 = new NodeBase(dataNodes[0].getHostName(),
dataNodes[0].getNetworkLocation());
NodeBase node2 = new NodeBase(dataNodes[0].getHostName(),
dataNodes[0].getNetworkLocation());
assertEquals(0, cluster.getDistance(node1, node2));
// verify the distance can be computed by path.
// They don't need to refer to the same object or parents.
NodeBase node3 = new NodeBase(dataNodes[3].getHostName(),
dataNodes[3].getNetworkLocation());
NodeBase node4 = new NodeBase(dataNodes[6].getHostName(),
dataNodes[6].getNetworkLocation());
assertEquals(0, NetworkTopology.getDistanceByPath(node1, node2));
assertEquals(4, NetworkTopology.getDistanceByPath(node2, node3));
assertEquals(6, NetworkTopology.getDistanceByPath(node2, node4));
}
  @Test
  public void testSortByDistance() throws Exception {
    // Verifies sortByDistance orders nodes local-first, then rack-local,
    // then remote, with decommissioned nodes pushed past the active window
    // and ties broken by a seeded shuffle.
    DatanodeDescriptor[] testNodes = new DatanodeDescriptor[3];

    // array contains both local node & local rack node
    testNodes[0] = dataNodes[1];
    testNodes[1] = dataNodes[2];
    testNodes[2] = dataNodes[0];
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dataNodes[0], testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[0]);
    assertTrue(testNodes[1] == dataNodes[1]);
    assertTrue(testNodes[2] == dataNodes[2]);

    // array contains both local node & local rack node & decommissioned node
    // only the first 3 entries are "active"; the two decommissioned nodes
    // (9 and 10) must land after them
    DatanodeDescriptor[] dtestNodes = new DatanodeDescriptor[5];
    dtestNodes[0] = dataNodes[8];
    dtestNodes[1] = dataNodes[12];
    dtestNodes[2] = dataNodes[11];
    dtestNodes[3] = dataNodes[9];
    dtestNodes[4] = dataNodes[10];
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dataNodes[8], dtestNodes, dtestNodes.length - 2);
    assertTrue(dtestNodes[0] == dataNodes[8]);
    assertTrue(dtestNodes[1] == dataNodes[11]);
    assertTrue(dtestNodes[2] == dataNodes[12]);
    assertTrue(dtestNodes[3] == dataNodes[9]);
    assertTrue(dtestNodes[4] == dataNodes[10]);

    // array contains local node
    testNodes[0] = dataNodes[1];
    testNodes[1] = dataNodes[3];
    testNodes[2] = dataNodes[0];
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dataNodes[0], testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[0]);
    assertTrue(testNodes[1] == dataNodes[1]);
    assertTrue(testNodes[2] == dataNodes[3]);

    // array contains local rack node
    testNodes[0] = dataNodes[5];
    testNodes[1] = dataNodes[3];
    testNodes[2] = dataNodes[1];
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dataNodes[0], testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[1]);
    assertTrue(testNodes[1] == dataNodes[3]);
    assertTrue(testNodes[2] == dataNodes[5]);

    // array contains local rack node which happens to be in position 0
    testNodes[0] = dataNodes[1];
    testNodes[1] = dataNodes[5];
    testNodes[2] = dataNodes[3];
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dataNodes[0], testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[1]);
    assertTrue(testNodes[1] == dataNodes[3]);
    assertTrue(testNodes[2] == dataNodes[5]);

    // Same as previous, but with a different random seed to test randomization
    testNodes[0] = dataNodes[1];
    testNodes[1] = dataNodes[5];
    testNodes[2] = dataNodes[3];
    cluster.setRandomSeed(0xDEAD);
    cluster.sortByDistance(dataNodes[0], testNodes, testNodes.length);
    // sortByDistance only shuffles nodes at the same distance, so the
    // result should be the same as the previous seed
    assertTrue(testNodes[0] == dataNodes[1]);
    assertTrue(testNodes[1] == dataNodes[3]);
    assertTrue(testNodes[2] == dataNodes[5]);

    // Array of just rack-local nodes
    // Expect a random first node
    DatanodeDescriptor first = null;
    boolean foundRandom = false;
    for (int i=5; i<=7; i++) {
      testNodes[0] = dataNodes[5];
      testNodes[1] = dataNodes[6];
      testNodes[2] = dataNodes[7];
      cluster.sortByDistance(dataNodes[i], testNodes, testNodes.length);
      if (first == null) {
        first = testNodes[0];
      } else {
        if (first != testNodes[0]) {
          foundRandom = true;
          break;
        }
      }
    }
    assertTrue("Expected to find a different first location", foundRandom);

    // Array of just remote nodes
    // Expect random first node
    // NOTE(review): foundRandom is not reset to false here, so if the
    // previous loop already set it, the assertion below is vacuous — confirm
    // whether a reset was intended.
    first = null;
    for (int i = 1; i <= 4; i++) {
      testNodes[0] = dataNodes[13];
      testNodes[1] = dataNodes[14];
      testNodes[2] = dataNodes[15];
      cluster.sortByDistance(dataNodes[i], testNodes, testNodes.length);
      if (first == null) {
        first = testNodes[0];
      } else {
        if (first != testNodes[0]) {
          foundRandom = true;
          break;
        }
      }
    }
    assertTrue("Expected to find a different first location", foundRandom);

    //Reader is not a datanode, but is in one of the datanode's rack.
    testNodes[0] = dataNodes[0];
    testNodes[1] = dataNodes[5];
    testNodes[2] = dataNodes[8];
    Node rackClient = new NodeBase("/d3/r1/25.25.25");
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(rackClient, testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[8]);
    assertTrue(testNodes[1] == dataNodes[5]);
    assertTrue(testNodes[2] == dataNodes[0]);

    //Reader is not a datanode , but is in one of the datanode's data center.
    testNodes[0] = dataNodes[8];
    testNodes[1] = dataNodes[5];
    testNodes[2] = dataNodes[0];
    Node dcClient = new NodeBase("/d1/r2/25.25.25");
    cluster.setRandomSeed(0xDEADBEEF);
    cluster.sortByDistance(dcClient, testNodes, testNodes.length);
    assertTrue(testNodes[0] == dataNodes[0]);
    assertTrue(testNodes[1] == dataNodes[5]);
    assertTrue(testNodes[2] == dataNodes[8]);
  }
@Test
public void testRemove() throws Exception {
for(int i=0; i<dataNodes.length; i++) {
cluster.remove(dataNodes[i]);
}
for(int i=0; i<dataNodes.length; i++) {
assertFalse(cluster.contains(dataNodes[i]));
}
assertEquals(0, cluster.getNumOfLeaves());
assertEquals(0, cluster.clusterMap.getChildren().size());
for(int i=0; i<dataNodes.length; i++) {
cluster.add(dataNodes[i]);
}
}
/**
* This picks a large number of nodes at random in order to ensure coverage
*
* @param numNodes the number of nodes
* @param excludedScope the excluded scope
* @return the frequency that nodes were chosen
*/
private Map<Node, Integer> pickNodesAtRandom(int numNodes,
String excludedScope, Collection<Node> excludedNodes) {
Map<Node, Integer> frequency = new HashMap<Node, Integer>();
for (DatanodeDescriptor dnd : dataNodes) {
frequency.put(dnd, 0);
}
for (int j = 0; j < numNodes; j++) {
Node random = cluster.chooseRandom(excludedScope, excludedNodes);
if (random != null) {
frequency.put(random, frequency.get(random) + 1);
}
}
LOG.info("Result:" + frequency);
return frequency;
}
/**
 * This test checks that chooseRandom works for an excluded node.
 */
@Test
public void testChooseRandomExcludedNode() {
  final String scope = "~" + NodeBase.getPath(dataNodes[0]);
  final Map<Node, Integer> frequency = pickNodesAtRandom(100, scope, null);
  // Every node other than the excluded first one must be picked at least once.
  for (Node node : dataNodes) {
    assertTrue(node == dataNodes[0] || frequency.get(node) > 0);
  }
}
/**
 * This test checks that chooseRandom works for an excluded rack.
 */
@Test
public void testChooseRandomExcludedRack() {
  final Map<Node, Integer> frequency = pickNodesAtRandom(100, "~" + "/d2", null);
  // Nodes on the excluded /d2 rack must never be chosen; all others must be.
  for (DatanodeDescriptor node : dataNodes) {
    final int count = frequency.get(node);
    if (node.getNetworkLocation().startsWith("/d2")) {
      assertEquals(0, count);
    } else {
      assertTrue(count > 0);
    }
  }
}
/**
 * This test checks that chooseRandom works for a list of excluded nodes.
 */
@Test
public void testChooseRandomExcludedNodeList() {
  String scope = "~" + NodeBase.getPath(dataNodes[0]);
  // Indices of the datanodes that must never be chosen.
  final int[] excludedIndices = {3, 5, 7, 9, 13, 18};
  Set<Node> excludedNodes = new HashSet<>();
  for (int index : excludedIndices) {
    excludedNodes.add(dataNodes[index]);
  }
  Map<Node, Integer> frequency = pickNodesAtRandom(100, scope, excludedNodes);
  // Loop replaces six copy-pasted asserts (one of which carried the
  // typo'd message "should be exclude18d").
  for (int index : excludedIndices) {
    assertEquals("dn[" + index + "] should be excluded", 0,
        frequency.get(dataNodes[index]).intValue());
  }
  for (Node key : dataNodes) {
    if (excludedNodes.contains(key)) {
      continue;
    }
    // all nodes except the first should be more than zero
    assertTrue(frequency.get(key) > 0 || key == dataNodes[0]);
  }
}
/**
 * This test checks that chooseRandom works when all nodes are excluded.
 */
@Test
public void testChooseRandomExcludeAllNodes() {
String scope = "~" + NodeBase.getPath(dataNodes[0]);
Set<Node> excludedNodes = new HashSet<>();
// Exclude every datanode so chooseRandom can never return one.
for (int i = 0; i < dataNodes.length; i++) {
excludedNodes.add(dataNodes[i]);
}
Map<Node, Integer> frequency = pickNodesAtRandom(100, scope, excludedNodes);
for (Node key : dataNodes) {
// with everything excluded, no node should ever have been chosen
assertTrue(frequency.get(key) == 0);
}
}
/**
 * Verifies that a datanode rejected for an invalid (mismatched-depth) rack
 * assignment is not cached: after fixing its mapping and restarting it, the
 * node registers successfully with the corrected network location.
 */
@Test(timeout=180000)
public void testInvalidNetworkTopologiesNotCachedInHdfs() throws Exception {
// start a cluster
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
// bad rack topology: the two racks have different depths, which the
// namenode's topology validation rejects for one of the nodes
String racks[] = { "/a/b", "/c" };
String hosts[] = { "foo1.example.com", "foo2.example.com" };
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).
racks(racks).hosts(hosts).build();
cluster.waitActive();
NamenodeProtocols nn = cluster.getNameNodeRpc();
Assert.assertNotNull(nn);
// Wait for one DataNode to register.
// The other DataNode will not be able to register up because of the rack mismatch.
DatanodeInfo[] info;
while (true) {
info = nn.getDatanodeReport(DatanodeReportType.LIVE);
Assert.assertFalse(info.length == 2);
if (info.length == 1) {
break;
}
Thread.sleep(1000);
}
// Set the network topology of the other node to the match the network
// topology of the node that came up.
int validIdx = info[0].getHostName().equals(hosts[0]) ? 0 : 1;
int invalidIdx = validIdx == 1 ? 0 : 1;
StaticMapping.addNodeToRack(hosts[invalidIdx], racks[validIdx]);
LOG.info("datanode " + validIdx + " came up with network location " +
info[0].getNetworkLocation());
// Restart the DN with the invalid topology and wait for it to register.
cluster.restartDataNode(invalidIdx);
Thread.sleep(5000);
while (true) {
info = nn.getDatanodeReport(DatanodeReportType.LIVE);
if (info.length == 2) {
break;
}
if (info.length == 0) {
LOG.info("got no valid DNs");
} else if (info.length == 1) {
LOG.info("got one valid DN: " + info[0].getHostName() +
" (at " + info[0].getNetworkLocation() + ")");
}
Thread.sleep(1000);
}
// Both nodes now report the same (corrected) location, proving the
// earlier invalid topology was not cached by the namenode.
Assert.assertEquals(info[0].getNetworkLocation(),
info[1].getNetworkLocation());
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Tests chooseRandom with include scope, excluding a few nodes.
 */
@Test
public void testChooseRandomInclude1() {
  final String scope = "/d1";
  final Set<Node> excludedNodes = new HashSet<>();
  final Random r = new Random();
  // Exclude up to four (possibly duplicate) nodes from the first five.
  for (int i = 0; i < 4; ++i) {
    excludedNodes.add(dataNodes[r.nextInt(5)]);
  }
  final Map<Node, Integer> frequency =
      pickNodesAtRandom(100, scope, excludedNodes);
  verifyResults(5, excludedNodes, frequency);
}
/**
 * Tests chooseRandom with include scope at rack, excluding a node.
 */
@Test
public void testChooseRandomInclude2() {
  String scope = dataNodes[0].getNetworkLocation();
  Set<Node> excludedNodes = new HashSet<>();
  // The original used new Random().nextInt(1), which can only ever return 0;
  // exclude dataNodes[0] directly to make that intent explicit.
  excludedNodes.add(dataNodes[0]);
  final int count = 100;
  Map<Node, Integer> frequency =
      pickNodesAtRandom(count, scope, excludedNodes);
  verifyResults(1, excludedNodes, frequency);
}
/**
 * Asserts that, among dataNodes[0..upperbound), excluded nodes were never
 * chosen and every other node was chosen at least once.
 */
private void verifyResults(int upperbound, Set<Node> excludedNodes,
    Map<Node, Integer> frequency) {
  LOG.info("Excluded nodes are: {}", excludedNodes);
  for (int i = 0; i < upperbound; ++i) {
    final Node node = dataNodes[i];
    LOG.info("Verifying node {}", node);
    if (excludedNodes.contains(node)) {
      assertEquals(node + " should not have been chosen.", 0,
          (int) frequency.get(node));
    } else {
      assertTrue(node + " should have been chosen", frequency.get(node) > 0);
    }
  }
}
/**
 * Tests chooseRandom with include scope, no exclude nodes.
 */
@Test
public void testChooseRandomInclude3() {
String scope = "/d1";
Map<Node, Integer> frequency = pickNodesAtRandom(200, scope, null);
LOG.info("No node is excluded.");
for (int i = 0; i < 5; ++i) {
// all nodes should be more than zero
assertTrue(dataNodes[i] + " should have been chosen.",
frequency.get(dataNodes[i]) > 0);
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.Collections.emptyList;
import static org.elasticsearch.common.settings.Setting.listSetting;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
*
*/
public class TransportService extends AbstractLifecycleComponent<TransportService> {
public static final String DIRECT_RESPONSE_PROFILE = ".direct";
private static final String HANDSHAKE_ACTION_NAME = "internal:transport/handshake";
// Released by acceptIncomingRequests(); incoming requests block on it until then.
private final CountDownLatch blockIncomingRequestsLatch = new CountDownLatch(1);
protected final Transport transport;
protected final ThreadPool threadPool;
protected final ClusterName clusterName;
protected final TaskManager taskManager;
// Immutable snapshot, replaced wholesale under requestHandlerMutex (copy-on-write).
volatile Map<String, RequestHandlerRegistry> requestHandlers = Collections.emptyMap();
final Object requestHandlerMutex = new Object();
// In-flight outbound requests, keyed by request id.
final ConcurrentMapLong<RequestHolder> clientHandlers = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
final AtomicLong requestIds = new AtomicLong();
final CopyOnWriteArrayList<TransportConnectionListener> connectionListeners = new CopyOnWriteArrayList<>();
// An LRU (don't really care about concurrency here) that holds the latest timed out requests so if they
// do show up, we can print more descriptive information about them
final Map<Long, TimeoutInfoHolder> timeoutInfoHandlers =
Collections.synchronizedMap(new LinkedHashMap<Long, TimeoutInfoHolder>(100, .75F, true) {
@Override
protected boolean removeEldestEntry(Map.Entry eldest) {
return size() > 100;
}
});
private final TransportService.Adapter adapter;
// tracer log
public static final Setting<List<String>> TRACE_LOG_INCLUDE_SETTING =
listSetting("transport.tracer.include", emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope);
public static final Setting<List<String>> TRACE_LOG_EXCLUDE_SETTING =
listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME),
Function.identity(), Property.Dynamic, Property.NodeScope);
private final ESLogger tracerLog;
volatile String[] tracerLogInclude;
// NOTE(review): field name is misspelled ("tracelLogExclude"); renaming would
// touch every usage across this class, so it is only flagged here.
volatile String[] tracelLogExclude;
/** if set will call requests sent to this id to shortcut and executed locally */
volatile DiscoveryNode localNode = null;
/**
 * Creates a transport service bound to the given low-level transport and
 * thread pool; tracer filters are seeded from static settings.
 */
@Inject
public TransportService(Settings settings, Transport transport, ThreadPool threadPool) {
super(settings);
this.transport = transport;
this.threadPool = threadPool;
this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
// Dynamic updates to these filters arrive later via setDynamicSettings().
setTracerLogInclude(TRACE_LOG_INCLUDE_SETTING.get(settings));
setTracerLogExclude(TRACE_LOG_EXCLUDE_SETTING.get(settings));
tracerLog = Loggers.getLogger(logger, ".tracer");
adapter = createAdapter();
taskManager = createTaskManager();
}
/**
 * makes the transport service aware of the local node. this allows it to optimize requests sent
 * from the local node to it self and by pass the network stack/ serialization
 */
public void setLocalNode(DiscoveryNode localNode) {
this.localNode = localNode;
}
// for testing
DiscoveryNode getLocalNode() {
return localNode;
}
/** Returns the task manager that tracks tasks created for incoming requests. */
public TaskManager getTaskManager() {
return taskManager;
}
// Factory hooks so subclasses (e.g. in tests) can substitute implementations.
protected Adapter createAdapter() {
return new Adapter();
}
protected TaskManager createTaskManager() {
return new TaskManager(settings);
}
// These need to be optional as they don't exist in the context of a transport client
@Inject(optional = true)
public void setDynamicSettings(ClusterSettings clusterSettings) {
// Keep the tracer include/exclude filters in sync with dynamic cluster settings.
clusterSettings.addSettingsUpdateConsumer(TRACE_LOG_INCLUDE_SETTING, this::setTracerLogInclude);
clusterSettings.addSettingsUpdateConsumer(TRACE_LOG_EXCLUDE_SETTING, this::setTracerLogExclude);
}
void setTracerLogInclude(List<String> tracerLogInclude) {
this.tracerLogInclude = tracerLogInclude.toArray(Strings.EMPTY_ARRAY);
}
void setTracerLogExclude(List<String> tracelLogExclude) {
this.tracelLogExclude = tracelLogExclude.toArray(Strings.EMPTY_ARRAY);
}
@Override
protected void doStart() {
// Reset metrics, wire this service into the transport, then start it.
adapter.rxMetric.clear();
adapter.txMetric.clear();
transport.transportServiceAdapter(adapter);
transport.start();
if (transport.boundAddress() != null && logger.isInfoEnabled()) {
logger.info("{}", transport.boundAddress());
for (Map.Entry<String, BoundTransportAddress> entry : transport.profileBoundAddresses().entrySet()) {
logger.info("profile [{}]: {}", entry.getKey(), entry.getValue());
}
}
// Register the internal handshake endpoint used by light connects to
// exchange cluster name and version.
registerRequestHandler(
HANDSHAKE_ACTION_NAME,
() -> HandshakeRequest.INSTANCE,
ThreadPool.Names.SAME,
(request, channel) -> channel.sendResponse(
new HandshakeResponse(localNode, clusterName, localNode != null ? localNode.getVersion() : Version.CURRENT)));
}
@Override
protected void doStop() {
// Stop the transport first; the cleanup below runs regardless of failures.
try {
transport.stop();
} finally {
// in case the transport is not connected to our local node (thus cleaned on node disconnect)
// make sure to clean any leftover on going handles
for (Map.Entry<Long, RequestHolder> entry : clientHandlers.entrySet()) {
// remove() returning null means another path already notified this handler
final RequestHolder holderToNotify = clientHandlers.remove(entry.getKey());
if (holderToNotify != null) {
// callback that an exception happened, but on a different thread since we don't
// want handlers to worry about stack overflows
threadPool.generic().execute(new AbstractRunnable() {
@Override
public void onRejection(Throwable t) {
// if we get rejected during node shutdown we don't wanna bubble it up
logger.debug("failed to notify response handler on rejection, action: {}", t, holderToNotify.action());
}
@Override
public void onFailure(Throwable t) {
logger.warn("failed to notify response handler on exception, action: {}", t, holderToNotify.action());
}
@Override
public void doRun() {
TransportException ex = new TransportException("transport stopped, action: " + holderToNotify.action());
holderToNotify.handler().handleException(ex);
}
});
}
}
}
}
@Override
protected void doClose() {
transport.close();
}
/**
 * start accepting incoming requests.
 * when the transport layer starts up it will block any incoming requests until
 * this method is called
 */
public void acceptIncomingRequests() {
blockIncomingRequestsLatch.countDown();
}
/** Returns true if the underlying transport supports the given address type. */
public boolean addressSupported(Class<? extends TransportAddress> address) {
return transport.addressSupported(address);
}
/** Returns transport info, or {@code null} when the transport is not yet bound. */
public TransportInfo info() {
BoundTransportAddress boundTransportAddress = boundAddress();
if (boundTransportAddress == null) {
return null;
}
return new TransportInfo(boundTransportAddress, transport.profileBoundAddresses());
}
/** Returns cumulative rx/tx metrics plus the number of open server channels. */
public TransportStats stats() {
return new TransportStats(
transport.serverOpen(), adapter.rxMetric.count(), adapter.rxMetric.sum(), adapter.txMetric.count(), adapter.txMetric.sum());
}
public BoundTransportAddress boundAddress() {
return transport.boundAddress();
}
public List<String> getLocalAddresses() {
return transport.getLocalAddresses();
}
// The local node is always considered connected.
public boolean nodeConnected(DiscoveryNode node) {
return node.equals(localNode) || transport.nodeConnected(node);
}
/** Connects to the given node; a no-op when the node is the local node. */
public void connectToNode(DiscoveryNode node) throws ConnectTransportException {
if (node.equals(localNode)) {
return;
}
transport.connectToNode(node);
}
/**
 * Lightly connect to the specified node
 *
 * @param node the node to connect to
 */
public void connectToNodeLight(final DiscoveryNode node) {
if (node.equals(localNode)) {
return;
}
transport.connectToNodeLight(node);
}
/**
 * Lightly connect to the specified node, and handshake cluster
 * name and version
 *
 * @param node the node to connect to
 * @param handshakeTimeout handshake timeout
 * @return the connected node with version set
 * @throws ConnectTransportException if the connection or the
 * handshake failed
 */
public DiscoveryNode connectToNodeLightAndHandshake(
final DiscoveryNode node,
final long handshakeTimeout) throws ConnectTransportException {
// Delegates with cluster-name checking enabled.
return connectToNodeLightAndHandshake(node, handshakeTimeout, true);
}
/**
 * Lightly connect to the specified node, returning updated node
 * information. The handshake will fail if the cluster name on the
 * target node mismatches the local cluster name and
 * {@code checkClusterName} is {@code true}.
 *
 * @param node the node to connect to
 * @param handshakeTimeout handshake timeout
 * @param checkClusterName whether to fail the handshake on a cluster
 *                         name mismatch (when {@code false}, mismatches
 *                         are ignored)
 * @return the connected node
 * @throws ConnectTransportException if the connection failed
 * @throws IllegalStateException if the handshake failed
 */
public DiscoveryNode connectToNodeLightAndHandshake(
final DiscoveryNode node,
final long handshakeTimeout,
final boolean checkClusterName) {
if (node.equals(localNode)) {
return localNode;
}
transport.connectToNodeLight(node);
try {
return handshake(node, handshakeTimeout, checkClusterName);
} catch (ConnectTransportException | IllegalStateException e) {
// Don't leave a half-usable connection behind when the handshake fails.
transport.disconnectFromNode(node);
throw e;
}
}
/**
 * Performs the internal handshake against an already-connected node,
 * validating the cluster name (when requested) and wire-version compatibility.
 *
 * @throws IllegalStateException if the handshake request fails, the cluster
 *         names mismatch (when checked), or the versions are incompatible
 */
private DiscoveryNode handshake(
final DiscoveryNode node,
final long handshakeTimeout,
final boolean checkClusterName) throws ConnectTransportException {
final HandshakeResponse response;
try {
response = this.submitRequest(
node,
HANDSHAKE_ACTION_NAME,
HandshakeRequest.INSTANCE,
TransportRequestOptions.builder().withTimeout(handshakeTimeout).build(),
new FutureTransportResponseHandler<HandshakeResponse>() {
@Override
public HandshakeResponse newInstance() {
return new HandshakeResponse();
}
}).txGet();
} catch (Exception e) {
throw new IllegalStateException("handshake failed with " + node, e);
}
if (checkClusterName && !Objects.equals(clusterName, response.clusterName)) {
throw new IllegalStateException("handshake failed, mismatched cluster name [" + response.clusterName + "] - " + node);
} else if (!isVersionCompatible(response.version)) {
throw new IllegalStateException("handshake failed, incompatible version [" + response.version + "] - " + node);
}
return response.discoveryNode;
}
/** True when the remote version shares our minimum wire-compatibility version. */
private boolean isVersionCompatible(Version version) {
    final Version localVersion = localNode != null ? localNode.getVersion() : Version.CURRENT;
    return version.minimumCompatibilityVersion().equals(localVersion.minimumCompatibilityVersion());
}
/** Empty singleton request for the internal handshake action. */
static class HandshakeRequest extends TransportRequest {
public static final HandshakeRequest INSTANCE = new HandshakeRequest();
private HandshakeRequest() {
}
}
/** Handshake reply carrying the responder's node identity, cluster name and version. */
static class HandshakeResponse extends TransportResponse {
private DiscoveryNode discoveryNode;
private ClusterName clusterName;
private Version version;
// No-arg constructor used when deserializing via readFrom().
public HandshakeResponse() {
}
public HandshakeResponse(DiscoveryNode discoveryNode, ClusterName clusterName, Version version) {
this.discoveryNode = discoveryNode;
this.version = version;
this.clusterName = clusterName;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
// Order must mirror writeTo(): optional node, cluster name, version.
discoveryNode = in.readOptionalWriteable(DiscoveryNode::new);
clusterName = new ClusterName(in);
version = Version.readVersion(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalWriteable(discoveryNode);
clusterName.writeTo(out);
Version.writeVersion(version, out);
}
}
/** Disconnects from the given node; a no-op for the local node. */
public void disconnectFromNode(DiscoveryNode node) {
if (node.equals(localNode)) {
return;
}
transport.disconnectFromNode(node);
}
public void addConnectionListener(TransportConnectionListener listener) {
connectionListeners.add(listener);
}
public void removeConnectionListener(TransportConnectionListener listener) {
connectionListeners.remove(listener);
}
/** Sends a request with default options, returning a future for the response. */
public <T extends TransportResponse> TransportFuture<T> submitRequest(DiscoveryNode node, String action, TransportRequest request,
TransportResponseHandler<T> handler) throws TransportException {
return submitRequest(node, action, request, TransportRequestOptions.EMPTY, handler);
}
/** Sends a request with explicit options, returning a future for the response. */
public <T extends TransportResponse> TransportFuture<T> submitRequest(DiscoveryNode node, String action, TransportRequest request,
TransportRequestOptions options,
TransportResponseHandler<T> handler) throws TransportException {
PlainTransportFuture<T> futureHandler = new PlainTransportFuture<>(handler);
sendRequest(node, action, request, options, futureHandler);
return futureHandler;
}
/** Sends a request with default options, delivering the response to {@code handler}. */
public <T extends TransportResponse> void sendRequest(final DiscoveryNode node, final String action, final TransportRequest request,
final TransportResponseHandler<T> handler) {
sendRequest(node, action, request, TransportRequestOptions.EMPTY, handler);
}
/**
 * Sends a request to the given node. A timeout handler is scheduled when the
 * options carry a timeout; requests addressed to the local node bypass the
 * network and are dispatched directly to the registered handler.
 */
public <T extends TransportResponse> void sendRequest(final DiscoveryNode node, final String action, final TransportRequest request,
final TransportRequestOptions options, TransportResponseHandler<T> handler) {
if (node == null) {
throw new IllegalStateException("can't send request to a null node");
}
final long requestId = newRequestId();
final TimeoutHandler timeoutHandler;
try {
if (options.timeout() == null) {
timeoutHandler = null;
} else {
timeoutHandler = new TimeoutHandler(requestId);
}
// Wrap so the caller's thread context is restored when the response arrives.
TransportResponseHandler<T> responseHandler =
new ContextRestoreResponseHandler<>(threadPool.getThreadContext().newStoredContext(), handler);
clientHandlers.put(requestId, new RequestHolder<>(responseHandler, node, action, timeoutHandler));
if (lifecycle.stoppedOrClosed()) {
// if we are not started the exception handling will remove the RequestHolder again and calls the handler to notify
// the caller. It will only notify if the toStop code hasn't done the work yet.
throw new TransportException("TransportService is closed stopped can't send request");
}
if (timeoutHandler != null) {
assert options.timeout() != null;
timeoutHandler.future = threadPool.schedule(options.timeout(), ThreadPool.Names.GENERIC, timeoutHandler);
}
if (node.equals(localNode)) {
sendLocalRequest(requestId, action, request);
} else {
transport.sendRequest(node, requestId, action, request, options);
}
} catch (final Throwable e) {
// usually happen either because we failed to connect to the node
// or because we failed serializing the message
final RequestHolder holderToNotify = clientHandlers.remove(requestId);
// If holderToNotify == null then handler has already been taken care of.
if (holderToNotify != null) {
holderToNotify.cancelTimeout();
// callback that an exception happened, but on a different thread since we don't
// want handlers to worry about stack overflows
final SendRequestTransportException sendRequestException = new SendRequestTransportException(node, action, e);
threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() {
@Override
public void onRejection(Throwable t) {
// if we get rejected during node shutdown we don't wanna bubble it up
logger.debug("failed to notify response handler on rejection, action: {}", t, holderToNotify.action());
}
@Override
public void onFailure(Throwable t) {
logger.warn("failed to notify response handler on exception, action: {}", t, holderToNotify.action());
}
@Override
protected void doRun() throws Exception {
holderToNotify.handler().handleException(sendRequestException);
}
});
}
}
}
/**
 * Dispatches a request to a handler registered on this very node, bypassing
 * the network layer. The response is delivered through a
 * {@link DirectResponseChannel} correlated by {@code requestId}.
 *
 * @param requestId id correlating the response with the caller's handler
 * @param action    the action to execute
 * @param request   the request handed to the registered handler
 */
private void sendLocalRequest(long requestId, final String action, final TransportRequest request) {
    final DirectResponseChannel channel = new DirectResponseChannel(logger, localNode, action, requestId, adapter, threadPool);
    try {
        final RequestHandlerRegistry reg = adapter.getRequestHandler(action);
        if (reg == null) {
            throw new ActionNotFoundTransportException("Action [" + action + "] not found");
        }
        final String executor = reg.getExecutor();
        if (ThreadPool.Names.SAME.equals(executor)) {
            //noinspection unchecked
            reg.processMessageReceived(request, channel);
        } else {
            threadPool.executor(executor).execute(new AbstractRunnable() {
                @Override
                protected void doRun() throws Exception {
                    //noinspection unchecked
                    reg.processMessageReceived(request, channel);
                }
                @Override
                public boolean isForceExecution() {
                    return reg.isForceExecution();
                }
                @Override
                public void onFailure(Throwable e) {
                    try {
                        channel.sendResponse(e);
                    } catch (Throwable e1) {
                        logger.warn("failed to notify channel of error message for action [{}]", e1, action);
                        logger.warn("actual exception", e);
                    }
                }
            });
        }
    } catch (Throwable e) {
        try {
            channel.sendResponse(e);
        } catch (Throwable e1) {
            logger.warn("failed to notify channel of error message for action [{}]", e1, action);
            // BUG FIX: log the original failure (e), not the notification
            // failure (e1) that the line above already reported — matching
            // the inner onFailure handler above.
            logger.warn("actual exception", e);
        }
    }
}
/** Decides whether the tracer log should record the given action. */
private boolean shouldTraceAction(String action) {
    // An include list, when present, must match the action.
    if (tracerLogInclude.length > 0 && !Regex.simpleMatch(tracerLogInclude, action)) {
        return false;
    }
    // An exclude list, when present, vetoes matching actions.
    return tracelLogExclude.length == 0 || !Regex.simpleMatch(tracelLogExclude, action);
}
// Monotonically increasing id used to correlate requests with responses.
private long newRequestId() {
return requestIds.getAndIncrement();
}
/** Delegates address-string parsing to the underlying transport. */
public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception {
return transport.addressesFromString(address, perAddressLimit);
}
/**
 * Registers a new request handler
 *
 * @param action The action the request handler is associated with
 * @param requestFactory a callable to be used construct new instances for streaming
 * @param executor The executor the request handling will be executed on
 * @param handler The handler itself that implements the request handling
 */
public <Request extends TransportRequest> void registerRequestHandler(String action, Supplier<Request> requestFactory, String executor,
TransportRequestHandler<Request> handler) {
// forceExecution=false, canTripCircuitBreaker=true are the defaults here.
RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(
action, requestFactory, taskManager, handler, executor, false, true);
registerRequestHandler(reg);
}
/**
 * Registers a new request handler
 *
 * @param action The action the request handler is associated with
 * @param request The request class that will be used to construct new instances for streaming
 * @param executor The executor the request handling will be executed on
 * @param forceExecution Force execution on the executor queue and never reject it
 * @param canTripCircuitBreaker Check the request size and raise an exception in case the limit is breached.
 * @param handler The handler itself that implements the request handling
 */
public <Request extends TransportRequest> void registerRequestHandler(String action, Supplier<Request> request,
String executor, boolean forceExecution,
boolean canTripCircuitBreaker,
TransportRequestHandler<Request> handler) {
RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(
action, request, taskManager, handler, executor, forceExecution, canTripCircuitBreaker);
registerRequestHandler(reg);
}
protected <Request extends TransportRequest> void registerRequestHandler(RequestHandlerRegistry<Request> reg) {
synchronized (requestHandlerMutex) {
RequestHandlerRegistry replaced = requestHandlers.get(reg.getAction());
// Copy-on-write: publish a fresh immutable map so readers never lock.
requestHandlers = MapBuilder.newMapBuilder(requestHandlers).put(reg.getAction(), reg).immutableMap();
if (replaced != null) {
logger.warn("registered two transport handlers for action {}, handlers: {}, {}", reg.getAction(), reg, replaced);
}
}
}
public void removeHandler(String action) {
synchronized (requestHandlerMutex) {
requestHandlers = MapBuilder.newMapBuilder(requestHandlers).remove(action).immutableMap();
}
}
protected RequestHandlerRegistry getRequestHandler(String action) {
return requestHandlers.get(action);
}
protected class Adapter implements TransportServiceAdapter {
final MeanMetric rxMetric = new MeanMetric();
final MeanMetric txMetric = new MeanMetric();
@Override
public void received(long size) {
rxMetric.inc(size);
}
@Override
public void sent(long size) {
txMetric.inc(size);
}
@Override
public void onRequestSent(DiscoveryNode node, long requestId, String action, TransportRequest request,
TransportRequestOptions options) {
if (traceEnabled() && shouldTraceAction(action)) {
traceRequestSent(node, requestId, action, options);
}
}
protected boolean traceEnabled() {
return tracerLog.isTraceEnabled();
}
@Override
public void onResponseSent(long requestId, String action, TransportResponse response, TransportResponseOptions options) {
if (traceEnabled() && shouldTraceAction(action)) {
traceResponseSent(requestId, action);
}
}
@Override
public void onResponseSent(long requestId, String action, Throwable t) {
if (traceEnabled() && shouldTraceAction(action)) {
traceResponseSent(requestId, action, t);
}
}
protected void traceResponseSent(long requestId, String action, Throwable t) {
tracerLog.trace("[{}][{}] sent error response", t, requestId, action);
}
@Override
public void onRequestReceived(long requestId, String action) {
try {
blockIncomingRequestsLatch.await();
} catch (InterruptedException e) {
logger.trace("interrupted while waiting for incoming requests block to be removed");
}
if (traceEnabled() && shouldTraceAction(action)) {
traceReceivedRequest(requestId, action);
}
}
@Override
public RequestHandlerRegistry getRequestHandler(String action) {
return requestHandlers.get(action);
}
@Override
public TransportResponseHandler onResponseReceived(final long requestId) {
RequestHolder holder = clientHandlers.remove(requestId);
if (holder == null) {
checkForTimeout(requestId);
return null;
}
holder.cancelTimeout();
if (traceEnabled() && shouldTraceAction(holder.action())) {
traceReceivedResponse(requestId, holder.node(), holder.action());
}
return holder.handler();
}
protected void checkForTimeout(long requestId) {
// lets see if its in the timeout holder, but sync on mutex to make sure any ongoing timeout handling has finished
final DiscoveryNode sourceNode;
final String action;
assert clientHandlers.get(requestId) == null;
TimeoutInfoHolder timeoutInfoHolder = timeoutInfoHandlers.remove(requestId);
if (timeoutInfoHolder != null) {
long time = System.currentTimeMillis();
logger.warn("Received response for a request that has timed out, sent [{}ms] ago, timed out [{}ms] ago, " +
"action [{}], node [{}], id [{}]", time - timeoutInfoHolder.sentTime(), time - timeoutInfoHolder.timeoutTime(),
timeoutInfoHolder.action(), timeoutInfoHolder.node(), requestId);
action = timeoutInfoHolder.action();
sourceNode = timeoutInfoHolder.node();
} else {
logger.warn("Transport response handler not found of id [{}]", requestId);
action = null;
sourceNode = null;
}
// call tracer out of lock
if (traceEnabled() == false) {
return;
}
if (action == null) {
assert sourceNode == null;
traceUnresolvedResponse(requestId);
} else if (shouldTraceAction(action)) {
traceReceivedResponse(requestId, sourceNode, action);
}
}
@Override
public void raiseNodeConnected(final DiscoveryNode node) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (TransportConnectionListener connectionListener : connectionListeners) {
connectionListener.onNodeConnected(node);
}
}
});
}
@Override
public void raiseNodeDisconnected(final DiscoveryNode node) {
    try {
        // Notify each listener on its own generic-pool task so one slow or failing
        // listener cannot delay the others.
        for (final TransportConnectionListener connectionListener : connectionListeners) {
            threadPool.generic().execute(new Runnable() {
                @Override
                public void run() {
                    connectionListener.onNodeDisconnected(node);
                }
            });
        }
        // Fail every in-flight request that was bound for the disconnected node.
        // NOTE(review): removing entries while iterating entrySet() presumes
        // clientHandlers is a concurrent map — confirm against its declaration.
        for (Map.Entry<Long, RequestHolder> entry : clientHandlers.entrySet()) {
            RequestHolder holder = entry.getValue();
            if (holder.node().equals(node)) {
                // remove() returning null means another thread already completed this request
                final RequestHolder holderToNotify = clientHandlers.remove(entry.getKey());
                if (holderToNotify != null) {
                    // callback that an exception happened, but on a different thread since we don't
                    // want handlers to worry about stack overflows
                    threadPool.generic().execute(new Runnable() {
                        @Override
                        public void run() {
                            holderToNotify.handler().handleException(new NodeDisconnectedException(node, holderToNotify.action()));
                        }
                    });
                }
            }
        }
    } catch (EsRejectedExecutionException ex) {
        // pool is shutting down; dropping the remaining notifications is deliberate
        logger.debug("Rejected execution on NodeDisconnected", ex);
    }
}
/** Trace-logs that a request with the given id/action was received. */
protected void traceReceivedRequest(long requestId, String action) {
    tracerLog.trace("[{}][{}] received request", requestId, action);
}
/** Trace-logs that a response was sent for the given request id/action. */
protected void traceResponseSent(long requestId, String action) {
    tracerLog.trace("[{}][{}] sent response", requestId, action);
}
/** Trace-logs a response received from {@code sourceNode} for the given request. */
protected void traceReceivedResponse(long requestId, DiscoveryNode sourceNode, String action) {
    tracerLog.trace("[{}][{}] received response from [{}]", requestId, action, sourceNode);
}
/** Trace-logs a response whose request id no longer maps to a pending request. */
protected void traceUnresolvedResponse(long requestId) {
    tracerLog.trace("[{}] received response but can't resolve it to a request", requestId);
}
/** Trace-logs that a request was sent to {@code node} with the given options. */
protected void traceRequestSent(DiscoveryNode node, long requestId, String action, TransportRequestOptions options) {
    tracerLog.trace("[{}][{}] sent to [{}] (timeout: [{}])", requestId, action, node, options.timeout());
}
}
/**
 * Scheduled task that fails a pending request with a timeout exception if no response
 * arrived in time. Before failing the request it records a {@code TimeoutInfoHolder}
 * so that a late-arriving response can still be correlated and logged.
 */
class TimeoutHandler implements Runnable {
    /** Id of the request this task watches. */
    private final long requestId;
    /** Wall-clock time (ms) at which the request was sent (captured at construction). */
    private final long sentTime = System.currentTimeMillis();
    /** The scheduled execution of this task; read by {@link #cancel()}.
        NOTE(review): presumably assigned right after scheduling — confirm at the call site. */
    volatile ScheduledFuture future;
    TimeoutHandler(long requestId) {
        this.requestId = requestId;
    }
    @Override
    public void run() {
        // we get first to make sure we only add the TimeoutInfoHandler if needed.
        final RequestHolder holder = clientHandlers.get(requestId);
        if (holder != null) {
            // add it to the timeout information holder, in case we are going to get a response later
            long timeoutTime = System.currentTimeMillis();
            timeoutInfoHandlers.put(requestId, new TimeoutInfoHolder(holder.node(), holder.action(), sentTime, timeoutTime));
            // now that we have the information visible via timeoutInfoHandlers, we try to remove the request id
            final RequestHolder removedHolder = clientHandlers.remove(requestId);
            if (removedHolder != null) {
                // we won the race against the response path: deliver the timeout failure
                assert removedHolder == holder : "two different holder instances for request [" + requestId + "]";
                removedHolder.handler().handleException(
                    new ReceiveTimeoutTransportException(holder.node(), holder.action(),
                        "request_id [" + requestId + "] timed out after [" + (timeoutTime - sentTime) + "ms]"));
            } else {
                // response was processed, remove timeout info.
                timeoutInfoHandlers.remove(requestId);
            }
        }
    }
    /**
     * cancels timeout handling. this is a best effort only to avoid running it. remove the requestId from {@link #clientHandlers}
     * to make sure this doesn't run.
     */
    public void cancel() {
        assert clientHandlers.get(requestId) == null :
            "cancel must be called after the requestId [" + requestId + "] has been removed from clientHandlers";
        FutureUtils.cancel(future);
    }
}
/**
 * Immutable record of a request that timed out: the node it was sent to, the action
 * it invoked, when it was sent and when the timeout fired. Kept around so that a
 * late response can still be correlated with the original request.
 */
static class TimeoutInfoHolder {

    private final DiscoveryNode node;
    private final String action;
    private final long sentTime;
    private final long timeoutTime;

    TimeoutInfoHolder(DiscoveryNode node, String action, long sentTime, long timeoutTime) {
        this.node = node;
        this.action = action;
        this.sentTime = sentTime;
        this.timeoutTime = timeoutTime;
    }

    /** Node the timed-out request was sent to. */
    public DiscoveryNode node() {
        return this.node;
    }

    /** Action name of the timed-out request. */
    public String action() {
        return this.action;
    }

    /** Wall-clock time (ms) at which the request was sent. */
    public long sentTime() {
        return this.sentTime;
    }

    /** Wall-clock time (ms) at which the timeout fired. */
    public long timeoutTime() {
        return this.timeoutTime;
    }
}
/**
 * Book-keeping for one in-flight outbound request: the response handler to invoke,
 * the target node, the action name, and an optional timeout task to cancel once a
 * response (or failure) has been delivered.
 */
static class RequestHolder<T extends TransportResponse> {

    private final TransportResponseHandler<T> handler;
    private final DiscoveryNode node;
    private final String action;
    private final TimeoutHandler timeoutHandler;

    RequestHolder(TransportResponseHandler<T> handler, DiscoveryNode node, String action, TimeoutHandler timeoutHandler) {
        this.handler = handler;
        this.node = node;
        this.action = action;
        this.timeoutHandler = timeoutHandler;
    }

    /** Handler that will receive the response or failure for this request. */
    public TransportResponseHandler<T> handler() {
        return handler;
    }

    /** Node the request was sent to. */
    public DiscoveryNode node() {
        return node;
    }

    /** Action name of the request. */
    public String action() {
        return action;
    }

    /** Best-effort cancellation of the pending timeout task, if one was scheduled. */
    public void cancelTimeout() {
        if (timeoutHandler != null) {
            timeoutHandler.cancel();
        }
    }
}
/**
 * This handler wrapper ensures that the response thread executes with the correct thread context. Before any of the handle methods
 * are invoked we restore the context.
 */
private final static class ContextRestoreResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> {

    private final TransportResponseHandler<T> delegate;
    private final ThreadContext.StoredContext threadContext;

    private ContextRestoreResponseHandler(ThreadContext.StoredContext threadContext, TransportResponseHandler<T> delegate) {
        this.threadContext = threadContext;
        this.delegate = delegate;
    }

    @Override
    public T newInstance() {
        return delegate.newInstance();
    }

    @Override
    public void handleResponse(T response) {
        // make the stored context visible to the delegate before it runs
        threadContext.restore();
        delegate.handleResponse(response);
    }

    @Override
    public void handleException(TransportException exp) {
        // same context guarantee on the failure path
        threadContext.restore();
        delegate.handleException(exp);
    }

    @Override
    public String executor() {
        return delegate.executor();
    }
}
/**
 * A {@link TransportChannel} for requests dispatched to the local node: responses are
 * routed straight back to the originating response handler (looked up by request id via
 * the adapter) instead of being serialized over the wire.
 */
static class DirectResponseChannel implements TransportChannel {
    final ESLogger logger;
    final DiscoveryNode localNode;
    private final String action;
    private final long requestId;
    final TransportServiceAdapter adapter;
    final ThreadPool threadPool;

    public DirectResponseChannel(ESLogger logger, DiscoveryNode localNode, String action, long requestId,
                                 TransportServiceAdapter adapter, ThreadPool threadPool) {
        this.logger = logger;
        this.localNode = localNode;
        this.action = action;
        this.requestId = requestId;
        this.adapter = adapter;
        this.threadPool = threadPool;
    }

    @Override
    public String action() {
        return action;
    }

    @Override
    public String getProfileName() {
        return DIRECT_RESPONSE_PROFILE;
    }

    @Override
    public void sendResponse(TransportResponse response) throws IOException {
        sendResponse(response, TransportResponseOptions.EMPTY);
    }

    @Override
    public void sendResponse(final TransportResponse response, TransportResponseOptions options) throws IOException {
        final TransportResponseHandler handler = adapter.onResponseReceived(requestId);
        // ignore if its null, the adapter logs it
        if (handler != null) {
            // run inline only when the handler explicitly asked for the SAME executor;
            // otherwise hand off to the handler's preferred thread pool
            final String executor = handler.executor();
            if (ThreadPool.Names.SAME.equals(executor)) {
                processResponse(handler, response);
            } else {
                threadPool.executor(executor).execute(new Runnable() {
                    @SuppressWarnings({"unchecked"})
                    @Override
                    public void run() {
                        processResponse(handler, response);
                    }
                });
            }
        }
    }

    /** Delivers {@code response} to the handler; a failing handler is reported via its own exception path. */
    @SuppressWarnings("unchecked")
    protected void processResponse(TransportResponseHandler handler, TransportResponse response) {
        try {
            handler.handleResponse(response);
        } catch (Throwable e) {
            processException(handler, wrapInRemote(new ResponseHandlerFailureTransportException(e)));
        }
    }

    @Override
    public void sendResponse(Throwable error) throws IOException {
        final TransportResponseHandler handler = adapter.onResponseReceived(requestId);
        // ignore if its null, the adapter logs it
        if (handler != null) {
            final RemoteTransportException rtx = wrapInRemote(error);
            final String executor = handler.executor();
            if (ThreadPool.Names.SAME.equals(executor)) {
                processException(handler, rtx);
            } else {
                // reuse the executor name fetched above instead of calling handler.executor()
                // a second time — keeps this path consistent with sendResponse(response, options)
                threadPool.executor(executor).execute(new Runnable() {
                    @SuppressWarnings({"unchecked"})
                    @Override
                    public void run() {
                        processException(handler, rtx);
                    }
                });
            }
        }
    }

    /** Wraps {@code t} in a {@link RemoteTransportException} unless it already is one. */
    protected RemoteTransportException wrapInRemote(Throwable t) {
        if (t instanceof RemoteTransportException) {
            return (RemoteTransportException) t;
        }
        return new RemoteTransportException(localNode.getName(), localNode.getAddress(), action, t);
    }

    /** Delivers {@code rtx} to the handler; never lets a handler failure propagate to the caller. */
    protected void processException(final TransportResponseHandler handler, final RemoteTransportException rtx) {
        try {
            handler.handleException(rtx);
        } catch (Throwable e) {
            logger.error("failed to handle exception for action [{}], handler [{}]", e, action, handler);
        }
    }

    @Override
    public long getRequestId() {
        return requestId;
    }

    @Override
    public String getChannelType() {
        return "direct";
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.util;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
/**
* A DocumentStore wrapper that can be used to log and also time DocumentStore
* calls.
*/
public class TimingDocumentStoreWrapper implements DocumentStore {

    private static final boolean DEBUG = Boolean.parseBoolean(System.getProperty("base.debug", "true"));

    private static final AtomicInteger NEXT_ID = new AtomicInteger();

    /** The wrapped store that every call is delegated to. */
    private final DocumentStore base;

    /** Identifies this wrapper instance in log output. */
    private final int id = NEXT_ID.getAndIncrement();

    private long startTime;
    private final Map<String, Count> counts = new HashMap<String, Count>();
    private long lastLogTime;
    private long totalLogTime;

    /** Accumulated duration (ms) per call description, used for "slow call" reporting. */
    private final Map<String, Integer> slowCalls = new ConcurrentHashMap<String, Integer>();

    /** Number of timed operations performed so far; used to sample every 10th call. */
    private int callCount;

    /**
     * A class that keeps track of timing data and call counts.
     */
    static class Count {
        public long count;
        public long max;
        public long total;
        public long paramSize;
        public long resultSize;

        void update(long time, int paramSize, int resultSize) {
            count++;
            if (time > max) {
                max = time;
            }
            total += time;
            this.paramSize += paramSize;
            this.resultSize += resultSize;
        }
    }

    public TimingDocumentStoreWrapper(DocumentStore base) {
        this.base = base;
        lastLogTime = now();
    }

    /**
     * Whether the current call should be recorded in {@link #slowCalls}.
     * Samples every 10th timed call to keep the overhead low.
     */
    private boolean logCommonCall() {
        return callCount % 10 == 0;
    }

    @Override
    @CheckForNull
    public <T extends Document> T find(Collection<T> collection, String key) {
        try {
            long start = now();
            T result = base.find(collection, key);
            updateAndLogTimes("find", start, 0, size(result));
            if (logCommonCall()) {
                logCommonCall(start, "find " + collection + " " + key);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    @CheckForNull
    public <T extends Document> T find(Collection<T> collection, String key, int maxCacheAge) {
        try {
            long start = now();
            T result = base.find(collection, key, maxCacheAge);
            updateAndLogTimes("find2", start, 0, size(result));
            if (logCommonCall()) {
                logCommonCall(start, "find2 " + collection + " " + key);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    @Nonnull
    public <T extends Document> List<T> query(Collection<T> collection,
                                              String fromKey,
                                              String toKey,
                                              int limit) {
        try {
            long start = now();
            List<T> result = base.query(collection, fromKey, toKey, limit);
            // bucket by result cardinality so empty / single / multi queries are reported separately
            if (result.size() == 0) {
                updateAndLogTimes("query, result=0", start, 0, size(result));
            } else if (result.size() == 1) {
                updateAndLogTimes("query, result=1", start, 0, size(result));
            } else {
                updateAndLogTimes("query, result>1", start, 0, size(result));
            }
            if (logCommonCall()) {
                logCommonCall(start, "query " + collection + " " + fromKey + " " + toKey + " " + limit);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    @Nonnull
    public <T extends Document> List<T> query(Collection<T> collection,
                                              String fromKey,
                                              String toKey,
                                              String indexedProperty,
                                              long startValue,
                                              int limit) {
        try {
            long start = now();
            List<T> result = base.query(collection, fromKey, toKey, indexedProperty, startValue, limit);
            updateAndLogTimes("query2", start, 0, size(result));
            if (logCommonCall()) {
                logCommonCall(start, "query2 " + collection + " " + fromKey + " " + toKey + " " + indexedProperty + " " + startValue + " " + limit);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void remove(Collection<T> collection, String key) {
        try {
            long start = now();
            base.remove(collection, key);
            updateAndLogTimes("remove", start, 0, 0);
            if (logCommonCall()) {
                logCommonCall(start, "remove " + collection + " " + key);
            }
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void remove(Collection<T> collection, List<String> keys) {
        try {
            long start = now();
            base.remove(collection, keys);
            updateAndLogTimes("remove", start, 0, 0);
            if (logCommonCall()) {
                logCommonCall(start, "remove " + collection + " " + keys);
            }
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> int remove(Collection<T> collection,
                                           Map<String, Map<UpdateOp.Key, UpdateOp.Condition>> toRemove) {
        try {
            long start = now();
            int result = base.remove(collection, toRemove);
            updateAndLogTimes("remove", start, 0, 0);
            if (logCommonCall()) {
                logCommonCall(start, "remove " + collection + " " + toRemove);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
        try {
            long start = now();
            boolean result = base.create(collection, updateOps);
            updateAndLogTimes("create", start, 0, 0);
            if (logCommonCall()) {
                logCommonCall(start, "create " + collection);
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void update(Collection<T> collection,
                                            List<String> keys,
                                            UpdateOp updateOp) {
        try {
            long start = now();
            base.update(collection, keys, updateOp);
            updateAndLogTimes("update", start, 0, 0);
            if (logCommonCall()) {
                logCommonCall(start, "update " + collection);
            }
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    @CheckForNull
    public <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update) {
        try {
            long start = now();
            T result = base.createOrUpdate(collection, update);
            updateAndLogTimes("createOrUpdate", start, 0, size(result));
            if (logCommonCall()) {
                logCommonCall(start, "createOrUpdate " + collection + " " + update.getId());
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    @CheckForNull
    public <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update) {
        try {
            long start = now();
            T result = base.findAndUpdate(collection, update);
            updateAndLogTimes("findAndUpdate", start, 0, size(result));
            if (logCommonCall()) {
                logCommonCall(start, "findAndUpdate " + collection + " " + update.getId());
            }
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public CacheInvalidationStats invalidateCache() {
        try {
            long start = now();
            CacheInvalidationStats result = base.invalidateCache();
            updateAndLogTimes("invalidateCache", start, 0, 0);
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void invalidateCache(Collection<T> collection, String key) {
        try {
            long start = now();
            base.invalidateCache(collection, key);
            updateAndLogTimes("invalidateCache2", start, 0, 0);
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public void dispose() {
        try {
            long start = now();
            base.dispose();
            updateAndLogTimes("dispose", start, 0, 0);
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> T getIfCached(Collection<T> collection, String key) {
        try {
            long start = now();
            T result = base.getIfCached(collection, key);
            // NOTE(review): stats label "isCached" doesn't match the method name
            // getIfCached — kept as-is so existing log consumers are unaffected.
            updateAndLogTimes("isCached", start, 0, 0);
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public void setReadWriteMode(String readWriteMode) {
        try {
            long start = now();
            base.setReadWriteMode(readWriteMode);
            updateAndLogTimes("setReadWriteMode", start, 0, 0);
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public CacheStats getCacheStats() {
        try {
            long start = now();
            CacheStats result = base.getCacheStats();
            updateAndLogTimes("getCacheStats", start, 0, 0);
            return result;
        } catch (Exception e) {
            throw convert(e);
        }
    }

    @Override
    public Map<String, String> getMetadata() {
        return base.getMetadata();
    }

    /**
     * Adds the elapsed time of the call described by {@code key} to {@link #slowCalls},
     * evicting the cheapest entries once the map exceeds 1000 elements.
     */
    private void logCommonCall(long start, String key) {
        int time = (int) (System.currentTimeMillis() - start);
        if (time <= 0) {
            // sub-millisecond calls are not interesting
            return;
        }
        Map<String, Integer> map = slowCalls;
        Integer oldCount = map.get(key);
        if (oldCount == null) {
            map.put(key, time);
        } else {
            map.put(key, oldCount + time);
        }
        // keep the map bounded: repeatedly evict entries at/below the current
        // minimum accumulated time, raising the threshold until we are under the cap
        int maxElements = 1000;
        int minCount = 1;
        while (map.size() > maxElements) {
            for (Iterator<Map.Entry<String, Integer>> ei = map.entrySet().iterator(); ei.hasNext();) {
                Map.Entry<String, Integer> e = ei.next();
                if (e.getValue() <= minCount) {
                    ei.remove();
                }
            }
            if (map.size() > maxElements) {
                minCount++;
            }
        }
    }

    /** Rethrows runtime exceptions as-is; wraps checked exceptions in a DocumentStoreException. */
    private static RuntimeException convert(Exception e) {
        if (e instanceof RuntimeException) {
            return (RuntimeException) e;
        }
        return new DocumentStoreException("Unexpected exception: " + e.toString(), e);
    }

    private void log(String message) {
        if (DEBUG) {
            System.out.println("[" + id + "] " + message);
        }
    }

    /** Total in-memory size of all documents in the list. */
    private static <T extends Document> int size(List<T> list) {
        int result = 0;
        for (T doc : list) {
            result += doc.getMemory();
        }
        return result;
    }

    /** In-memory size of a single (possibly null) document. */
    private static int size(@Nullable Document document) {
        if (document == null) {
            return 0;
        } else {
            return document.getMemory();
        }
    }

    private static long now() {
        return System.currentTimeMillis();
    }

    /**
     * Records the duration of one operation and, at most every 10 seconds, writes an
     * aggregated timing report (per-operation counts/percentages plus the top slow calls)
     * to the log.
     */
    private void updateAndLogTimes(String operation, long start, int paramSize, int resultSize) {
        // Fix: callCount was never incremented anywhere, so logCommonCall()
        // ("callCount % 10 == 0") was true for *every* call instead of sampling
        // every 10th one. Count each timed operation here.
        callCount++;
        long now = now();
        if (startTime == 0) {
            startTime = now;
        }
        Count c = counts.get(operation);
        if (c == null) {
            c = new Count();
            counts.put(operation, c);
        }
        c.update(now - start, paramSize, resultSize);
        long t = now - lastLogTime;
        if (t >= 10000) {
            totalLogTime += t;
            lastLogTime = now;
            long totalCount = 0, totalTime = 0;
            for (Count count : counts.values()) {
                totalCount += count.count;
                totalTime += count.total;
            }
            // avoid division by zero in the percentage calculations below
            totalCount = Math.max(1, totalCount);
            totalTime = Math.max(1, totalTime);
            for (Entry<String, Count> e : counts.entrySet()) {
                c = e.getValue();
                long count = c.count;
                long total = c.total;
                long in = c.paramSize / 1024 / 1024;
                long out = c.resultSize / 1024 / 1024;
                if (count > 0) {
                    log(e.getKey() +
                            " count " + count +
                            " " + (100 * count / totalCount) + "%" +
                            " in " + in + " out " + out +
                            " time " + total +
                            " " + (100 * total / totalTime) + "%");
                }
            }
            log("all count " + totalCount + " time " + totalTime + " " +
                    (100 * totalTime / totalLogTime) + "%");
            // report the top 10 slowest calls, highest accumulated time first
            Map<String, Integer> map = slowCalls;
            int top = 10;
            int max = Integer.MAX_VALUE;
            for (int i = 0; i < top;) {
                int best = 0;
                for (int x : map.values()) {
                    if (x < max && x > best) {
                        best = x;
                    }
                }
                for (Entry<String, Integer> e : map.entrySet()) {
                    if (e.getValue() >= best && e.getValue() < max) {
                        log("slow call " + e.getValue() + " millis: " + e.getKey());
                        i++;
                        if (i >= top) {
                            break;
                        }
                    }
                }
                if (i >= map.size()) {
                    break;
                }
                max = best;
            }
            slowCalls.clear();
            log("------");
        }
    }
}
| |
/*
* Copyright 2014, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import io.grpc.ClientInterceptors.ForwardingCall;
import io.grpc.ClientInterceptors.ForwardingListener;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** Unit tests for {@link ClientInterceptors}. */
@RunWith(JUnit4.class)
public class ClientInterceptorsTest {
    // Real channel is mocked; newCall() is stubbed in setUp() to hand back the mock call.
    @Mock
    private Channel channel;
    @Mock
    private Call<String, Integer> call;
    @Mock
    private MethodDescriptor<String, Integer> method;
    @Before public void setUp() {
        MockitoAnnotations.initMocks(this);
        when(channel.newCall(Mockito.<MethodDescriptor<String, Integer>>any())).thenReturn(call);
    }
    // Null arguments to intercept() must be rejected eagerly.
    @Test(expected = NullPointerException.class)
    public void npeForNullChannel() {
        ClientInterceptors.intercept(null, Arrays.<ClientInterceptor>asList());
    }
    @Test(expected = NullPointerException.class)
    public void npeForNullInterceptorList() {
        ClientInterceptors.intercept(channel, (List<ClientInterceptor>) null);
    }
    @Test(expected = NullPointerException.class)
    public void npeForNullInterceptor() {
        ClientInterceptors.intercept(channel, (ClientInterceptor) null);
    }
    // An empty interceptor list should not wrap the channel at all.
    @Test
    public void noop() {
        assertSame(channel, ClientInterceptors.intercept(channel, Arrays.<ClientInterceptor>asList()));
    }
    // Each newCall() on the intercepted channel must hit both the interceptor and
    // the underlying channel exactly once per invocation.
    @Test
    public void channelAndInterceptorCalled() {
        ClientInterceptor interceptor = spy(new NoopInterceptor());
        Channel intercepted = ClientInterceptors.intercept(channel, interceptor);
        // First call
        assertSame(call, intercepted.newCall(method));
        verify(channel).newCall(same(method));
        verify(interceptor).interceptCall(same(method), Mockito.<Channel>any());
        verifyNoMoreInteractions(channel, interceptor);
        // Second call
        assertSame(call, intercepted.newCall(method));
        verify(channel, times(2)).newCall(same(method));
        verify(interceptor, times(2)).interceptCall(same(method), Mockito.<Channel>any());
        verifyNoMoreInteractions(channel, interceptor);
    }
    // Calling next.newCall() twice from one interceptCall() is a programming error.
    @Test(expected = IllegalStateException.class)
    public void callNextTwice() {
        ClientInterceptor interceptor = new ClientInterceptor() {
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                    Channel next) {
                next.newCall(method);
                return next.newCall(method);
            }
        };
        Channel intercepted = ClientInterceptors.intercept(channel, interceptor);
        intercepted.newCall(method);
    }
    // Interceptors must run in registration order, with the real channel last.
    @Test
    public void ordered() {
        final List<String> order = new ArrayList<String>();
        channel = new Channel() {
            @SuppressWarnings("unchecked")
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> newCall(MethodDescriptor<ReqT, RespT> method) {
                order.add("channel");
                return (Call<ReqT, RespT>) call;
            }
        };
        ClientInterceptor interceptor1 = new ClientInterceptor() {
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                    Channel next) {
                order.add("i1");
                return next.newCall(method);
            }
        };
        ClientInterceptor interceptor2 = new ClientInterceptor() {
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                    Channel next) {
                order.add("i2");
                return next.newCall(method);
            }
        };
        Channel intercepted = ClientInterceptors.intercept(channel, interceptor1, interceptor2);
        assertSame(call, intercepted.newCall(method));
        assertEquals(Arrays.asList("i1", "i2", "channel"), order);
    }
    // An interceptor that mutates outbound headers must have its additions visible
    // on the headers the real channel's call receives.
    @Test
    public void addOutboundHeaders() {
        final Metadata.Key<String> credKey = Metadata.Key.of("Cred", Metadata.ASCII_STRING_MARSHALLER);
        ClientInterceptor interceptor = new ClientInterceptor() {
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                    Channel next) {
                Call<ReqT, RespT> call = next.newCall(method);
                return new ForwardingCall<ReqT, RespT>(call) {
                    @Override
                    public void start(Call.Listener<RespT> responseListener, Metadata.Headers headers) {
                        headers.put(credKey, "abcd");
                        super.start(responseListener, headers);
                    }
                };
            }
        };
        Channel intercepted = ClientInterceptors.intercept(channel, interceptor);
        @SuppressWarnings("unchecked")
        Call.Listener<Integer> listener = mock(Call.Listener.class);
        Call<String, Integer> interceptedCall = intercepted.newCall(method);
        // start() on the intercepted call will eventually reach the call created by the real channel
        interceptedCall.start(listener, new Metadata.Headers());
        ArgumentCaptor<Metadata.Headers> captor = ArgumentCaptor.forClass(Metadata.Headers.class);
        // The headers passed to the real channel call will contain the information inserted by the
        // interceptor.
        verify(call).start(same(listener), captor.capture());
        assertEquals("abcd", captor.getValue().get(credKey));
    }
    // An interceptor that wraps the listener must observe headers arriving from the
    // transport before the application listener does.
    @Test
    public void examineInboundHeaders() {
        final List<Metadata.Headers> examinedHeaders = new ArrayList<Metadata.Headers>();
        ClientInterceptor interceptor = new ClientInterceptor() {
            @Override
            public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                    Channel next) {
                Call<ReqT, RespT> call = next.newCall(method);
                return new ForwardingCall<ReqT, RespT>(call) {
                    @Override
                    public void start(Call.Listener<RespT> responseListener, Metadata.Headers headers) {
                        super.start(new ForwardingListener<RespT>(responseListener) {
                            @Override
                            public void onHeaders(Metadata.Headers headers) {
                                examinedHeaders.add(headers);
                                super.onHeaders(headers);
                            }
                        }, headers);
                    }
                };
            }
        };
        Channel intercepted = ClientInterceptors.intercept(channel, interceptor);
        @SuppressWarnings("unchecked")
        Call.Listener<Integer> listener = mock(Call.Listener.class);
        Call<String, Integer> interceptedCall = intercepted.newCall(method);
        interceptedCall.start(listener, new Metadata.Headers());
        // Capture the underlying call listener that will receive headers from the transport.
        ArgumentCaptor<Call.Listener<Integer>> captor = ArgumentCaptor.forClass(null);
        verify(call).start(captor.capture(), Mockito.<Metadata.Headers>any());
        Metadata.Headers inboundHeaders = new Metadata.Headers();
        // Simulate that a headers arrives on the underlying call listener.
        captor.getValue().onHeaders(inboundHeaders);
        assertEquals(Arrays.asList(inboundHeaders), examinedHeaders);
    }
    /** Interceptor that passes every call straight through to the next channel. */
    private static class NoopInterceptor implements ClientInterceptor {
        @Override
        public <ReqT, RespT> Call<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
                Channel next) {
            return next.newCall(method);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.pages;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.wicket.Page;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.markup.MarkupException;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.basic.MultiLineLabel;
import org.apache.wicket.markup.html.debug.PageView;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.request.http.WebResponse;
import org.apache.wicket.util.lang.Generics;
/**
* Shows a runtime exception on a nice HTML page.
*
* @author Jonathan Locke
*/
public class ExceptionErrorPage extends AbstractErrorPage
{
private static final long serialVersionUID = 1L;
/** Keep a reference to the root cause. WicketTester will use it */
private final transient Throwable throwable;
/**
* Constructor.
*
* @param throwable
* The exception to show
* @param page
* The page being rendered when the exception was thrown
*/
public ExceptionErrorPage(final Throwable throwable, final Page page)
{
this.throwable = throwable;
// Add exception label
add(new MultiLineLabel("exception", getErrorMessage(throwable)));
add(new MultiLineLabel("stacktrace", getStackTrace(throwable)));
// Get values
String resource = "";
String markup = "";
MarkupStream markupStream = null;
if (throwable instanceof MarkupException)
{
markupStream = ((MarkupException)throwable).getMarkupStream();
if (markupStream != null)
{
markup = markupStream.toHtmlDebugString();
resource = markupStream.getResource().toString();
}
}
// Create markup label
final MultiLineLabel markupLabel = new MultiLineLabel("markup", markup);
markupLabel.setEscapeModelStrings(false);
// Add container with markup highlighted
final WebMarkupContainer markupHighlight = new WebMarkupContainer("markupHighlight");
markupHighlight.add(markupLabel);
markupHighlight.add(new Label("resource", resource));
add(markupHighlight);
// Show container if markup stream is available
markupHighlight.setVisible(markupStream != null);
add(new Link<Void>("displayPageViewLink")
{
private static final long serialVersionUID = 1L;
@Override
public void onClick()
{
ExceptionErrorPage.this.replace(new PageView("componentTree", page));
setVisible(false);
}
});
add(new Label("componentTree", ""));
}
/**
* Converts a Throwable to a string.
*
* @param throwable
* The throwable
* @return The string
*/
public String getErrorMessage(final Throwable throwable)
{
if (throwable != null)
{
StringBuilder sb = new StringBuilder(256);
// first print the last cause
List<Throwable> al = convertToList(throwable);
int length = al.size() - 1;
Throwable cause = al.get(length);
sb.append("Last cause: ").append(cause.getMessage()).append('\n');
if (throwable instanceof WicketRuntimeException)
{
String msg = throwable.getMessage();
if ((msg != null) && (msg.equals(cause.getMessage()) == false))
{
if (throwable instanceof MarkupException)
{
MarkupStream stream = ((MarkupException)throwable).getMarkupStream();
if (stream != null)
{
String text = "\n" + stream.toString();
if (msg.endsWith(text))
{
msg = msg.substring(0, msg.length() - text.length());
}
}
}
sb.append("WicketMessage: ");
sb.append(msg);
sb.append("\n\n");
}
}
return sb.toString();
}
else
{
return "[Unknown]";
}
}
/**
* Converts a Throwable to a string.
*
* @param throwable
* The throwable
* @return The string
*/
public String getStackTrace(final Throwable throwable)
{
if (throwable != null)
{
List<Throwable> al = convertToList(throwable);
StringBuilder sb = new StringBuilder(256);
// first print the last cause
int length = al.size() - 1;
Throwable cause = al.get(length);
sb.append("Root cause:\n\n");
outputThrowable(cause, sb, false);
if (length > 0)
{
sb.append("\n\nComplete stack:\n\n");
for (int i = 0; i < length; i++)
{
outputThrowable(al.get(i), sb, true);
sb.append("\n");
}
}
return sb.toString();
}
else
{
return "<Null Throwable>";
}
}
/**
* @param throwable
* @return xxx
*/
private List<Throwable> convertToList(final Throwable throwable)
{
List<Throwable> al = Generics.newArrayList();
Throwable cause = throwable;
al.add(cause);
while ((cause.getCause() != null) && (cause != cause.getCause()))
{
cause = cause.getCause();
al.add(cause);
}
return al;
}
/**
* Outputs the throwable and its stacktrace to the stringbuffer. If stopAtWicketSerlvet is true
* then the output will stop when the org.apache.wicket servlet is reached. sun.reflect.
* packages are filtered out.
*
* @param cause
* @param sb
* @param stopAtWicketServlet
*/
private void outputThrowable(Throwable cause, StringBuilder sb, boolean stopAtWicketServlet)
{
sb.append(cause);
sb.append("\n");
StackTraceElement[] trace = cause.getStackTrace();
for (int i = 0; i < trace.length; i++)
{
String traceString = trace[i].toString();
if (!(traceString.startsWith("sun.reflect.") && i > 1))
{
sb.append(" at ");
sb.append(traceString);
sb.append("\n");
if (stopAtWicketServlet &&
(traceString.startsWith("org.apache.wicket.protocol.http.WicketServlet") || traceString.startsWith("org.apache.wicket.protocol.http.WicketFilter")))
{
return;
}
}
}
}
    /**
     * Marks the response as an internal server error (HTTP 500) so that clients and
     * proxies treat this error page as a failure rather than a normal page.
     *
     * @param response
     *            the web response whose status code is set
     */
    @Override
    protected void setHeaders(final WebResponse response)
    {
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    }
    /**
     * Get access to the exception this error page was created for.
     *
     * @return The exception
     */
    public Throwable getThrowable()
    {
        return throwable;
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Norwegian Defence Research Establishment / NTNU
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package no.ntnu.okse.core.topic;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.HashSet;
import static org.testng.Assert.*;
/**
 * Unit tests for {@code Topic}: naming, typing, parent/child tree wiring and full
 * topic-path generation. All fixtures are rebuilt before every test method, so the
 * tests are independent of each other.
 */
public class TopicTest {
    // Default-constructed topic: expected to report name "UNNAMED" and type "UNKNOWN".
    Topic noNameNoTypeTopic;
    Topic namedAndTypedTopic;
    // Tree fixture: rootOne -> {childOne, childTwo}, rootTwo -> {childThree}.
    Topic childOne;
    Topic childTwo;
    Topic childThree;
    Topic rootOne;
    Topic rootTwo;
    @BeforeMethod
    public void setUp() throws Exception {
        noNameNoTypeTopic = new Topic();
        namedAndTypedTopic = new Topic("SomeName", "SomeType");
        childOne = new Topic("ChildOne", "Topic");
        childTwo = new Topic("ChildTwo", "Topic");
        childThree = new Topic("ChildThree", "Topic");
        rootOne = new Topic("RootOne", "Topic");
        rootTwo = new Topic("RootTwo", "Topic");
        rootOne.addChild(childOne);
        rootOne.addChild(childTwo);
        rootTwo.addChild(childThree);
    }
    @AfterMethod
    public void tearDown() throws Exception {
    }
    @Test
    public void testGetTopicID() throws Exception {
        Topic one = new Topic();
        HashSet<String> ids = new HashSet<>();
        assertNotNull(one.getTopicID());
        // Hex regex
        assertTrue(one.getTopicID().matches("-?[0-9a-fA-F]+"));
        // Do a mass test and check for colliding ID's
        for (int i = 0; i < 1337; i++) {
            one = new Topic();
            ids.add(one.getTopicID());
        }
        assertEquals(ids.size(), 1337);
    }
    @Test
    public void testGetName() throws Exception {
        assertEquals(noNameNoTypeTopic.getName(), "UNNAMED");
        assertEquals(namedAndTypedTopic.getName(), "SomeName");
    }
    @Test
    public void testGetNameIgnoreCase() throws Exception {
        assertEquals(noNameNoTypeTopic.getNameIgnoreCase(), "unnamed");
        assertEquals(namedAndTypedTopic.getNameIgnoreCase(), "somename");
    }
    @Test
    public void testSetName() throws Exception {
        noNameNoTypeTopic.setName("SomeOtherName");
        assertEquals(noNameNoTypeTopic.getName(), "SomeOtherName");
        assertEquals(noNameNoTypeTopic.getNameIgnoreCase(), "someothername");
    }
    @Test
    public void testGetType() throws Exception {
        assertEquals(noNameNoTypeTopic.getType(), "UNKNOWN");
    }
    @Test
    public void testSetType() throws Exception {
        noNameNoTypeTopic.setType("SomeTopic");
        assertEquals(noNameNoTypeTopic.getType(), "SomeTopic");
    }
    @Test
    public void testGetParent() throws Exception {
        assertEquals(childOne.getParent(), rootOne);
        assertEquals(childTwo.getParent(), rootOne);
        assertEquals(childThree.getParent(), rootTwo);
    }
    @Test
    public void testSetParent() throws Exception {
        // re-parenting must update both the old and the new parent's child sets
        assertEquals(childThree.getParent(), rootTwo);
        assertEquals(rootOne.getChildren().size(), 2);
        childThree.setParent(rootOne);
        assertEquals(childThree.getParent(), rootOne);
        assertEquals(rootOne.getChildren().size(), 3);
        assertEquals(rootTwo.getChildren().size(), 0);
        childThree.setParent(null);
        assertFalse(rootOne.getChildren().contains(childThree));
        assertEquals(childThree.getParent(), null);
    }
    @Test
    public void testAddChild() throws Exception {
        Topic childFour = new Topic();
        Topic childFive = new Topic();
        assertEquals(childFour.getParent(), null);
        assertEquals(childFive.getParent(), null);
        int origChildCount = rootTwo.getChildren().size();
        rootTwo.addChild(childFour);
        rootTwo.addChild(childFive);
        assertEquals(rootTwo.getChildren().size(), origChildCount + 2);
        assertTrue(rootTwo.getChildren().contains(childFour));
        assertTrue(rootTwo.getChildren().contains(childFive));
        // addChild must also set the back-reference on the children
        assertEquals(childFour.getParent(), rootTwo);
        assertEquals(childFive.getParent(), rootTwo);
    }
    @Test
    public void testRemoveChild() throws Exception {
        Topic childSix = new Topic();
        rootTwo.addChild(childSix);
        assertTrue(rootTwo.getChildren().contains(childSix));
        assertEquals(childSix.getParent(), rootTwo);
        rootTwo.removeChild(childSix);
        // Previously this test removed the child without verifying anything;
        // assert the removal actually detached the child from the parent's set.
        assertFalse(rootTwo.getChildren().contains(childSix));
    }
    @Test
    public void testGetChildren() throws Exception {
        // getChildren() must return a defensive copy, not the internal set
        HashSet<Topic> localChildren = rootTwo.getChildren();
        assertFalse(rootTwo.getChildren() == localChildren);
        rootTwo.getChildren().stream().forEach(c -> assertTrue(localChildren.contains(c)));
        Topic childSeven = new Topic();
        Topic childEight = new Topic();
        localChildren.add(childSeven);
        localChildren.add(childEight);
        rootTwo.addChild(childSeven);
        rootTwo.addChild(childEight);
        for (Topic t: rootTwo.getChildren()) {
            assertTrue(localChildren.contains(t));
        }
    }
    @Test
    public void testClearChildren() throws Exception {
        HashSet<Topic> localChildren = rootTwo.getChildren();
        localChildren.stream().forEach(c -> assertEquals(c.getParent(), rootTwo));
        rootTwo.clearChildren();
        assertEquals(rootTwo.getChildren().size(), 0);
        // clearing must also null the parent reference of every former child
        localChildren.stream().forEach(c -> assertEquals(c.getParent(), null));
    }
    @Test
    public void testIsRoot() throws Exception {
        assertTrue(rootOne.isRoot());
        assertTrue(rootTwo.isRoot());
        Topic t = new Topic();
        rootOne.setParent(t);
        assertFalse(rootOne.isRoot());
        assertTrue(t.isRoot());
    }
    @Test
    public void testIsLeaf() throws Exception {
        assertTrue(childOne.isLeaf());
        assertTrue(childTwo.isLeaf());
        assertTrue(childThree.isLeaf());
        Topic t = new Topic();
        childThree.addChild(t);
        assertFalse(childThree.isLeaf());
        assertFalse(childThree.isRoot());
        assertTrue(t.isLeaf());
    }
    @Test
    public void testGetFullTopicString() throws Exception {
        Topic childTen = new Topic();
        Topic childEleven = new Topic();
        childTen.setName("ChildTen");
        childEleven.setName("ChildEleven");
        childTen.addChild(childEleven);
        childThree.addChild(childTen);
        String fullTopicForChildEleven = "RootTwo/ChildThree/ChildTen/ChildEleven";
        String fullTopicForChildTen = "RootTwo/ChildThree/ChildTen";
        String fullTopicForChildThree = "RootTwo/ChildThree";
        assertEquals(childEleven.getFullTopicString(), fullTopicForChildEleven);
        assertEquals(childTen.getFullTopicString(), fullTopicForChildTen);
        assertEquals(childThree.getFullTopicString(), fullTopicForChildThree);
    }
    @Test
    public void testGetFullTopicStringIgnoreCase() throws Exception {
        Topic childTen = new Topic();
        Topic childEleven = new Topic();
        childTen.setName("ChildTen");
        childEleven.setName("ChildEleven");
        childTen.addChild(childEleven);
        childThree.addChild(childTen);
        String fullTopicForChildEleven = "roottwo/childthree/childten/childeleven";
        String fullTopicForChildTen = "roottwo/childthree/childten";
        String fullTopicForChildThree = "roottwo/childthree";
        assertEquals(childEleven.getFullTopicStringIgnoreCase(), fullTopicForChildEleven);
        assertEquals(childTen.getFullTopicStringIgnoreCase(), fullTopicForChildTen);
        assertEquals(childThree.getFullTopicStringIgnoreCase(), fullTopicForChildThree);
    }
    @Test
    public void testIsAncestorOf() throws Exception {
        Topic parent = new Topic();
        Topic child = new Topic();
        Topic grandchild = new Topic();
        grandchild.setParent(child);
        child.setParent(parent);
        assertTrue(parent.isAncestorOf(child));
        assertTrue(parent.isAncestorOf(grandchild));
        // a topic is not its own ancestor
        assertFalse(parent.isAncestorOf(parent));
    }
    @Test
    public void testIsDescendantOf() throws Exception {
        Topic parent = new Topic();
        Topic child = new Topic();
        Topic grandchild = new Topic();
        grandchild.setParent(child);
        child.setParent(parent);
        assertTrue(grandchild.isDescendantOf(child));
        assertTrue(grandchild.isDescendantOf(parent));
        // a topic is not its own descendant
        assertFalse(grandchild.isDescendantOf(grandchild));
    }
    @Test
    public void testToString() throws Exception {
        Topic parent = new Topic("parent", "TEST");
        Topic child = new Topic("child", "TEST");
        child.setParent(parent);
        assertEquals(child.toString(), "Topic{parent/child}");
        assertEquals(parent.toString(), "Topic{parent}");
    }
}
| |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo.builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.core.common.BaseNode;
import org.drools.core.common.BetaConstraints;
import org.drools.core.common.EmptyBetaConstraints;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.reteoo.AlphaNode;
import org.drools.core.reteoo.BetaNode;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.LeftTupleNode;
import org.drools.core.reteoo.NodeTypeEnums;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.rule.AbstractCompositeConstraint;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.GroupElement;
import org.drools.core.rule.IntervalProviderConstraint;
import org.drools.core.rule.Pattern;
import org.drools.core.rule.RuleConditionElement;
import org.drools.core.spi.AlphaNodeFieldConstraint;
import org.drools.core.spi.BetaNodeFieldConstraint;
import org.drools.core.spi.ObjectType;
import org.drools.core.time.Interval;
import org.drools.core.time.TemporalDependencyMatrix;
import org.drools.core.time.TimeUtils;
import org.kie.api.definition.rule.Rule;
/**
 * Utility functions for reteoo build
 */
public class BuildUtils {
    // maps a rule-condition-element class to the component builder that knows how
    // to translate it into Rete network nodes
    private final Map<Class< ? >, ReteooComponentBuilder> componentBuilders = new HashMap<Class< ? >, ReteooComponentBuilder>();
    /**
     * Adds the given builder for the given target to the builders map
     */
    public void addBuilder(final Class< ? > target,
                           final ReteooComponentBuilder builder) {
        this.componentBuilders.put( target,
                                    builder );
    }
    /**
     * Returns a builder for the given target from the builders map
     */
    public ReteooComponentBuilder getBuilderFor(final RuleConditionElement target) {
        return getBuilderFor( target.getClass() );
    }
    /**
     * Returns the builder registered for the given class, walking up the superclass
     * chain until a builder is found; returns {@code null} when none is registered
     * anywhere in the hierarchy.
     */
    public ReteooComponentBuilder getBuilderFor(final Class cls) {
        ReteooComponentBuilder builder = this.componentBuilders.get( cls );
        return builder != null || cls.getSuperclass() == null ? builder : getBuilderFor(cls.getSuperclass());
    }
    /**
     * Attaches a node into the network. If a node already exists that could
     * substitute, it is used instead.
     *
     * @param context
     *            The current build context
     * @param candidate
     *            The node to attach.
     *
     * @return the actual attached node that may be the one given as parameter
     * or eventually one that was already in the cache if sharing is enabled
     */
    public <T extends BaseNode> T attachNode(BuildContext context, T candidate) {
        BaseNode node = null;
        RuleBasePartitionId partition = null;
        if ( candidate.getType() == NodeTypeEnums.EntryPointNode ) {
            // entry point nodes are always shared
            node = context.getKnowledgeBase().getRete().getEntryPointNode( ((EntryPointNode) candidate).getEntryPoint() );
            // all EntryPointNodes belong to the main partition
            partition = RuleBasePartitionId.MAIN_PARTITION;
        } else if ( candidate.getType() == NodeTypeEnums.ObjectTypeNode ) {
            // object type nodes are always shared
            Map<ObjectType, ObjectTypeNode> map = context.getKnowledgeBase().getRete().getObjectTypeNodes( context.getCurrentEntryPoint() );
            if ( map != null ) {
                ObjectTypeNode otn = map.get( ((ObjectTypeNode) candidate).getObjectType() );
                if ( otn != null ) {
                    // adjusting expiration offset
                    otn.mergeExpirationOffset( (ObjectTypeNode) candidate );
                    node = otn;
                }
            }
            // all ObjectTypeNodes belong to the main partition
            partition = RuleBasePartitionId.MAIN_PARTITION;
        } else if ( isSharingEnabledForNode( context,
                                             candidate ) ) {
            // sharing is enabled: look for an equivalent node already in the network
            if ( (context.getTupleSource() != null) && NodeTypeEnums.isLeftTupleSink( candidate ) ) {
                node = context.getTupleSource().getSinkPropagator().getMatchingNode( candidate );
            } else if ( (context.getObjectSource() != null) && NodeTypeEnums.isObjectSink( candidate ) ) {
                node = context.getObjectSource().getObjectSinkPropagator().getMatchingNode( candidate );
            } else {
                throw new RuntimeException( "This is a bug on node sharing verification. Please report to development team." );
            }
        }
        if ( node != null && !areNodesCompatibleForSharing(context, node) ) {
            // a matching node exists but sharing is disallowed (e.g. different agenda groups)
            node = null;
        }
        if ( node == null ) {
            // only attach() if it is a new node
            node = candidate;
            // new node, so it must be labeled
            if ( partition == null ) {
                // if it does not have a predefined label
                if ( context.getPartitionId() == null ) {
                    // if no label in current context, create one
                    context.setPartitionId( context.getKnowledgeBase().createNewPartitionId() );
                }
                partition = context.getPartitionId();
            }
            // set node with the actual partition label
            node.setPartitionId( context, partition );
            node.attach(context);
        } else {
            // shared node found
            mergeNodes(node, candidate);
            // undo previous id assignment
            context.releaseId( candidate );
            if ( partition == null && context.getPartitionId() == null ) {
                partition = node.getPartitionId();
                // if no label in current context, create one
                context.setPartitionId( partition );
            }
        }
        // adds the node to the context list to track all added nodes
        context.getNodes().add( node );
        node.addAssociation( context, context.getRule() );
        // unchecked cast: a shared node was found by matching the candidate, so it is
        // expected to have the same runtime type as T
        return (T)node;
    }
    /**
     * Merges constraint metadata (package names and evaluation context) from the
     * duplicate node's constraints into the constraints of the node that is kept
     * in the network. Beta constraints are merged pairwise, by position.
     */
    private void mergeNodes(BaseNode node, BaseNode duplicate) {
        if (node instanceof AlphaNode) {
            AlphaNodeFieldConstraint alphaConstraint = ((AlphaNode) node).getConstraint();
            alphaConstraint.addPackageNames(((AlphaNode) duplicate).getConstraint().getPackageNames());
            alphaConstraint.mergeEvaluationContext(((AlphaNode) duplicate).getConstraint());
        } else if (node instanceof BetaNode) {
            BetaNodeFieldConstraint[] betaConstraints = ((BetaNode) node).getConstraints();
            int i = 0;
            for (BetaNodeFieldConstraint betaConstraint : betaConstraints) {
                betaConstraint.addPackageNames(((BetaNode) duplicate).getConstraints()[i].getPackageNames());
                betaConstraint.mergeEvaluationContext(((BetaNode) duplicate).getConstraints()[i]);
                i++;
            }
        }
    }
    /**
     * Utility function to check if sharing is enabled for nodes of the given class
     */
    private boolean isSharingEnabledForNode(BuildContext context, BaseNode node) {
        if ( NodeTypeEnums.isLeftTupleSource( node )) {
            return context.getKnowledgeBase().getConfiguration().isShareBetaNodes();
        } else if ( NodeTypeEnums.isObjectSource( node ) ) {
            return context.getKnowledgeBase().getConfiguration().isShareAlphaNodes();
        }
        return false;
    }
    /**
     * Checks whether the given existing node may be shared in the current context.
     * Right input adapter nodes (subnetworks) must not be shared across rules that
     * belong to different agenda groups.
     */
    private boolean areNodesCompatibleForSharing(BuildContext context, BaseNode node) {
        // note: RightInputAdaterNode is the actual constant name in NodeTypeEnums (sic)
        if ( node.getType() == NodeTypeEnums.RightInputAdaterNode ) {
            // avoid subnetworks sharing when they belong to 2 different agenda-groups
            String agendaGroup = context.getRule().getAgendaGroup();
            for (Rule associatedRule : node.getAssociatedRules()) {
                if (!agendaGroup.equals( (( RuleImpl ) associatedRule).getAgendaGroup() )) {
                    return false;
                }
            }
        }
        return true;
    }
    /**
     * Creates and returns a BetaConstraints object for the given list of constraints.
     * Specialized implementations exist for 0 through 4 constraints; larger lists
     * fall back to the default implementation.
     *
     * @param context the current build context
     * @param list the list of constraints
     * @param disableIndexing whether constraint indexing should be disabled
     */
    public BetaConstraints createBetaNodeConstraint(final BuildContext context,
                                                    final List<BetaNodeFieldConstraint> list,
                                                    final boolean disableIndexing) {
        BetaConstraints constraints;
        switch ( list.size() ) {
            case 0 :
                constraints = EmptyBetaConstraints.getInstance();
                break;
            case 1 :
                constraints = BetaNodeConstraintFactory.Factory.get().createSingleBetaConstraints( list.get( 0 ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 2 :
                constraints = BetaNodeConstraintFactory.Factory.get().createDoubleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 3 :
                constraints = BetaNodeConstraintFactory.Factory.get().createTripleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                   context.getKnowledgeBase().getConfiguration(),
                                                                                                   disableIndexing );
                break;
            case 4 :
                constraints = BetaNodeConstraintFactory.Factory.get().createQuadroupleBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                       context.getKnowledgeBase().getConfiguration(),
                                                                                                       disableIndexing );
                break;
            default :
                constraints = BetaNodeConstraintFactory.Factory.get().createDefaultBetaConstraints( list.toArray( new BetaNodeFieldConstraint[list.size()] ),
                                                                                                    context.getKnowledgeBase().getConfiguration(),
                                                                                                    disableIndexing );
        }
        return constraints;
    }
    /**
     * Calculates the temporal distance between all event patterns in the given
     * subrule.
     *
     * @param groupElement the root element of a subrule being added to the rulebase
     * @return the temporal dependency matrix for the events, or {@code null} when
     *         the subrule contains no event patterns
     */
    public TemporalDependencyMatrix calculateTemporalDistance(GroupElement groupElement) {
        // find the events
        List<Pattern> events = new ArrayList<Pattern>();
        selectAllEventPatterns( events,
                                groupElement );
        final int size = events.size();
        if ( size >= 1 ) {
            // create the matrix, initialized with [0,0] on the diagonal and
            // unconstrained [MIN,MAX] intervals everywhere else
            Interval[][] source = new Interval[size][];
            for ( int row = 0; row < size; row++ ) {
                source[row] = new Interval[size];
                for ( int col = 0; col < size; col++ ) {
                    if ( row == col ) {
                        source[row][col] = new Interval( 0,
                                                         0 );
                    } else {
                        source[row][col] = new Interval( Interval.MIN,
                                                         Interval.MAX );
                    }
                }
            }
            Interval[][] result;
            if ( size > 1 ) {
                List<Declaration> declarations = new ArrayList<>();
                int eventIndex = 0;
                // populate the matrix
                for ( Pattern event : events ) {
                    // references to other events are always backward references, so we can build the list as we go
                    declarations.add( event.getDeclaration() );
                    Map<Declaration, Interval> temporal = new HashMap<>();
                    gatherTemporalRelationships( event.getConstraints(),
                                                 temporal );
                    // intersects default values with the actual constrained intervals
                    for ( Map.Entry<Declaration, Interval> entry : temporal.entrySet() ) {
                        int targetIndex = declarations.indexOf( entry.getKey() );
                        Interval interval = entry.getValue();
                        source[targetIndex][eventIndex].intersect( interval );
                        // the mirror cell gets the negated interval, guarding against
                        // overflow when negating MIN/MAX sentinels
                        Interval reverse = new Interval( interval.getUpperBound() == Long.MAX_VALUE ? Long.MIN_VALUE : -interval.getUpperBound(),
                                                         interval.getLowerBound() == Long.MIN_VALUE ? Long.MAX_VALUE : -interval.getLowerBound() );
                        source[eventIndex][targetIndex].intersect( reverse );
                    }
                    eventIndex++;
                }
                result = TimeUtils.calculateTemporalDistance( source );
            } else {
                // a single event has no pairwise distances to propagate
                result = source;
            }
            return new TemporalDependencyMatrix( result, events );
        }
        return null;
    }
    /**
     * Collects, for each event declaration referenced by the given constraints, the
     * temporal {@link Interval} it imposes. Multiple intervals for the same declaration
     * are intersected; composite constraints are walked recursively.
     */
    private void gatherTemporalRelationships(List< ? > constraints,
                                             Map<Declaration, Interval> temporal) {
        for ( Object obj : constraints ) {
            if ( obj instanceof IntervalProviderConstraint) {
                IntervalProviderConstraint constr = (IntervalProviderConstraint) obj;
                if ( constr.isTemporal() ) {
                    // if a constraint already exists, calculate the intersection
                    Declaration[] decs = constr.getRequiredDeclarations();
                    // only calculate relationships to other event patterns
                    if( decs.length > 0 && decs[0].isPatternDeclaration() && decs[0].getPattern().getObjectType().isEvent() ) {
                        Declaration target = decs[0];
                        Interval interval = temporal.get( target );
                        if ( interval == null ) {
                            interval = constr.getInterval();
                            temporal.put( target,
                                          interval );
                        } else {
                            interval.intersect( constr.getInterval() );
                        }
                    }
                }
            } else if ( obj instanceof AbstractCompositeConstraint ) {
                gatherTemporalRelationships( Arrays.asList( ((AbstractCompositeConstraint) obj).getBetaConstraints() ),
                                             temporal );
            }
        }
    }
    /**
     * Recursively collects all patterns bound to event object types from the given
     * rule condition element subtree into the supplied list, in traversal order.
     */
    private void selectAllEventPatterns(List<Pattern> events,
                                        RuleConditionElement rce) {
        if ( rce instanceof Pattern ) {
            Pattern p = (Pattern) rce;
            if ( p.getObjectType().isEvent() ) {
                events.add( p );
            }
        }
        for ( RuleConditionElement child : rce.getNestedElements() ) {
            selectAllEventPatterns( events,
                                    child );
        }
    }
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.engine.policies;
import io.apiman.gateway.engine.async.IAsyncResult;
import io.apiman.gateway.engine.async.IAsyncResultHandler;
import io.apiman.gateway.engine.beans.PolicyFailure;
import io.apiman.gateway.engine.beans.PolicyFailureType;
import io.apiman.gateway.engine.beans.ServiceRequest;
import io.apiman.gateway.engine.beans.ServiceResponse;
import io.apiman.gateway.engine.components.IPolicyFailureFactoryComponent;
import io.apiman.gateway.engine.components.IRateLimiterComponent;
import io.apiman.gateway.engine.components.rate.RateLimitResponse;
import io.apiman.gateway.engine.io.AbstractStream;
import io.apiman.gateway.engine.io.IApimanBuffer;
import io.apiman.gateway.engine.io.IReadWriteStream;
import io.apiman.gateway.engine.policies.config.TransferDirectionType;
import io.apiman.gateway.engine.policies.config.TransferQuotaConfig;
import io.apiman.gateway.engine.policies.i18n.Messages;
import io.apiman.gateway.engine.policy.IPolicyChain;
import io.apiman.gateway.engine.policy.IPolicyContext;
import io.apiman.gateway.engine.rates.RateBucketPeriod;
import java.util.Map;
/**
* Policy that enforces transfer quotas.
*
* @author eric.wittmann@redhat.com
*/
public class TransferQuotaPolicy extends AbstractMappedDataPolicy<TransferQuotaConfig> {
    // Context attribute keys used to carry state between the request and response phases.
    private static final String BUCKET_ID_ATTR = TransferQuotaPolicy.class.getName() + ".bucketId"; //$NON-NLS-1$
    private static final String PERIOD_ATTR = TransferQuotaPolicy.class.getName() + ".period"; //$NON-NLS-1$
    private static final String BYTES_UPLOADED_ATTR = TransferQuotaPolicy.class.getName() + ".bytesUploaded"; //$NON-NLS-1$
    private static final String RATE_LIMIT_RESPONSE_ATTR = TransferQuotaPolicy.class.getName() + ".rateLimitResponse"; //$NON-NLS-1$
    // Default names of the HTTP response headers reporting quota status to the client.
    private static final String DEFAULT_LIMIT_HEADER = "X-TransferQuota-Limit"; //$NON-NLS-1$
    private static final String DEFAULT_REMAINING_HEADER = "X-TransferQuota-Remaining"; //$NON-NLS-1$
    private static final String DEFAULT_RESET_HEADER = "X-TransferQuota-Reset"; //$NON-NLS-1$
    /**
     * Constructor. No state to initialize; configuration arrives via
     * {@link #getConfigurationClass()} deserialization.
     */
    public TransferQuotaPolicy() {
    }
    /**
     * @see io.apiman.gateway.engine.policy.AbstractPolicy#getConfigurationClass()
     */
    @Override
    protected Class<TransferQuotaConfig> getConfigurationClass() {
        // the policy's JSON configuration is deserialized into this type
        return TransferQuotaConfig.class;
    }
/**
* @see io.apiman.gateway.engine.policies.AbstractMappedPolicy#doApply(io.apiman.gateway.engine.beans.ServiceRequest, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object, io.apiman.gateway.engine.policy.IPolicyChain)
*/
@Override
protected void doApply(final ServiceRequest request, final IPolicyContext context, final TransferQuotaConfig config,
final IPolicyChain<ServiceRequest> chain) {
// *************************************************************
// Step 1: check to see if we're already in violation of this
// policy. If so, fail fast.
// *************************************************************
String bucketId = "XFERQUOTA||" + RateLimitingPolicy.bucketId(request, config); //$NON-NLS-1$
final RateBucketPeriod period = RateLimitingPolicy.getPeriod(config);
if (bucketId == RateLimitingPolicy.NO_USER_AVAILABLE) {
IPolicyFailureFactoryComponent failureFactory = context.getComponent(IPolicyFailureFactoryComponent.class);
PolicyFailure failure = failureFactory.createFailure(PolicyFailureType.Other, PolicyFailureCodes.NO_USER_FOR_RATE_LIMITING, Messages.i18n.format("TransferQuotaPolicy.NoUser")); //$NON-NLS-1$
chain.doFailure(failure);
return;
}
if (bucketId == RateLimitingPolicy.NO_APPLICATION_AVAILABLE) {
IPolicyFailureFactoryComponent failureFactory = context.getComponent(IPolicyFailureFactoryComponent.class);
PolicyFailure failure = failureFactory.createFailure(PolicyFailureType.Other, PolicyFailureCodes.NO_APP_FOR_RATE_LIMITING, Messages.i18n.format("TransferQuotaPolicy.NoApp")); //$NON-NLS-1$
chain.doFailure(failure);
return;
}
context.setAttribute(BUCKET_ID_ATTR, bucketId);
context.setAttribute(PERIOD_ATTR, period);
IRateLimiterComponent rateLimiter = context.getComponent(IRateLimiterComponent.class);
rateLimiter.accept(bucketId, period, config.getLimit(), 0, new IAsyncResultHandler<RateLimitResponse>() {
@Override
public void handle(IAsyncResult<RateLimitResponse> result) {
if (result.isError()) {
chain.throwError(result.getError());
} else {
RateLimitResponse rtr = result.getResult();
context.setAttribute(RATE_LIMIT_RESPONSE_ATTR, rtr);
if (!rtr.isAccepted()) {
doQuotaExceededFailure(context, config, chain, rtr);
} else {
chain.doApply(request);
}
}
}
});
}
/**
* @see io.apiman.gateway.engine.policies.AbstractMappedDataPolicy#requestDataHandler(io.apiman.gateway.engine.beans.ServiceRequest, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object)
*/
@Override
protected IReadWriteStream<ServiceRequest> requestDataHandler(final ServiceRequest request,
final IPolicyContext context, final TransferQuotaConfig config) {
// *************************************************************
// Step 2: if upload quotas are enabled, then count all bytes
// uploaded to the back-end API
// *************************************************************
if (config.getDirection() == TransferDirectionType.upload || config.getDirection() == TransferDirectionType.both) {
return new AbstractStream<ServiceRequest>() {
private long total = 0;
@Override
public ServiceRequest getHead() {
return request;
}
@Override
protected void handleHead(ServiceRequest head) {
}
@Override
public void write(IApimanBuffer chunk) {
total += chunk.length();
super.write(chunk);
}
@Override
public void end() {
context.setAttribute(BYTES_UPLOADED_ATTR, total);
super.end();
}
};
} else {
return null;
}
}
/**
* @see io.apiman.gateway.engine.policies.AbstractMappedPolicy#doApply(io.apiman.gateway.engine.beans.ServiceResponse, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object, io.apiman.gateway.engine.policy.IPolicyChain)
*/
@Override
protected void doApply(final ServiceResponse response, final IPolicyContext context, final TransferQuotaConfig config,
final IPolicyChain<ServiceResponse> chain) {
// *************************************************************
// Step 3: store the upload count (if appropriate) and fail if
// the transfer limit was exceeded
// *************************************************************
if (config.getDirection() == TransferDirectionType.upload || config.getDirection() == TransferDirectionType.both) {
final String bucketId = context.getAttribute(BUCKET_ID_ATTR, (String) null);
final RateBucketPeriod period = context.getAttribute(PERIOD_ATTR, (RateBucketPeriod) null);
final long uploadedBytes = context.getAttribute(BYTES_UPLOADED_ATTR, (Long) null);
IRateLimiterComponent rateLimiter = context.getComponent(IRateLimiterComponent.class);
rateLimiter.accept(bucketId, period, config.getLimit(), uploadedBytes, new IAsyncResultHandler<RateLimitResponse>() {
@Override
public void handle(IAsyncResult<RateLimitResponse> result) {
if (result.isError()) {
chain.throwError(result.getError());
} else {
RateLimitResponse rtr = result.getResult();
if (!rtr.isAccepted()) {
doQuotaExceededFailure(context, config, chain, rtr);
} else {
Map<String, String> responseHeaders = RateLimitingPolicy.responseHeaders(
config, rtr, defaultLimitHeader(), defaultRemainingHeader(),
defaultResetHeader());
response.getHeaders().putAll(responseHeaders);
chain.doApply(response);
}
}
}
});
} else {
Map<String, String> responseHeaders = RateLimitingPolicy.responseHeaders(config,
context.getAttribute(RATE_LIMIT_RESPONSE_ATTR, (RateLimitResponse) null),
defaultLimitHeader(), defaultRemainingHeader(), defaultResetHeader());
response.getHeaders().putAll(responseHeaders);
chain.doApply(response);
}
}
/**
* @see io.apiman.gateway.engine.policies.AbstractMappedDataPolicy#responseDataHandler(io.apiman.gateway.engine.beans.ServiceResponse, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object)
*/
@Override
protected IReadWriteStream<ServiceResponse> responseDataHandler(final ServiceResponse response,
final IPolicyContext context, final TransferQuotaConfig config) {
// *************************************************************
// Step 4: if download quotas are enabled, then count all bytes
// downloaded from the back-end API and store the count
// ---
// Note: we have no easy way to fail the request if the download
// quota is exceeded - so we'll pass and then fail on the
// next request (see Step 1)
// *************************************************************
if (config.getDirection() == TransferDirectionType.download || config.getDirection() == TransferDirectionType.both) {
return new AbstractStream<ServiceResponse>() {
private long total = 0;
@Override
public ServiceResponse getHead() {
return response;
}
@Override
protected void handleHead(ServiceResponse head) {
}
@Override
public void write(IApimanBuffer chunk) {
total += chunk.length();
super.write(chunk);
}
@Override
public void end() {
doFinalApply(context, config, total);
super.end();
}
};
} else {
return null;
}
}
/**
 * Called when everything is done (the last byte is written). Records the
 * number of bytes downloaded against the caller's rate bucket.
 * @param context the policy context
 * @param config the transfer quota configuration
 * @param downloadedBytes how many bytes were downloaded from the back-end
 */
protected void doFinalApply(IPolicyContext context, TransferQuotaConfig config, long downloadedBytes) {
    boolean countsDownloads = config.getDirection() == TransferDirectionType.download
            || config.getDirection() == TransferDirectionType.both;
    if (!countsDownloads) {
        return;
    }
    final String bucketId = context.getAttribute(BUCKET_ID_ATTR, (String) null);
    final RateBucketPeriod period = context.getAttribute(PERIOD_ATTR, (RateBucketPeriod) null);
    IRateLimiterComponent rateLimiter = context.getComponent(IRateLimiterComponent.class);
    rateLimiter.accept(bucketId, period, config.getLimit(), downloadedBytes, new IAsyncResultHandler<RateLimitResponse>() {
        @Override
        public void handle(IAsyncResult<RateLimitResponse> result) {
            // No need to handle the response - it's too late to do anything meaningful with the result.
            // TODO log any error that might have occurred
        }
    });
}
/**
 * Called to send a 'quota exceeded' failure down the chain, decorated with
 * the standard rate-limit response headers.
 * @param context the policy context
 * @param config the transfer quota configuration
 * @param chain the policy chain to fail
 * @param rtr the rate limiter's response
 */
protected void doQuotaExceededFailure(final IPolicyContext context, final TransferQuotaConfig config,
        final IPolicyChain<?> chain, RateLimitResponse rtr) {
    IPolicyFailureFactoryComponent failureFactory = context.getComponent(IPolicyFailureFactoryComponent.class);
    PolicyFailure failure = limitExceededFailure(failureFactory);
    Map<String, String> headers = RateLimitingPolicy.responseHeaders(config, rtr,
            defaultLimitHeader(), defaultRemainingHeader(), defaultResetHeader());
    failure.getHeaders().putAll(headers);
    chain.doFailure(failure);
}
/**
 * Creates the policy failure reported when the byte quota has been exceeded.
 * @param failureFactory the component used to create the failure
 * @return a configured 'quota exceeded' failure carrying HTTP status 429
 */
protected PolicyFailure limitExceededFailure(IPolicyFailureFactoryComponent failureFactory) {
    String message = Messages.i18n.format("TransferQuotaPolicy.RateExceeded"); //$NON-NLS-1$
    PolicyFailure failure = failureFactory.createFailure(PolicyFailureType.Other,
            PolicyFailureCodes.BYTE_QUOTA_EXCEEDED, message);
    failure.setResponseCode(429); // 429 Too Many Requests
    return failure;
}
/**
 * @return the default reset header name; subclasses may override to customize it
 */
protected String defaultResetHeader() {
    return DEFAULT_RESET_HEADER;
}
/**
 * @return the default remaining header name; subclasses may override to customize it
 */
protected String defaultRemainingHeader() {
    return DEFAULT_REMAINING_HEADER;
}
/**
 * @return the default limit header name; subclasses may override to customize it
 */
protected String defaultLimitHeader() {
    return DEFAULT_LIMIT_HEADER;
}
}
| |
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.bsbhv.pmbean;
import java.util.*;
import org.dbflute.outsidesql.typed.*;
import org.dbflute.jdbc.*;
import org.dbflute.cbean.coption.LikeSearchOption;
import org.dbflute.outsidesql.PmbCustodial;
import org.dbflute.util.DfTypeUtil;
import org.docksidestage.mysql.dbflute.allcommon.*;
import org.docksidestage.mysql.dbflute.exbhv.*;
import org.docksidestage.mysql.dbflute.exentity.customize.*;
/**
 * The base class for typed parameter-bean of SimpleMember. <br>
 * This is related to "<span style="color: #AD4747">selectSimpleMember</span>" on MemberBhv.
 * NOTE(review): this class is auto-generated by DBFlute; code is kept byte-identical
 * so that regeneration produces no spurious diffs - only comments are added here.
 * @author DBFlute(AutoGenerator)
 */
public class BsSimpleMemberPmb implements ListHandlingPmb<MemberBhv, SimpleMember>, EntityHandlingPmb<MemberBhv, SimpleMember>, FetchBean {
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** The parameter of memberId. */
    protected Integer _memberId;
    /** The parameter of memberName:likePrefix. */
    protected String _memberName;
    /** The option of like-search for memberName. */
    protected LikeSearchOption _memberNameInternalLikeSearchOption;
    /** The parameter of birthdate. */
    protected java.time.LocalDate _birthdate;
    /** The max size of safety result. */
    protected int _safetyMaxResultSize;
    /** The time-zone for filtering e.g. from-to. (NullAllowed: if null, default zone) */
    protected TimeZone _timeZone;
    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    /**
     * Constructor for the typed parameter-bean of SimpleMember. <br>
     * This is related to "<span style="color: #AD4747">selectSimpleMember</span>" on MemberBhv.
     */
    public BsSimpleMemberPmb() {
    }
    // ===================================================================================
    //                                                                Typed Implementation
    //                                                                ====================
    /**
     * {@inheritDoc}
     */
    public String getOutsideSqlPath() { return "selectSimpleMember"; }
    /**
     * Get the type of an entity for result. (implementation)
     * @return The type instance of an entity, customize entity. (NotNull)
     */
    public Class<SimpleMember> getEntityType() { return SimpleMember.class; }
    // ===================================================================================
    //                                                                       Safety Result
    //                                                                       =============
    /**
     * {@inheritDoc}
     */
    public void checkSafetyResult(int safetyMaxResultSize) {
        _safetyMaxResultSize = safetyMaxResultSize;
    }
    /**
     * {@inheritDoc}
     */
    public int getSafetyMaxResultSize() {
        return _safetyMaxResultSize;
    }
    // ===================================================================================
    //                                                                       Assist Helper
    //                                                                       =============
    // -----------------------------------------------------
    //                                                String
    //                                                ------
    // Converts "" to null unless empty-string parameters are explicitly allowed by config.
    protected String filterStringParameter(String value) { return isEmptyStringParameterAllowed() ? value : convertEmptyToNull(value); }
    protected boolean isEmptyStringParameterAllowed() { return DBFluteConfig.getInstance().isEmptyStringParameterAllowed(); }
    protected String convertEmptyToNull(String value) { return PmbCustodial.convertEmptyToNull(value); }
    protected void assertLikeSearchOptionValid(String name, LikeSearchOption option) { PmbCustodial.assertLikeSearchOptionValid(name, option); }
    // -----------------------------------------------------
    //                                                  Date
    //                                                  ----
    // Date conversions delegate to PmbCustodial, honoring the bean's time-zone if set.
    protected Date toUtilDate(Object date) { return PmbCustodial.toUtilDate(date, _timeZone); }
    protected <DATE> DATE toLocalDate(Date date, Class<DATE> localType) { return PmbCustodial.toLocalDate(date, localType, chooseRealTimeZone()); }
    protected TimeZone chooseRealTimeZone() { return PmbCustodial.chooseRealTimeZone(_timeZone); }
    /**
     * Set time-zone, basically for LocalDate conversion. <br>
     * Normally you don't need to set this, you can adjust other ways. <br>
     * (DBFlute system's time-zone is used as default)
     * @param timeZone The time-zone for filtering. (NullAllowed: if null, default zone)
     */
    public void zone(TimeZone timeZone) { _timeZone = timeZone; }
    // -----------------------------------------------------
    //                                    by Option Handling
    //                                    ------------------
    // might be called by option handling
    protected <NUMBER extends Number> NUMBER toNumber(Object obj, Class<NUMBER> type) { return PmbCustodial.toNumber(obj, type); }
    protected Boolean toBoolean(Object obj) { return PmbCustodial.toBoolean(obj); }
    @SuppressWarnings("unchecked")
    protected <ELEMENT> ArrayList<ELEMENT> newArrayList(ELEMENT... elements) { return PmbCustodial.newArrayList(elements); }
    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    /**
     * @return The display string of all parameters. (NotNull)
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append(DfTypeUtil.toClassTitle(this)).append(":");
        sb.append(xbuildColumnString());
        return sb.toString();
    }
    // Builds "{p1, p2, p3}" for toString(); the leading delimiter is stripped afterwards.
    protected String xbuildColumnString() {
        final String dm = ", ";
        final StringBuilder sb = new StringBuilder();
        sb.append(dm).append(_memberId);
        sb.append(dm).append(_memberName);
        sb.append(dm).append(_birthdate);
        if (sb.length() > 0) { sb.delete(0, dm.length()); }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }
    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] memberId <br>
     * @return The value of memberId. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public Integer getMemberId() {
        return _memberId;
    }
    /**
     * [set] memberId <br>
     * @param memberId The value of memberId. (NullAllowed)
     */
    public void setMemberId(Integer memberId) {
        _memberId = memberId;
    }
    /**
     * [get] memberName:likePrefix <br>
     * @return The value of memberName. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public String getMemberName() {
        return filterStringParameter(_memberName);
    }
    /**
     * [set as prefixSearch] memberName:likePrefix <br>
     * @param memberName The value of memberName. (NullAllowed)
     */
    public void setMemberName_PrefixSearch(String memberName) {
        _memberName = memberName;
        _memberNameInternalLikeSearchOption = new LikeSearchOption().likePrefix();
    }
    /**
     * Get the internal option of likeSearch for memberName. {Internal Method: Don't invoke this}
     * @return The internal option of likeSearch for memberName. (NullAllowed)
     */
    public LikeSearchOption getMemberNameInternalLikeSearchOption() {
        return _memberNameInternalLikeSearchOption;
    }
    /**
     * [get] birthdate <br>
     * @return The value of birthdate. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public java.time.LocalDate getBirthdate() {
        return _birthdate;
    }
    /**
     * [set] birthdate <br>
     * @param birthdate The value of birthdate. (NullAllowed)
     */
    public void setBirthdate(java.time.LocalDate birthdate) {
        _birthdate = birthdate;
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.lookup;
import com.intellij.codeInsight.completion.InsertHandler;
import com.intellij.codeInsight.completion.InsertionContext;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.psi.util.PsiUtilCore;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Collections;
import java.util.Set;
/**
 * An immutable builder for completion lookup elements. Every {@code with*} method
 * returns a NEW builder instance, leaving the receiver untouched.
 *
 * @author peter
 *
 * @see LookupElementDecorator
 * @see com.intellij.codeInsight.completion.PrioritizedLookupElement
 */
public final class LookupElementBuilder extends LookupElement {
  @NotNull private final String myLookupString;
  @NotNull private final Object myObject;
  @Nullable private final SmartPsiElementPointer<?> myPsiElement;
  private final boolean myCaseSensitive;
  @Nullable private final InsertHandler<LookupElement> myInsertHandler;
  @Nullable private final LookupElementRenderer<LookupElement> myRenderer;
  @Nullable private final LookupElementPresentation myHardcodedPresentation;
  @NotNull private final Set<String> myAllLookupStrings;

  private LookupElementBuilder(@NotNull String lookupString, @NotNull Object object, @Nullable InsertHandler<LookupElement> insertHandler,
                               @Nullable LookupElementRenderer<LookupElement> renderer,
                               @Nullable LookupElementPresentation hardcodedPresentation,
                               @Nullable SmartPsiElementPointer<?> psiElement,
                               @NotNull Set<String> allLookupStrings,
                               boolean caseSensitive) {
    myLookupString = lookupString;
    myObject = object;
    myInsertHandler = insertHandler;
    myRenderer = renderer;
    myHardcodedPresentation = hardcodedPresentation;
    myPsiElement = psiElement;
    // Wrap so that the builder stays immutable even if the caller keeps the set.
    myAllLookupStrings = Collections.unmodifiableSet(allLookupStrings);
    myCaseSensitive = caseSensitive;
  }

  private LookupElementBuilder(@NotNull String lookupString, @NotNull Object object) {
    this(lookupString, object, null, null, null, null, Collections.singleton(lookupString), true);
  }

  @NotNull
  public static LookupElementBuilder create(@NotNull String lookupString) {
    return new LookupElementBuilder(lookupString, lookupString);
  }

  public static LookupElementBuilder create(@NotNull Object object) {
    return new LookupElementBuilder(object.toString(), object);
  }

  public static LookupElementBuilder createWithSmartPointer(@NotNull String lookupString, @NotNull PsiElement element) {
    PsiUtilCore.ensureValid(element);
    return new LookupElementBuilder(lookupString,
                                    SmartPointerManager.getInstance(element.getProject()).createSmartPsiElementPointer(element));
  }

  public static LookupElementBuilder create(@NotNull PsiNamedElement element) {
    PsiUtilCore.ensureValid(element);
    return new LookupElementBuilder(StringUtil.notNullize(element.getName()), element);
  }

  public static LookupElementBuilder createWithIcon(@NotNull PsiNamedElement element) {
    PsiUtilCore.ensureValid(element);
    return create(element).withIcon(element.getIcon(0));
  }

  public static LookupElementBuilder create(@NotNull Object lookupObject, @NotNull String lookupString) {
    if (lookupObject instanceof PsiElement) {
      PsiUtilCore.ensureValid((PsiElement)lookupObject);
    }
    return new LookupElementBuilder(lookupString, lookupObject);
  }

  /**
   * @deprecated use {@link #withInsertHandler(InsertHandler)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setInsertHandler(@Nullable InsertHandler<LookupElement> insertHandler) {
    return withInsertHandler(insertHandler);
  }

  @Contract(pure=true)
  public LookupElementBuilder withInsertHandler(@Nullable InsertHandler<LookupElement> insertHandler) {
    return new LookupElementBuilder(myLookupString, myObject, insertHandler, myRenderer, myHardcodedPresentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withRenderer(LookupElementRenderer)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setRenderer(@Nullable LookupElementRenderer<LookupElement> renderer) {
    return withRenderer(renderer);
  }

  @Contract(pure=true)
  public LookupElementBuilder withRenderer(@Nullable LookupElementRenderer<LookupElement> renderer) {
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, renderer, myHardcodedPresentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  @Override
  @NotNull
  public Set<String> getAllLookupStrings() {
    return myAllLookupStrings;
  }

  /**
   * @deprecated use {@link #withIcon(Icon)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setIcon(@Nullable Icon icon) {
    return withIcon(icon);
  }

  @Contract(pure=true)
  public LookupElementBuilder withIcon(@Nullable Icon icon) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setIcon(icon);
    // Presentation tweaks drop the custom renderer: the hardcoded presentation takes over.
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @return a mutable copy of the current presentation (hardcoded one if present,
   * otherwise a fresh one showing the lookup string).
   */
  @NotNull
  private LookupElementPresentation copyPresentation() {
    final LookupElementPresentation presentation = new LookupElementPresentation();
    if (myHardcodedPresentation != null) {
      presentation.copyFrom(myHardcodedPresentation);
    } else {
      presentation.setItemText(myLookupString);
    }
    return presentation;
  }

  /**
   * @deprecated use {@link #withLookupString(String)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder addLookupString(@NotNull String another) {
    return withLookupString(another);
  }

  @Contract(pure=true)
  public LookupElementBuilder withLookupString(@NotNull String another) {
    final THashSet<String> set = new THashSet<>(myAllLookupStrings);
    set.add(another);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, myRenderer, myHardcodedPresentation, myPsiElement,
                                    Collections.unmodifiableSet(set), myCaseSensitive);
  }

  @Override
  public boolean isCaseSensitive() {
    return myCaseSensitive;
  }

  /**
   * @deprecated use {@link #withCaseSensitivity(boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setCaseSensitive(boolean caseSensitive) {
    return withCaseSensitivity(caseSensitive);
  }

  /**
   * @param caseSensitive if this lookup item should be completed in the same letter case as prefix
   * @return modified builder
   * @see com.intellij.codeInsight.completion.CompletionResultSet#caseInsensitive()
   */
  @Contract(pure=true)
  public LookupElementBuilder withCaseSensitivity(boolean caseSensitive) {
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, myRenderer, myHardcodedPresentation, myPsiElement,
                                    myAllLookupStrings, caseSensitive);
  }

  /**
   * Allows to pass custom PSI that will be returned from {@link #getPsiElement()}.
   */
  @Contract(pure=true)
  public LookupElementBuilder withPsiElement(@Nullable PsiElement psi) {
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, myRenderer, myHardcodedPresentation,
                                    psi == null ? null : SmartPointerManager.createPointer(psi),
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withItemTextForeground(Color)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setItemTextForeground(@NotNull Color itemTextForeground) {
    return withItemTextForeground(itemTextForeground);
  }

  @Contract(pure=true)
  public LookupElementBuilder withItemTextForeground(@NotNull Color itemTextForeground) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setItemTextForeground(itemTextForeground);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement, myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withItemTextUnderlined(boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setItemTextUnderlined(boolean underlined) {
    return withItemTextUnderlined(underlined);
  }

  @Contract(pure=true)
  public LookupElementBuilder withItemTextUnderlined(boolean underlined) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setItemTextUnderlined(underlined);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement, myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withTypeText(String)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setTypeText(@Nullable String typeText) {
    return withTypeText(typeText);
  }

  @Contract(pure=true)
  public LookupElementBuilder withTypeText(@Nullable String typeText) {
    return withTypeText(typeText, false);
  }

  /**
   * @deprecated use {@link #withTypeText(String, boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setTypeText(@Nullable String typeText, boolean grayed) {
    return withTypeText(typeText, grayed);
  }

  @Contract(pure=true)
  public LookupElementBuilder withTypeText(@Nullable String typeText, boolean grayed) {
    return withTypeText(typeText, null, grayed);
  }

  @Contract(pure=true)
  public LookupElementBuilder withTypeText(@Nullable String typeText, @Nullable Icon typeIcon, boolean grayed) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setTypeText(typeText, typeIcon);
    presentation.setTypeGrayed(grayed);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @since 2018.1
   */
  @Contract(pure=true)
  public LookupElementBuilder withTypeIconRightAligned(boolean typeIconRightAligned) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setTypeIconRightAligned(typeIconRightAligned);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withPresentableText(String)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setPresentableText(@NotNull String presentableText) {
    return withPresentableText(presentableText);
  }

  @Contract(pure=true)
  public LookupElementBuilder withPresentableText(@NotNull String presentableText) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setItemText(presentableText);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #bold()}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setBold() {
    return bold();
  }

  @Contract(pure=true)
  public LookupElementBuilder bold() {
    return withBoldness(true);
  }

  /**
   * @deprecated use {@link #withBoldness(boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setBold(boolean bold) {
    return withBoldness(bold);
  }

  @Contract(pure=true)
  public LookupElementBuilder withBoldness(boolean bold) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setItemTextBold(bold);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #strikeout()}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setStrikeout() {
    return strikeout();
  }

  @Contract(pure=true)
  public LookupElementBuilder strikeout() {
    return withStrikeoutness(true);
  }

  /**
   * @deprecated use {@link #withStrikeoutness(boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setStrikeout(boolean strikeout) {
    return withStrikeoutness(strikeout);
  }

  @Contract(pure=true)
  public LookupElementBuilder withStrikeoutness(boolean strikeout) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setStrikeout(strikeout);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  /**
   * @deprecated use {@link #withTailText(String)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setTailText(@Nullable String tailText) {
    return withTailText(tailText);
  }

  @Contract(pure=true)
  public LookupElementBuilder withTailText(@Nullable String tailText) {
    return withTailText(tailText, false);
  }

  /**
   * @deprecated use {@link #withTailText(String, boolean)}
   */
  @Deprecated
  @Contract(pure=true)
  public LookupElementBuilder setTailText(@Nullable String tailText, boolean grayed) {
    return withTailText(tailText, grayed);
  }

  @Contract(pure=true)
  public LookupElementBuilder withTailText(@Nullable String tailText, boolean grayed) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.setTailText(tailText, grayed);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement,
                                    myAllLookupStrings, myCaseSensitive);
  }

  @Contract(pure=true)
  public LookupElementBuilder appendTailText(@NotNull String tailText, boolean grayed) {
    final LookupElementPresentation presentation = copyPresentation();
    presentation.appendTailText(tailText, grayed);
    return new LookupElementBuilder(myLookupString, myObject, myInsertHandler, null, presentation, myPsiElement, myAllLookupStrings, myCaseSensitive);
  }

  @Contract(pure=true)
  public LookupElement withAutoCompletionPolicy(AutoCompletionPolicy policy) {
    return policy.applyPolicy(this);
  }

  @NotNull
  @Override
  public String getLookupString() {
    return myLookupString;
  }

  @Nullable
  public InsertHandler<LookupElement> getInsertHandler() {
    return myInsertHandler;
  }

  @NotNull
  @Override
  public Object getObject() {
    return myObject;
  }

  @Nullable
  @Override
  public PsiElement getPsiElement() {
    if (myPsiElement != null) return myPsiElement.getElement();
    return super.getPsiElement();
  }

  @Override
  public void handleInsert(InsertionContext context) {
    if (myInsertHandler != null) {
      myInsertHandler.handleInsert(context, this);
    }
  }

  @Override
  public void renderElement(LookupElementPresentation presentation) {
    if (myRenderer != null) {
      myRenderer.renderElement(this, presentation);
    }
    else if (myHardcodedPresentation != null) {
      presentation.copyFrom(myHardcodedPresentation);
    } else {
      presentation.setItemText(myLookupString);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    LookupElementBuilder that = (LookupElementBuilder)o;

    // Handlers and renderers are compared by class, not identity, so two builders
    // configured the same way with fresh handler instances are still equal.
    final InsertHandler<LookupElement> insertHandler = that.myInsertHandler;
    if (myInsertHandler != null && insertHandler != null ? !myInsertHandler.getClass().equals(insertHandler.getClass())
                                                         : myInsertHandler != insertHandler) return false;
    if (!myLookupString.equals(that.myLookupString)) return false;
    if (!myObject.equals(that.myObject)) return false;

    final LookupElementRenderer<LookupElement> renderer = that.myRenderer;
    if (myRenderer != null && renderer != null ? !myRenderer.getClass().equals(renderer.getClass()) : myRenderer != renderer) return false;

    return true;
  }

  @Override
  public String toString() {
    return "LookupElementBuilder: string=" + getLookupString() + "; handler=" + myInsertHandler;
  }

  @Override
  public int hashCode() {
    // Must stay consistent with equals(): same fields, handler/renderer by class.
    int result = 0;
    result = 31 * result + (myInsertHandler != null ? myInsertHandler.getClass().hashCode() : 0);
    result = 31 * result + (myLookupString.hashCode());
    result = 31 * result + (myObject.hashCode());
    result = 31 * result + (myRenderer != null ? myRenderer.getClass().hashCode() : 0);
    return result;
  }
}
| |
package com.oneliang.util.common;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.oneliang.Constants;
/**
* String util class
* @author Dandelion
* @since 2008-08-21
*/
public final class StringUtil{
private static final String METCH_PATTERN_REGEX="[\\*]+";
private static final String METCH_PATTERN=Constants.Symbol.WILDCARD;
private static final String METCH_PATTERN_REPLACEMENT="[\\\\S|\\\\s]*";
public static final String BLANK="";
public static final String SPACE=" ";
public static final String NULL="null";
public static final String CRLF_STRING="\r\n";
public static final String CR_STRING="\r";
public static final String LF_STRING="\n";
public static final byte CR='\r';
public static final byte LF='\n';
public static final byte[] CRLF={CR,LF};
private static final String ZERO="0";
private StringUtil(){}
/**
* when string is null return blank,where the string is not null it return string.trim
* @param string
* @return String
*/
public static String trim(final String string){
String result=null;
if(string==null){
result=BLANK;
}else{
result=string.trim();
}
return result;
}
/**
* when string is null return blank string
* @param string
* @return String
*/
public static String nullToBlank(final String string){
return string==null?BLANK:string;
}
/**
* when string[] is null return blank array
* @param stringArray
* @return String[]{} length==0
*/
public static String[] nullToBlank(final String[] stringArray){
String[] result=stringArray;
if(stringArray==null){
result=new String[]{};
}
return result;
}
/**
* <p>Checks if a String is whitespace, empty ("") or null.</p>
*
* <pre>
* StringUtils.isBlank(null) = true
* StringUtils.isBlank("") = true
* StringUtils.isBlank(" ") = true
* StringUtils.isBlank("bob") = false
* StringUtils.isBlank(" bob ") = false
* </pre>
*
* @param string the String to check, may be null
* @return <code>true</code> if the String is null, empty or whitespace
*/
public static boolean isBlank(final String string) {
boolean result = false;
int strLen;
if (string == null || (strLen = string.length()) == 0) {
result = true;
} else {
for (int i = 0; i < strLen; i++) {
if (!Character.isWhitespace(string.charAt(i))) {
result = false;
break;
}
}
}
return result;
}
/**
* <p>
* Checks if a String is not empty (""), not null and not whitespace only.
* </p>
*
* <pre>
* StringUtils.isNotBlank(null) = false
* StringUtils.isNotBlank("") = false
* StringUtils.isNotBlank(" ") = false
* StringUtils.isNotBlank("bob") = true
* StringUtils.isNotBlank(" bob ") = true
* </pre>
*
* @param string
* the String to check, may be null
* @return <code>true</code> if the String is not empty and not null and
* not whitespace
*/
public static boolean isNotBlank(final String string) {
return !isBlank(string);
}
/**
* compare stringArray1 and stringArray2 return the different in str1
* @param stringArray1
* @param stringArray2
* @return String[]
*/
public static String[] compareString(final String[] stringArray1,final String[] stringArray2){
String[] differentString=null;
if(stringArray1!=null&&stringArray2!=null){
List<String> list=new ArrayList<String>();
for(int i=0;i<stringArray1.length;i++){
boolean sign=false;
for(int j=0;j<stringArray2.length;j++){
if(stringArray1[i].equals(stringArray2[j])){
sign=true;
break;
}
}
if(!sign){
list.add(stringArray1[i]);
}
}
differentString=new String[list.size()];
differentString=list.toArray(differentString);
}
return differentString;
}
/**
* <p>Method:only for '*' match pattern,return true of false</p>
* @param string
* @param patternString
* @return boolean
*/
public static boolean isMatchPattern(final String string, final String patternString) {
boolean result=false;
if(string!=null&&patternString!=null){
if(patternString.indexOf(METCH_PATTERN)>=0){
String matchPattern=Constants.Symbol.XOR+patternString.replaceAll(METCH_PATTERN_REGEX, METCH_PATTERN_REPLACEMENT)+Constants.Symbol.DOLLAR;
result=isMatchRegex(string, matchPattern);
}else{
if(string.equals(patternString)){
result=true;
}
}
}
return result;
}
/**
* <p>Method:only for regex</p>
* @param string
* @param regex
* @return boolean
*/
public static boolean isMatchRegex(final String string,final String regex){
boolean result=false;
if(string!=null&®ex!=null){
Pattern pattern=Pattern.compile(regex);
Matcher matcher=pattern.matcher(string);
result=matcher.find();
}
return result;
}
/**
* <p>Method:only for regex,parse regex group when regex include group</p>
* @param string
* @param regex
* @return List<String>
*/
public static List<String> parseRegexGroup(final String string,final String regex){
List<String> groupList=null;
if(string!=null&®ex!=null){
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(string);
int groupCount=matcher.groupCount();
int count=1;
groupList=new ArrayList<String>();
if(matcher.find()){
while (count<=groupCount) {
groupList.add(matcher.group(count));
count++;
}
}
}
return groupList;
}
/**
* <p>
* Method: check the string match the regex or not and return all the match
* </p>
* @param string
* @param regex
* @return List<String>
*/
public static List<String> parseStringGroup(final String string,final String regex) {
return parseStringGroup(string, regex, BLANK, BLANK, 0);
}
/**
* <p>
* Method: check the string match the regex or not and return the match
* field value
* like {xxxx} can find xxxx
* </p>
* @param string
* @param regex
* @param firstRegex
* @param firstRegexReplace
* @param lastRegexStringLength like {xxxx},last regex string is "}" so last regex string length equals 1
* @return List<String>
*/
public static List<String> parseStringGroup(final String string,final String regex,final String firstRegex,final String firstRegexReplace,final int lastRegexStringLength) {
List<String> list = null;
if(string!=null){
list = new ArrayList<String>();
int lastRegexLength=lastRegexStringLength<0?0:lastRegexStringLength;
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(string);
String group = null;
int start = 0;
while (matcher.find(start)) {
start = matcher.end();
group = matcher.group();
group = group.replaceFirst(firstRegex, firstRegexReplace);
group = group.substring(0, group.length() - lastRegexLength);
list.add(group);
}
}
return list;
}
/**
* byte array to hex string
* @param byteArray
* @return String
*/
public static String byteArrayToHexString(byte[] byteArray){
StringBuilder builder=new StringBuilder();
for(int i=0;i<byteArray.length;i++){
int byteCode=byteArray[i]&0xFF;
if(byteCode<0x10){
builder.append(0);
}
builder.append(Integer.toHexString(byteCode));
}
return builder.toString();
}
/**
 * Converts a hex string (two characters per byte) back into a byte array.
 * A trailing odd character is ignored; a null input yields null.
 * @param string the hex text to decode
 * @return byte[] the decoded bytes, or null when string is null
 */
public static byte[] hexStringToByteArray(final String string){
    if (string == null) {
        return null;
    }
    byte[] result = new byte[string.length() / 2];
    for (int i = 0; i < result.length; i++) {
        result[i] = (byte) Integer.parseInt(string.substring(i * 2, i * 2 + 2), 16);
    }
    return result;
}
/**
 * Builds a string consisting of {@code length} repetitions of the ZERO constant.
 * @param length how many zeros to emit; non-positive values yield an empty string
 * @return String the zero padding
 */
public static String fillZero(int length){
    StringBuilder padding = new StringBuilder();
    int remaining = length;
    while (remaining > 0) {
        padding.append(ZERO);
        remaining--;
    }
    return padding.toString();
}
/**
 * <p>Method: maps a string onto a bucket in the range 0..(mod-1) via its hashCode.</p>
 * A null string maps to bucket 0; a non-positive mod is treated as 1.
 * @param string the value to hash, may be null
 * @param mod the number of buckets
 * @return int the bucket index
 */
public static int stringMod(String string,int mod){
    int code = 0;
    if (string != null) {
        code = string.hashCode();
        if (code < 0) {
            code = Math.abs(code);
            // Math.abs(Integer.MIN_VALUE) is still negative: clamp to 0
            if (code < 0) {
                code = 0;
            }
        }
    }
    int buckets = mod > 0 ? mod : 1;
    return code % buckets;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.script.TemplateScript;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Represents a single document being captured before indexing and holds the source and metadata (like id, type and index).
*/
public final class IngestDocument {
public static final String INGEST_KEY = "_ingest";
private static final String INGEST_KEY_PREFIX = INGEST_KEY + ".";
private static final String SOURCE_PREFIX = SourceFieldMapper.NAME + ".";
static final String TIMESTAMP = "timestamp";
private final Map<String, Object> sourceAndMetadata;
private final Map<String, Object> ingestMetadata;
/**
 * Creates a new document from the provided elasticsearch metadata and source.
 * The metadata values are stored alongside the source entries in the single
 * {@code sourceAndMetadata} map; routing, version and version type are only
 * recorded when non-null.
 */
public IngestDocument(String index, String type, String id, String routing,
Long version, VersionType versionType, Map<String, Object> source) {
this.sourceAndMetadata = new HashMap<>();
this.sourceAndMetadata.putAll(source);
this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index);
this.sourceAndMetadata.put(MetaData.TYPE.getFieldName(), type);
this.sourceAndMetadata.put(MetaData.ID.getFieldName(), id);
if (routing != null) {
this.sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), routing);
}
if (version != null) {
sourceAndMetadata.put(MetaData.VERSION.getFieldName(), version);
}
if (versionType != null) {
sourceAndMetadata.put(MetaData.VERSION_TYPE.getFieldName(), VersionType.toString(versionType));
}
// ingest metadata always carries the pipeline execution timestamp (UTC)
this.ingestMetadata = new HashMap<>();
this.ingestMetadata.put(TIMESTAMP, ZonedDateTime.now(ZoneOffset.UTC));
}
/**
 * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties as the one provided as argument.
 * Both backing maps are deep-copied, so mutations of the copy never leak into the original.
 */
public IngestDocument(IngestDocument other) {
this(deepCopyMap(other.sourceAndMetadata), deepCopyMap(other.ingestMetadata));
}
/**
 * Constructor needed for testing that allows to create a new {@link IngestDocument} given the provided elasticsearch metadata,
 * source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at
 * init time, which makes equality comparisons impossible in tests.
 * Note: the provided maps are stored as-is (no defensive copy).
 */
public IngestDocument(Map<String, Object> sourceAndMetadata, Map<String, Object> ingestMetadata) {
this.sourceAndMetadata = sourceAndMetadata;
this.ingestMetadata = ingestMetadata;
}
/**
 * Looks up the value stored in the document under the given path.
 * @param path The path within the document in dot-notation
 * @param clazz The expected class of the field value
 * @return the value for the provided path if existing, null otherwise
 * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist
 * or if the field that is found at the provided path is not of the expected type.
 */
public <T> T getFieldValue(String path, Class<T> clazz) {
    FieldPath parsed = new FieldPath(path);
    Object current = parsed.initialContext;
    // walk the path one element at a time, failing fast on the first miss
    for (int i = 0; i < parsed.pathElements.length; i++) {
        current = resolve(parsed.pathElements[i], path, current);
    }
    return cast(path, current, clazz);
}
/**
 * Returns the value contained in the document for the provided path.
 *
 * @param path The path within the document in dot-notation
 * @param clazz The expected class of the field value
 * @param ignoreMissing The flag to determine whether to throw an exception when `path` is not found in the document.
 * @return the value for the provided path if existing, null otherwise.
 * @throws IllegalArgumentException only if ignoreMissing is false and the path is null, empty, invalid, if the field doesn't exist
 * or if the field that is found at the provided path is not of the expected type.
 */
public <T> T getFieldValue(String path, Class<T> clazz, boolean ignoreMissing) {
    try {
        return getFieldValue(path, clazz);
    } catch (IllegalArgumentException e) {
        // swallow only "field missing" failures, and only when asked to;
        // type mismatches on an existing field are always rethrown
        if (!ignoreMissing || hasField(path)) {
            throw e;
        }
        return null;
    }
}
/**
 * Returns the value contained in the document with the provided templated path
 * @param pathTemplate The path within the document in dot-notation
 * @param clazz The expected class of the field value
 * @return the value for the provided path if existing, null otherwise
 * @throws IllegalArgumentException if the pathTemplate is null, empty, invalid, if the field doesn't exist,
 * or if the field that is found at the provided path is not of the expected type.
 */
public <T> T getFieldValue(TemplateScript.Factory pathTemplate, Class<T> clazz) {
return getFieldValue(renderTemplate(pathTemplate), clazz);
}
/**
 * Returns the value contained in the document for the provided path as a byte array.
 * If the path value is a string, a base64 decode operation will happen.
 * If the path value is a byte array, it is just returned
 * @param path The path within the document in dot-notation
 * @return the byte array for the provided path if existing
 * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist
 * or if the field that is found at the provided path is not of the expected type.
 */
public byte[] getFieldValueAsBytes(String path) {
// delegate with ignoreMissing=false: a missing field always throws
return getFieldValueAsBytes(path, false);
}
/**
 * Returns the value contained in the document for the provided path as a byte array.
 * If the path value is a string, a base64 decode operation will happen.
 * If the path value is a byte array, it is just returned
 * @param path The path within the document in dot-notation
 * @param ignoreMissing The flag to determine whether to throw an exception when `path` is not found in the document.
 * @return the byte array for the provided path if existing
 * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist
 * or if the field that is found at the provided path is not of the expected type.
 */
public byte[] getFieldValueAsBytes(String path, boolean ignoreMissing) {
    Object value = getFieldValue(path, Object.class, ignoreMissing);
    if (value == null) {
        return null;
    }
    if (value instanceof byte[]) {
        return (byte[]) value;
    }
    if (value instanceof String) {
        return Base64.getDecoder().decode(value.toString());
    }
    throw new IllegalArgumentException("Content field [" + path + "] of unknown type [" + value.getClass().getName() +
        "], must be string or byte array");
}
/**
 * Checks whether the document contains a value for the provided templated path
 * @param fieldPathTemplate the template for the path within the document in dot-notation
 * @return true if the document contains a value for the field, false otherwise
 * @throws IllegalArgumentException if the path is null, empty or invalid
 */
public boolean hasField(TemplateScript.Factory fieldPathTemplate) {
// render the template against the document first, then check the concrete path
return hasField(renderTemplate(fieldPathTemplate));
}
/**
 * Checks whether the document contains a value for the provided path
 * @param path The path within the document in dot-notation
 * @return true if the document contains a value for the field, false otherwise
 * @throws IllegalArgumentException if the path is null, empty or invalid.
 */
public boolean hasField(String path) {
// out-of-range list indices report "not present" rather than throwing
return hasField(path, false);
}
/**
 * Checks whether the document contains a value for the provided path
 * @param path The path within the document in dot-notation
 * @param failOutOfRange Whether to throw an IllegalArgumentException if array is accessed outside of its range
 * @return true if the document contains a value for the field, false otherwise
 * @throws IllegalArgumentException if the path is null, empty or invalid.
 */
public boolean hasField(String path, boolean failOutOfRange) {
FieldPath fieldPath = new FieldPath(path);
Object context = fieldPath.initialContext;
// walk all path elements except the last; any unresolvable step means "absent"
for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
String pathElement = fieldPath.pathElements[i];
if (context == null) {
return false;
}
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
context = map.get(pathElement);
} else if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
try {
int index = Integer.parseInt(pathElement);
if (index < 0 || index >= list.size()) {
// only the caller decides whether out-of-range is an error or just "absent"
if (failOutOfRange) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
list.size() + "] as part of path [" + path +"]");
} else {
return false;
}
}
context = list.get(index);
} catch (NumberFormatException e) {
// a non-numeric element can never index a list
return false;
}
} else {
// scalar reached before the path was exhausted
return false;
}
}
// the last element is checked for presence rather than resolved
String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
return map.containsKey(leafKey);
}
if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
try {
int index = Integer.parseInt(leafKey);
if (index >= 0 && index < list.size()) {
return true;
} else {
if (failOutOfRange) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
list.size() + "] as part of path [" + path +"]");
} else {
return false;
}
}
} catch (NumberFormatException e) {
return false;
}
}
return false;
}
/**
 * Removes the field identified by the provided path.
 * @param fieldPathTemplate Resolves to the path with dot-notation within the document
 * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
 */
public void removeField(TemplateScript.Factory fieldPathTemplate) {
// render the template against the document first, then remove the concrete path
removeField(renderTemplate(fieldPathTemplate));
}
/**
 * Removes the field identified by the provided path.
 * @param path the path of the field to be removed
 * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
 */
public void removeField(String path) {
FieldPath fieldPath = new FieldPath(path);
Object context = fieldPath.initialContext;
// resolve everything up to the parent of the leaf; resolve() throws on a miss
for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
context = resolve(fieldPath.pathElements[i], path, context);
}
String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
if (map.containsKey(leafKey)) {
map.remove(leafKey);
return;
}
throw new IllegalArgumentException("field [" + leafKey + "] not present as part of path [" + path + "]");
}
if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
int index;
try {
index = Integer.parseInt(leafKey);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" +
path + "]", e);
}
if (index < 0 || index >= list.size()) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() +
"] as part of path [" + path + "]");
}
list.remove(index);
return;
}
// parent is neither Map nor List: report a precise error for null vs scalar
if (context == null) {
throw new IllegalArgumentException("cannot remove [" + leafKey + "] from null as part of path [" + path + "]");
}
throw new IllegalArgumentException("cannot remove [" + leafKey + "] from object of type [" + context.getClass().getName() +
"] as part of path [" + path + "]");
}
/**
 * Resolves a single path element against the current context object.
 * Maps are resolved by key, lists by numeric index; anything else
 * (including null) raises an IllegalArgumentException naming the full path.
 */
private static Object resolve(String pathElement, String fullPath, Object context) {
if (context == null) {
throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + fullPath + "]");
}
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
if (map.containsKey(pathElement)) {
return map.get(pathElement);
}
throw new IllegalArgumentException("field [" + pathElement + "] not present as part of path [" + fullPath + "]");
}
if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
int index;
try {
index = Integer.parseInt(pathElement);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path ["
+ fullPath + "]", e);
}
if (index < 0 || index >= list.size()) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() +
"] as part of path [" + fullPath + "]");
}
return list.get(index);
}
// context is a scalar: it cannot be traversed any further
throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() +
"] as part of path [" + fullPath + "]");
}
/**
 * Appends the provided value to the provided path in the document.
 * Any non existing path element will be created.
 * If the path identifies a list, the value will be appended to the existing list.
 * If the path identifies a scalar, the scalar will be converted to a list and
 * the provided value will be added to the newly created list.
 * Supports multiple values too provided in forms of list, in that case all the values will be appended to the
 * existing (or newly created) list.
 * @param path The path within the document in dot-notation
 * @param value The value or values to append to the existing ones
 * @throws IllegalArgumentException if the path is null, empty or invalid.
 */
public void appendFieldValue(String path, Object value) {
// append=true switches setFieldValue into list-append mode
setFieldValue(path, value, true);
}
/**
 * Appends the provided value to the provided path in the document.
 * Any non existing path element will be created.
 * If the path identifies a list, the value will be appended to the existing list.
 * If the path identifies a scalar, the scalar will be converted to a list and
 * the provided value will be added to the newly created list.
 * Supports multiple values too provided in forms of list, in that case all the values will be appended to the
 * existing (or newly created) list.
 * @param fieldPathTemplate Resolves to the path with dot-notation within the document
 * @param valueSource The value source that will produce the value or values to append to the existing ones
 * @throws IllegalArgumentException if the path is null, empty or invalid.
 */
public void appendFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) {
// both the path template and the value are rendered against the same model snapshot
Map<String, Object> model = createTemplateModel();
appendFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model));
}
/**
 * Sets the provided value to the provided path in the document.
 * Any non existing path element will be created.
 * If the last item in the path is a list, the value will replace the existing list as a whole.
 * Use {@link #appendFieldValue(String, Object)} to append values to lists instead.
 * @param path The path within the document in dot-notation
 * @param value The value to put in for the path key
 * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the
 * item identified by the provided path.
 */
public void setFieldValue(String path, Object value) {
setFieldValue(path, value, false);
}
/**
 * Sets the provided value to the provided path in the document.
 * Any non existing path element will be created. If the last element is a list,
 * the value will replace the existing list.
 * @param fieldPathTemplate Resolves to the path with dot-notation within the document
 * @param valueSource The value source that will produce the value to put in for the path key
 * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the
 * item identified by the provided path.
 */
public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) {
// both the path template and the value are rendered against the same model snapshot
Map<String, Object> model = createTemplateModel();
setFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model), false);
}
/**
 * Shared implementation behind setFieldValue/appendFieldValue.
 * Walks the path, creating intermediate maps for missing map keys
 * (missing list indices are an error), then either replaces the leaf
 * value or, when {@code append} is true, appends to it via appendValues.
 */
private void setFieldValue(String path, Object value, boolean append) {
FieldPath fieldPath = new FieldPath(path);
Object context = fieldPath.initialContext;
for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
String pathElement = fieldPath.pathElements[i];
if (context == null) {
throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + path + "]");
}
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
if (map.containsKey(pathElement)) {
context = map.get(pathElement);
} else {
// auto-vivify missing intermediate map entries
HashMap<Object, Object> newMap = new HashMap<>();
map.put(pathElement, newMap);
context = newMap;
}
} else if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
int index;
try {
index = Integer.parseInt(pathElement);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("[" + pathElement +
"] is not an integer, cannot be used as an index as part of path [" + path + "]", e);
}
if (index < 0 || index >= list.size()) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
list.size() + "] as part of path [" + path + "]");
}
context = list.get(index);
} else {
throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" +
context.getClass().getName() + "] as part of path [" + path + "]");
}
}
String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
if (context == null) {
throw new IllegalArgumentException("cannot set [" + leafKey + "] with null parent as part of path [" + path + "]");
}
if (context instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) context;
if (append) {
if (map.containsKey(leafKey)) {
Object object = map.get(leafKey);
List<Object> list = appendValues(object, value);
// appendValues returns a new list only when the existing value was a scalar
if (list != object) {
map.put(leafKey, list);
}
} else {
List<Object> list = new ArrayList<>();
appendValues(list, value);
map.put(leafKey, list);
}
return;
}
map.put(leafKey, value);
} else if (context instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) context;
int index;
try {
index = Integer.parseInt(leafKey);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" +
path + "]", e);
}
if (index < 0 || index >= list.size()) {
throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() +
"] as part of path [" + path + "]");
}
if (append) {
Object object = list.get(index);
List<Object> newList = appendValues(object, value);
if (newList != object) {
list.set(index, newList);
}
return;
}
list.set(index, value);
} else {
throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" +
context.getClass().getName() + "] as part of path [" + path + "]");
}
}
/**
 * Appends {@code value} to {@code maybeList}. When the target is already a
 * list it is appended in place and returned; a scalar target is first
 * promoted to a new list containing the scalar.
 */
@SuppressWarnings("unchecked")
private static List<Object> appendValues(Object maybeList, Object value) {
    if (maybeList instanceof List) {
        // already a list: append the provided values in place
        List<Object> existing = (List<Object>) maybeList;
        appendValues(existing, value);
        return existing;
    }
    // scalar: promote it to a list, then append the provided values
    List<Object> promoted = new ArrayList<>();
    promoted.add(maybeList);
    appendValues(promoted, value);
    return promoted;
}
/**
 * Adds {@code value} to {@code list}: a List value contributes each of its
 * elements, any other value is added as a single element.
 */
private static void appendValues(List<Object> list, Object value) {
    if (value instanceof List) {
        // addAll instead of stream().forEach(list::add): bulk-append without
        // the side-effecting stream pipeline
        list.addAll((List<?>) value);
    } else {
        list.add(value);
    }
}
/**
 * Casts {@code object} to {@code clazz}, passing null through unchanged and
 * raising an IllegalArgumentException naming the path on a type mismatch.
 */
private static <T> T cast(String path, Object object, Class<T> clazz) {
    if (object == null) {
        return null;
    }
    if (!clazz.isInstance(object)) {
        throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" +
            clazz.getName() + "]");
    }
    return clazz.cast(object);
}
/**
 * Renders the given template against the current document model
 * (source, metadata and ingest metadata) and returns the result.
 */
public String renderTemplate(TemplateScript.Factory template) {
return template.newInstance(createTemplateModel()).execute();
}
/**
 * Builds the model used to render templates: all top-level source/metadata
 * entries, plus the source map under '_source' and the ingest metadata under '_ingest'.
 */
private Map<String, Object> createTemplateModel() {
Map<String, Object> model = new HashMap<>(sourceAndMetadata);
model.put(SourceFieldMapper.NAME, sourceAndMetadata);
// If there is a field in the source with the name '_ingest' it gets overwritten here,
// if access to that field is required then it get accessed via '_source._ingest'
model.put(INGEST_KEY, ingestMetadata);
return model;
}
/**
 * one time operation that extracts the metadata fields from the ingest document and returns them.
 * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call.
 * Absent metadata fields are still present in the returned map, mapped to null.
 */
public Map<MetaData, Object> extractMetadata() {
Map<MetaData, Object> metadataMap = new EnumMap<>(MetaData.class);
for (MetaData metaData : MetaData.values()) {
// remove() both captures the value and strips it from the document
metadataMap.put(metaData, sourceAndMetadata.remove(metaData.getFieldName()));
}
return metadataMap;
}
/**
 * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones.
 * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)}
 * Note: this returns the live internal map, not a copy.
 */
public Map<String, Object> getIngestMetadata() {
return this.ingestMetadata;
}
/**
 * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the
 * metadata fields will not be present anymore.
 * Modify the document instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)}
 * Note: this returns the live internal map, not a copy.
 */
public Map<String, Object> getSourceAndMetadata() {
return this.sourceAndMetadata;
}
// Typed convenience wrapper around deepCopy for map arguments; the cast is
// safe because deepCopy always returns a Map for a Map input.
@SuppressWarnings("unchecked")
private static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) {
return (Map<K, V>) deepCopy(source);
}
/**
 * Recursively copies a document value. Maps, lists, byte arrays and Dates are
 * duplicated; null and immutable scalar types (String, boxed numbers, Boolean,
 * ZonedDateTime) are returned as-is. Any other type is rejected.
 */
private static Object deepCopy(Object value) {
    if (value instanceof Map) {
        Map<?, ?> original = (Map<?, ?>) value;
        Map<Object, Object> result = new HashMap<>(original.size());
        for (Map.Entry<?, ?> entry : original.entrySet()) {
            result.put(entry.getKey(), deepCopy(entry.getValue()));
        }
        return result;
    }
    if (value instanceof List) {
        List<?> original = (List<?>) value;
        List<Object> result = new ArrayList<>(original.size());
        for (Object element : original) {
            result.add(deepCopy(element));
        }
        return result;
    }
    if (value instanceof byte[]) {
        byte[] original = (byte[]) value;
        return Arrays.copyOf(original, original.length);
    }
    if (value == null || value instanceof String || value instanceof Integer
        || value instanceof Long || value instanceof Float
        || value instanceof Double || value instanceof Boolean
        || value instanceof ZonedDateTime) {
        // immutable values can safely be shared between copies
        return value;
    }
    if (value instanceof Date) {
        return ((Date) value).clone();
    }
    throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]");
}
// Two documents are equal when both backing maps are equal; strict class
// comparison is kept (the class is final, so instanceof would behave the same).
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    IngestDocument that = (IngestDocument) obj;
    return Objects.equals(sourceAndMetadata, that.sourceAndMetadata)
        && Objects.equals(ingestMetadata, that.ingestMetadata);
}
// Consistent with equals: hashes the same two maps equals compares.
@Override
public int hashCode() {
return Objects.hash(sourceAndMetadata, ingestMetadata);
}
// Debug representation listing both backing maps.
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("IngestDocument{");
    sb.append(" sourceAndMetadata=").append(sourceAndMetadata);
    sb.append(", ingestMetadata=").append(ingestMetadata);
    sb.append('}');
    return sb.toString();
}
/**
 * The elasticsearch metadata fields a document carries, mapped to the
 * reserved field names under which they are stored in sourceAndMetadata.
 */
public enum MetaData {
INDEX(IndexFieldMapper.NAME),
TYPE(TypeFieldMapper.NAME),
ID(IdFieldMapper.NAME),
ROUTING(RoutingFieldMapper.NAME),
VERSION(VersionFieldMapper.NAME),
// no dedicated field mapper constant exists for the version type
VERSION_TYPE("_version_type");
private final String fieldName;
MetaData(String fieldName) {
this.fieldName = fieldName;
}
public String getFieldName() {
return fieldName;
}
}
/**
 * Parses a dot-notation path into its elements and picks the map the walk
 * starts from: the ingest metadata for '_ingest.' paths, otherwise the
 * source-and-metadata map (with an optional leading '_source.' stripped).
 */
private class FieldPath {
    private final String[] pathElements;
    private final Object initialContext;
    private FieldPath(String path) {
        if (Strings.isEmpty(path)) {
            throw new IllegalArgumentException("path cannot be null nor empty");
        }
        String trimmedPath;
        if (path.startsWith(INGEST_KEY_PREFIX)) {
            // path addresses the ingest metadata map
            initialContext = ingestMetadata;
            trimmedPath = path.substring(INGEST_KEY_PREFIX.length());
        } else {
            initialContext = sourceAndMetadata;
            trimmedPath = path.startsWith(SOURCE_PREFIX) ? path.substring(SOURCE_PREFIX.length()) : path;
        }
        this.pathElements = trimmedPath.split("\\.");
        if (pathElements.length == 1 && pathElements[0].isEmpty()) {
            throw new IllegalArgumentException("path [" + path + "] is not valid");
        }
    }
}
}
| |
package com.vladmihalcea.hibernate.masterclass.laboratory.fetch;
import com.vladmihalcea.hibernate.masterclass.laboratory.util.AbstractTest;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.Session;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.Restrictions;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.*;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.*;
/**
* HibernateApiFetchStrategyTest - Test HQL and Criteria fetch plan overriding capabilities
*
* @author Vlad Mihalcea
*/
public class HibernateApiMultiEagerCollectionFetchStrategyTest extends AbstractTest {
// Logger for the fetch-scenario markers emitted by each test phase.
protected final Logger LOGGER = LoggerFactory.getLogger(getClass());
// Id of the Product persisted in init(); every fetch scenario loads it by this id.
private Long productId;
// Entity classes registered with the test SessionFactory for this scenario.
@Override
protected Class<?>[] entities() {
return new Class<?>[] {
WarehouseProductInfo.class,
Importer.class,
Image.class,
Product.class,
Company.class,
SubVersion.class,
Version.class,
Review.class,
};
}
// Seeds the database with one Product wired to a Company, an Importer,
// two Images, two Reviews and a warehouse entry, and records its id
// for the fetch tests below.
@Override
public void init() {
super.init();
productId = doInTransaction(new TransactionCallable<Long>() {
@Override
public Long execute(Session session) {
Company company = new Company();
company.setName("TV Company");
session.persist(company);
Product product = new Product("tvCode");
product.setName("TV");
product.setCompany(company);
Image frontImage = new Image();
frontImage.setName("front image");
frontImage.setIndex(0);
Image sideImage = new Image();
sideImage.setName("side image");
sideImage.setIndex(1);
product.addImage(frontImage);
product.addImage(sideImage);
WarehouseProductInfo warehouseProductInfo = new WarehouseProductInfo();
warehouseProductInfo.setQuantity(101);
product.addWarehouse(warehouseProductInfo);
Importer importer = new Importer();
importer.setName("Importer");
session.persist(importer);
product.setImporter(importer);
Review review1 = new Review();
review1.setComment("Great product");
Review review2 = new Review();
review2.setComment("Sensational product");
product.addReview(review1);
product.addReview(review2);
// cascades from Product are expected to persist images, reviews and warehouse info
session.persist(product);
return product.getId();
}
});
}
// Exercises the same Product lookup through get(), HQL, and Criteria, showing
// how eager collection fetching affects the shape of the Criteria result set.
@Test
public void testFetchChild() {
doInTransaction(new TransactionCallable<Void>() {
@Override
public Void execute(Session session) {
LOGGER.info("Fetch using find");
Product product = (Product) session.get(Product.class, productId);
assertNotNull(product);
return null;
}
});
doInTransaction(new TransactionCallable<Void>() {
@Override
public Void execute(Session session) {
LOGGER.info("Fetch using JPQL");
Product product = (Product) session.createQuery(
"select p " +
"from Product p " +
"where p.id = :productId")
.setParameter("productId", productId)
.uniqueResult();
assertNotNull(product);
return null;
}
});
doInTransaction(new TransactionCallable<Void>() {
@Override
public Void execute(Session session) {
LOGGER.info("Fetch using Criteria");
Product product = (Product) session.createCriteria(Product.class)
.add(Restrictions.eq("id", productId))
.uniqueResult();
assertNotNull(product);
return null;
}
});
doInTransaction(new TransactionCallable<Void>() {
@Override
public Void execute(Session session) {
LOGGER.info("Fetch list using Criteria");
// 4 rows for one product: the eagerly joined collections produce a
// cartesian product (presumably 2 images x 2 reviews - confirm against
// the Product mapping, which is outside this file section)
List products = session.createCriteria(Product.class)
.add(Restrictions.eq("id", productId))
.list();
assertEquals(4, products.size());
// every row materializes the same entity instance from the session cache
assertSame(products.get(0), products.get(1));
return null;
}
});
doInTransaction(new TransactionCallable<Void>() {
@Override
public Void execute(Session session) {
LOGGER.info("Fetch distinct list using Criteria");
// DISTINCT_ROOT_ENTITY collapses the duplicate rows in memory
List products = session.createCriteria(Product.class)
.add(Restrictions.eq("id", productId))
.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY)
.list();
assertEquals(1, products.size());
return null;
}
});
}
/**
 * Company entity owning products; identity for equals/hashCode is the
 * (unique, immutable) name rather than the generated id.
 */
@Entity(name = "Company")
public static class Company {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
@Column(unique = true, updatable = false)
private String name;
public Long getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public int hashCode() {
// hash on the business key only, so the value is stable across persist
HashCodeBuilder hcb = new HashCodeBuilder();
hcb.append(name);
return hcb.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Company)) {
return false;
}
Company that = (Company) obj;
EqualsBuilder eb = new EqualsBuilder();
eb.append(name, that.name);
return eb.isEquals();
}
@Override
public String toString() {
ToStringBuilder tsb = new ToStringBuilder(this);
tsb.append("id", id);
tsb.append("name", name);
return tsb.toString();
}
}
/**
 * Image entity belonging to a Product, ordering its Versions by type.
 * Equality is based on the (name, product) pair.
 */
@Entity(name = "Image")
public static class Image {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
@Column(updatable = false)
private String name;
// NOTE(review): 'index' is a reserved word in several SQL dialects, and the
// unique constraint is global rather than per-product - confirm both are intended
@Column(unique = true)
private int index;
@ManyToOne(fetch = FetchType.LAZY)
private Product product;
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "image", orphanRemoval = true)
@OrderBy("type")
private Set<Version> versions = new LinkedHashSet<Version>();
public Long getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getIndex() {
return index;
}
public Product getProduct() {
return product;
}
public void setProduct(Product product) {
this.product = product;
}
public void setIndex(int index) {
this.index = index;
}
public Set<Version> getVersions() {
return versions;
}
// keeps both sides of the bidirectional Image/Version association in sync
public void addVersion(Version version) {
versions.add(version);
version.setImage(this);
}
public void removeVersion(Version version) {
versions.remove(version);
version.setImage(null);
}
@Override
public int hashCode() {
HashCodeBuilder hcb = new HashCodeBuilder();
hcb.append(name);
hcb.append(product);
return hcb.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Image)) {
return false;
}
Image that = (Image) obj;
EqualsBuilder eb = new EqualsBuilder();
eb.append(name, that.name);
eb.append(product, that.product);
return eb.isEquals();
}
@Override
public String toString() {
ToStringBuilder tsb = new ToStringBuilder(this);
tsb.append("id", id);
tsb.append("name", name);
tsb.append("index", index);
tsb.append("product", product);
return tsb.toString();
}
}
@Entity(name = "Version")
public static class Version {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    @Column
    private String type;

    @ManyToOne(fetch = FetchType.EAGER)
    private Image image;

    /** Child sub-versions; lifecycle fully owned by this version. */
    @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "version", orphanRemoval = true)
    private Set<SubVersion> subVersions = new LinkedHashSet<SubVersion>();

    public Long getId() {
        return id;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Image getImage() {
        return image;
    }

    public void setImage(Image image) {
        this.image = image;
    }

    public Set<SubVersion> getSubVersions() {
        return subVersions;
    }

    public void setSubVersions(Set<SubVersion> subVersions) {
        this.subVersions = subVersions;
    }

    /** Adds a sub-version and keeps both sides of the association in sync. */
    public void addSubVersion(SubVersion subVersion) {
        subVersions.add(subVersion);
        subVersion.setVersion(this);
    }

    /** Removes a sub-version and clears its back-reference. */
    public void removeSubVersion(SubVersion subVersion) {
        subVersions.remove(subVersion);
        subVersion.setVersion(null);
    }

    /** Hash derives from {@code type} and {@code image}, matching {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(type).append(image).toHashCode();
    }

    /** Equality is based on {@code type} and {@code image}. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Version)) {
            return false;
        }
        final Version other = (Version) obj;
        return new EqualsBuilder()
                .append(type, other.getType())
                .append(image, other.getImage())
                .isEquals();
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("id", id)
                .append("type", type)
                .append("image", image)
                .toString();
    }
}
@Entity(name = "SubVersion")
public static class SubVersion {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    private String code;

    @ManyToOne(fetch = FetchType.EAGER)
    private Version version;

    public Long getId() {
        return id;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public Version getVersion() {
        return version;
    }

    public void setVersion(Version version) {
        this.version = version;
    }

    /** Hash derives from {@code code} only, matching {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(getCode()).toHashCode();
    }

    /** Equality is based on {@code code} only. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SubVersion)) {
            return false;
        }
        final SubVersion other = (SubVersion) obj;
        return new EqualsBuilder().append(getCode(), other.getCode()).isEquals();
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("id", id)
                .append("code", getCode())
                .toString();
    }
}
@Entity(name = "Importer")
public static class Importer {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    private String name;

    public Long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Hash derives from {@code name} only, matching {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(name).toHashCode();
    }

    /** Equality is based on {@code name} only. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Importer)) {
            return false;
        }
        final Importer other = (Importer) obj;
        return new EqualsBuilder().append(name, other.getName()).isEquals();
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("id", id)
                .append("name", name)
                .toString();
    }
}
@Entity(name = "review")
public static class Review {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    @ManyToOne
    private Product product;

    private String comment;

    public Long getId() {
        return id;
    }

    public Product getProduct() {
        return product;
    }

    public void setProduct(Product product) {
        this.product = product;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    /** Hash derives from {@code comment} only, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        HashCodeBuilder hcb = new HashCodeBuilder();
        hcb.append(comment);
        return hcb.toHashCode();
    }

    /**
     * Equality is based on {@code comment} only.
     *
     * Bug fix: the original compared against {@code Importer} (and its
     * {@code name}) instead of {@code Review} — a copy-paste from the
     * Importer entity — so a Review was never equal to another Review and
     * equals/hashCode were inconsistent.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Review)) {
            return false;
        }
        Review that = (Review) obj;
        EqualsBuilder eb = new EqualsBuilder();
        eb.append(comment, that.getComment());
        return eb.isEquals();
    }

    @Override
    public String toString() {
        ToStringBuilder tsb = new ToStringBuilder(this);
        tsb.append("id", id);
        tsb.append("comment", comment);
        return tsb.toString();
    }
}
@Entity(name = "Product")
public static class Product {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    private String name;

    /** Business code; immutable after insert. */
    @Column(updatable = false)
    private String code;

    private Integer quantity;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "company_id", nullable = false)
    private Company company;

    @OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "product", optional = false)
    private WarehouseProductInfo warehouseProductInfo;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "importer_id")
    private Importer importer;

    /** Images ordered by their index; lifecycle fully owned by this product. */
    @OneToMany(fetch = FetchType.EAGER, cascade = CascadeType.ALL, mappedBy = "product", orphanRemoval = true)
    @OrderBy("index")
    private Set<Image> images = new LinkedHashSet<Image>();

    @OneToMany(fetch = FetchType.EAGER, cascade = CascadeType.ALL, mappedBy = "product", orphanRemoval = true)
    private Set<Review> reviews = new LinkedHashSet<Review>();

    /** Optimistic-locking version column. */
    @javax.persistence.Version
    private int version;

    public Product() {
    }

    public Product(String code) {
        this.code = code;
    }

    public Long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public Integer getQuantity() {
        return quantity;
    }

    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    public Company getCompany() {
        return company;
    }

    public void setCompany(Company company) {
        this.company = company;
    }

    public Set<Image> getImages() {
        return images;
    }

    public WarehouseProductInfo getWarehouseProductInfo() {
        return warehouseProductInfo;
    }

    public void setWarehouseProductInfo(WarehouseProductInfo warehouseProductInfo) {
        this.warehouseProductInfo = warehouseProductInfo;
    }

    public Importer getImporter() {
        return importer;
    }

    public void setImporter(Importer importer) {
        this.importer = importer;
    }

    public final int getVersion() {
        return version;
    }

    public void setImages(Set<Image> images) {
        this.images = images;
    }

    /** Adds an image and keeps both sides of the association in sync. */
    public void addImage(Image image) {
        images.add(image);
        image.setProduct(this);
    }

    /** Removes an image and clears its back-reference. */
    public void removeImage(Image image) {
        images.remove(image);
        image.setProduct(null);
    }

    /** Adds a review and keeps both sides of the association in sync. */
    public void addReview(Review review) {
        reviews.add(review);
        review.setProduct(this);
    }

    /** Attaches the warehouse info and sets its back-reference. */
    public void addWarehouse(WarehouseProductInfo warehouseProductInfo) {
        warehouseProductInfo.setProduct(this);
        this.setWarehouseProductInfo(warehouseProductInfo);
    }

    /** Hash derives from {@code name} and {@code company}, matching {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(name).append(company).toHashCode();
    }

    /** Equality is based on {@code name} and {@code company}. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Product)) {
            return false;
        }
        final Product other = (Product) obj;
        return new EqualsBuilder()
                .append(name, other.getName())
                .append(company, other.getCompany())
                .isEquals();
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("id", id)
                .append("name", name)
                .append("version", version)
                .toString();
    }
}
@Entity(name = "WarehouseProductInfo")
public static class WarehouseProductInfo {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    private int quantity;

    @OneToOne(fetch = FetchType.EAGER)
    @PrimaryKeyJoinColumn
    private Product product;

    public Long getId() {
        return id;
    }

    public int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }

    public Product getProduct() {
        return product;
    }

    public void setProduct(Product product) {
        this.product = product;
    }

    /** Hash derives from {@code product} only, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        HashCodeBuilder hcb = new HashCodeBuilder();
        hcb.append(product);
        return hcb.toHashCode();
    }

    /** Equality is based on the owning {@code product} only. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof WarehouseProductInfo)) {
            return false;
        }
        WarehouseProductInfo that = (WarehouseProductInfo) obj;
        EqualsBuilder eb = new EqualsBuilder();
        eb.append(product, that.getProduct());
        return eb.isEquals();
    }

    @Override
    public String toString() {
        ToStringBuilder tsb = new ToStringBuilder(this);
        tsb.append("id", id);
        // Bug fix: the original labeled the quantity field "name" (copy-paste error).
        tsb.append("quantity", quantity);
        tsb.append("product", product);
        return tsb.toString();
    }
}
}
| |
package com.barcodescannerfordialogs.qrscanner;
import android.content.Context;
import android.hardware.Camera;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import java.util.List;
/**
 * SurfaceView-based camera preview that drives the full preview lifecycle:
 * it registers itself as the SurfaceHolder callback, configures preview size
 * and display orientation, delivers frames through a one-shot preview
 * callback, and mimics continuous auto-focus by re-arming a one-shot
 * autoFocus() roughly once per second.
 */
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback
{
private static final String TAG = "CameraPreview";
private Camera mCamera;
// Handler used to re-post the delayed auto-focus runnable.
private Handler mAutoFocusHandler;
private boolean mPreviewing = true;
private boolean mAutoFocus = true;
// Receives exactly one preview frame per setOneShotPreviewCallback() call.
private Camera.PreviewCallback mPreviewCallback;
// Upper bound on the preview width selected by getOptimalPreviewSize().
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
// Requested view dimensions; stored but not otherwise used in this class.
int mCameraWidth;
int mCameraHeight;
public CameraPreview(Context context, int width, int height)
{
super(context);
mCameraWidth = width;
mCameraHeight = height;
}
public CameraPreview(Context context, AttributeSet attrs, int width, int height)
{
super(context, attrs);
mCameraWidth = width;
mCameraHeight = height;
}
/** Associates an already-opened camera and a frame callback; call before initCameraPreview(). */
public void setCamera(Camera camera, Camera.PreviewCallback previewCallback) {
mCamera = camera;
mPreviewCallback = previewCallback;
mAutoFocusHandler = new Handler();
}
/**
 * Hooks this view into the surface lifecycle. When a preview is already
 * flagged as running, only a re-layout is requested; otherwise the preview
 * is started immediately.
 */
public void initCameraPreview() {
if(mCamera != null) {
getHolder().addCallback(this);
//getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
if(mPreviewing) {
requestLayout();
} else {
showCameraPreview();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
showCameraPreview();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
// Surface may already be gone; nothing to restart in that case.
if(surfaceHolder.getSurface() == null) {
return;
}
// Restart the preview so it picks up the new surface size/rotation.
stopCameraPreview();
showCameraPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
stopCameraPreview();
}
/**
 * Configures the camera (preview size, display orientation, one-shot frame
 * callback) and starts the preview, kicking off the auto-focus loop when
 * enabled. Camera failures are logged and swallowed.
 */
public void showCameraPreview() {
if(mCamera != null) {
try {
mPreviewing = true;
setupCameraParameters();
mCamera.setPreviewDisplay(getHolder());
mCamera.setDisplayOrientation(getDisplayOrientation());
mCamera.setOneShotPreviewCallback(mPreviewCallback);
mCamera.startPreview();
if(mAutoFocus) {
mCamera.autoFocus(autoFocusCB);
}
} catch (Exception e) {
Log.e(TAG, e.toString(), e);
}
}
}
/** Cancels auto-focus, detaches the frame callback and stops the preview; failures are logged and swallowed. */
public void stopCameraPreview() {
if(mCamera != null) {
try {
mPreviewing = false;
mCamera.cancelAutoFocus();
mCamera.setOneShotPreviewCallback(null);
mCamera.stopPreview();
} catch(Exception e) {
Log.e(TAG, e.toString(), e);
}
}
}
/** Applies the optimal preview size to the camera parameters. */
public void setupCameraParameters() {
Camera.Size optimalSize = getOptimalPreviewSize();
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(optimalSize.width, optimalSize.height);
mCamera.setParameters(parameters);
}
/**
 * Computes the clockwise rotation (0/90/180/270) to pass to
 * Camera.setDisplayOrientation() so the preview matches the current display
 * rotation. Always queries the back-facing camera's mounting orientation,
 * even though the front-facing branch below exists for completeness.
 */
public int getDisplayOrientation() {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, info);
WindowManager wm = (WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
int rotation = display.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
/**
 * Picks the widest supported preview size that is (approximately) 4:3 and no
 * wider than PREVIEW_SIZE_MAX_WIDTH. The integer division makes the aspect
 * check deliberately approximate. Falls back to the first supported size
 * when nothing matches.
 */
private Camera.Size getOptimalPreviewSize()
{
List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
Camera.Size optimalSize = null;
// Try to find a size that matches the desired aspect ratio
for (Camera.Size size : sizes)
{
boolean isDesiredRatio = (size.width / 4) == (size.height / 3);
boolean isBetterSize = (optimalSize == null || size.width > optimalSize.width);
boolean isInBounds = size.width <= PREVIEW_SIZE_MAX_WIDTH;
if(isDesiredRatio && isBetterSize && isInBounds)
{
optimalSize = size;
}
}
if(optimalSize == null)
{
return sizes.get(0);
}
// DEBUG
Log.i(TAG, "optimal size: " + optimalSize.width + "x" + optimalSize.height);
return optimalSize;
}
/**
 * Enables or disables the continuous auto-focus loop. No-op unless a camera
 * is set, a preview is running, and the requested state actually changes.
 */
public void setAutoFocus(boolean state) {
if(mCamera != null && mPreviewing) {
if(state == mAutoFocus) {
return;
}
mAutoFocus = state;
if(mAutoFocus) {
Log.v(TAG, "Starting autofocus");
mCamera.autoFocus(autoFocusCB);
} else {
Log.v(TAG, "Cancelling autofocus");
mCamera.cancelAutoFocus();
}
}
}
// Re-arms autoFocus() only if the preview is still running and auto-focus is enabled.
private Runnable doAutoFocus = new Runnable() {
public void run() {
if(mCamera != null && mPreviewing && mAutoFocus) {
mCamera.autoFocus(autoFocusCB);
}
}
};
// Mimic continuous auto-focusing
Camera.AutoFocusCallback autoFocusCB = new Camera.AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
// Schedule the next focus pass one second after each focus attempt completes.
mAutoFocusHandler.postDelayed(doAutoFocus, 1000);
}
};
}
| |
package com.thinkbiganalytics.feedmgr.service.feed;
/*-
* #%L
* thinkbig-feed-manager-controller
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.Serializable;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.ws.rs.NotFoundException;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.DataAccessException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.security.core.context.SecurityContextHolder;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.thinkbiganalytics.datalake.authorization.service.HadoopAuthorizationService;
import com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder;
import com.thinkbiganalytics.feedmgr.nifi.PropertyExpressionResolver;
import com.thinkbiganalytics.feedmgr.nifi.cache.NifiFlowCache;
import com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata;
import com.thinkbiganalytics.feedmgr.rest.model.FeedSummary;
import com.thinkbiganalytics.feedmgr.rest.model.NifiFeed;
import com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate;
import com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplateRequest;
import com.thinkbiganalytics.feedmgr.rest.model.ReusableTemplateConnectionInfo;
import com.thinkbiganalytics.feedmgr.rest.model.UIFeed;
import com.thinkbiganalytics.feedmgr.rest.model.UserField;
import com.thinkbiganalytics.feedmgr.rest.model.UserProperty;
import com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl;
import com.thinkbiganalytics.feedmgr.service.UserPropertyTransform;
import com.thinkbiganalytics.feedmgr.service.feed.datasource.DerivedDatasourceFactory;
import com.thinkbiganalytics.feedmgr.service.security.SecurityService;
import com.thinkbiganalytics.feedmgr.service.template.FeedManagerTemplateService;
import com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService;
import com.thinkbiganalytics.feedmgr.sla.ServiceLevelAgreementService;
import com.thinkbiganalytics.json.ObjectMapperSerializer;
import com.thinkbiganalytics.metadata.api.MetadataAccess;
import com.thinkbiganalytics.metadata.api.category.Category;
import com.thinkbiganalytics.metadata.api.category.CategoryProvider;
import com.thinkbiganalytics.metadata.api.category.security.CategoryAccessControl;
import com.thinkbiganalytics.metadata.api.datasource.Datasource;
import com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider;
import com.thinkbiganalytics.metadata.api.event.MetadataChange;
import com.thinkbiganalytics.metadata.api.event.MetadataEventListener;
import com.thinkbiganalytics.metadata.api.event.MetadataEventService;
import com.thinkbiganalytics.metadata.api.event.feed.FeedChange;
import com.thinkbiganalytics.metadata.api.event.feed.FeedChangeEvent;
import com.thinkbiganalytics.metadata.api.event.feed.FeedPropertyChangeEvent;
import com.thinkbiganalytics.metadata.api.extension.UserFieldDescriptor;
import com.thinkbiganalytics.metadata.api.feed.Feed;
import com.thinkbiganalytics.metadata.api.feed.FeedProperties;
import com.thinkbiganalytics.metadata.api.feed.FeedProvider;
import com.thinkbiganalytics.metadata.api.feed.FeedSource;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeedProvider;
import com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl;
import com.thinkbiganalytics.metadata.api.security.HadoopSecurityGroup;
import com.thinkbiganalytics.metadata.api.template.FeedManagerTemplate;
import com.thinkbiganalytics.metadata.api.template.FeedManagerTemplateProvider;
import com.thinkbiganalytics.metadata.api.template.security.TemplateAccessControl;
import com.thinkbiganalytics.metadata.modeshape.MetadataRepositoryException;
import com.thinkbiganalytics.metadata.rest.model.sla.Obligation;
import com.thinkbiganalytics.metadata.sla.api.ObligationGroup;
import com.thinkbiganalytics.metadata.sla.spi.ServiceLevelAgreementBuilder;
import com.thinkbiganalytics.metadata.sla.spi.ServiceLevelAgreementProvider;
import com.thinkbiganalytics.nifi.feedmgr.FeedRollbackException;
import com.thinkbiganalytics.nifi.feedmgr.InputOutputPort;
import com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient;
import com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform;
import com.thinkbiganalytics.nifi.rest.model.NifiProcessGroup;
import com.thinkbiganalytics.nifi.rest.model.NifiProperty;
import com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil;
import com.thinkbiganalytics.policy.precondition.DependentFeedPrecondition;
import com.thinkbiganalytics.policy.precondition.Precondition;
import com.thinkbiganalytics.policy.precondition.transform.PreconditionPolicyTransformer;
import com.thinkbiganalytics.policy.rest.model.FieldRuleProperty;
import com.thinkbiganalytics.policy.rest.model.PreconditionRule;
import com.thinkbiganalytics.rest.model.LabelValue;
import com.thinkbiganalytics.security.AccessController;
import com.thinkbiganalytics.security.action.Action;
import com.thinkbiganalytics.support.FeedNameUtil;
public class DefaultFeedManagerFeedService implements FeedManagerFeedService {
private static final Logger log = LoggerFactory.getLogger(DefaultFeedManagerFeedService.class);
// Page request covering all results in a single page.
// Bug fix: Spring Data page indices are zero-based; the original used page 1,
// whose offset (1 * Integer.MAX_VALUE) skips past every row (and overflows the
// int offset computation), so "all feeds" queries came back empty.
private static final Pageable PAGE_ALL = new PageRequest(0, Integer.MAX_VALUE);
/**
 * Event listener for precondition/property change events; registered in
 * {@code addEventListener()} and removed in {@code removeEventListener()}.
 */
private final MetadataEventListener<FeedPropertyChangeEvent> feedPropertyChangeListener = new FeedPropertyChangeDispatcher();
// --- Template and feed model collaborators ---
@Inject
FeedManagerTemplateProvider templateProvider;
@Inject
FeedManagerTemplateService templateRestProvider;
@Inject
FeedManagerPreconditionService feedPreconditionModelTransform;
@Inject
FeedModelTransform feedModelTransform;
// --- SLA collaborators ---
@Inject
ServiceLevelAgreementProvider slaProvider;
@Inject
ServiceLevelAgreementService serviceLevelAgreementService;
@Inject
OpsManagerFeedProvider opsManagerFeedProvider;
@Inject
private DatasourceProvider datasourceProvider;
// Service-level access control checks.
@Inject
private AccessController accessController;
// Metadata event service (listener registration/removal).
@Inject
private MetadataEventService metadataEventService;
@Inject
private NiFiPropertyDescriptorTransform propertyDescriptorTransform;
@Inject
private DerivedDatasourceFactory derivedDatasourceFactory;
// use autowired instead of Inject to allow null values.
@Autowired(required = false)
@Qualifier("hadoopAuthorizationService")
private HadoopAuthorizationService hadoopAuthorizationService;
@Inject
private SecurityService securityService;
// --- Metadata store access ---
@Inject
protected CategoryProvider categoryProvider;
@Inject
protected FeedProvider feedProvider;
@Inject
protected MetadataAccess metadataAccess;
@Inject
private FeedManagerTemplateService feedManagerTemplateService;
@Inject
private RegisteredTemplateService registeredTemplateService;
// --- NiFi integration ---
@Inject
PropertyExpressionResolver propertyExpressionResolver;
@Inject
NifiFlowCache nifiFlowCache;
@Inject
private LegacyNifiRestClient nifiRestClient;
@Inject
private FeedHiveTableService feedHiveTableService;
// When true, inactive versioned process groups are deleted from NiFi on feed update.
@Value("${nifi.remove.inactive.versioned.feeds:true}")
private boolean removeInactiveNifiVersionedFeedFlows;
/**
 * Registers the feed-property-change listener with the metadata event
 * service after dependency injection completes.
 */
@PostConstruct
public void addEventListener() {
metadataEventService.addListener(feedPropertyChangeListener);
}
/**
 * Unregisters the feed-property-change listener before this bean is
 * destroyed, so no events are delivered to a dead service.
 */
@PreDestroy
public void removeEventListener() {
metadataEventService.removeListener(feedPropertyChangeListener);
}
@Override
public boolean checkFeedPermission(String id, Action action, Action... more) {
    // Without entity-level access control every caller is implicitly permitted.
    if (!accessController.isEntityAccessControlled()) {
        return true;
    }
    return metadataAccess.read(() -> {
        final Feed feed = feedProvider.findById(feedProvider.resolveId(id));
        if (feed == null) {
            return false;
        }
        // Throws if the current user lacks any of the requested actions.
        feed.getAllowedActions().checkPermission(action, more);
        return true;
    });
}
@Override
public FeedMetadata getFeedByName(final String categoryName, final String feedName) {
    // Requires the ACCESS_FEEDS service permission; returns null when no such feed exists.
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final Feed domainFeed = feedProvider.findBySystemName(categoryName, feedName);
        return (domainFeed == null) ? null : feedModelTransform.domainToFeedMetadata(domainFeed);
    });
}
/**
 * Looks up a feed by id without refreshing the target table schema.
 * Requires the ACCESS_FEEDS service permission; returns null when not found.
 */
@Override
public FeedMetadata getFeedById(final String id) {
return metadataAccess.read(() -> {
this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
return getFeedById(id, false);
});
}
@Override
public FeedMetadata getFeedById(final String id, final boolean refreshTargetTableSchema) {
    // Requires the ACCESS_FEEDS service permission; returns null when the id resolves to nothing.
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final Feed domainFeed = feedProvider.findById(feedProvider.resolveId(id));
        final FeedMetadata metadata = (domainFeed == null) ? null : feedModelTransform.domainToFeedMetadata(domainFeed);
        if (refreshTargetTableSchema && metadata != null) {
            // Hive schema sync is intentionally disabled: issues were found with
            // feeds using TEXTFILE as their output format.
            // feedModelTransform.refreshTableSchemaFromHive(metadata);
        }
        return metadata;
    });
}
/**
 * Returns the full metadata of every feed by paging through all results
 * with the all-inclusive {@code PAGE_ALL} request.
 */
@Override
public Collection<FeedMetadata> getFeeds() {
return getFeeds(PAGE_ALL, null).getContent();
}
/**
 * Returns one page of feed metadata matching the optional filter.
 * Requires the ACCESS_FEEDS service permission.
 */
public Page<FeedMetadata> getFeeds(Pageable pageable, String filter) {
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final Page<Feed> page = feedProvider.findPage(pageable, filter);
        return page.map(domain -> feedModelTransform.domainToFeedMetadata(domain));
    });
}
/**
 * Returns all feeds, either as full metadata ({@code verbose}) or as
 * lightweight summaries.
 */
@Override
public Collection<? extends UIFeed> getFeeds(boolean verbose) {
    return verbose ? getFeeds() : getFeedSummaryData();
}
/**
 * Returns one page of feeds as {@link UIFeed}s: full metadata when
 * {@code verbose}, summaries otherwise.
 */
@Override
public Page<UIFeed> getFeeds(boolean verbose, Pageable pageable, String filter) {
    return verbose
            ? getFeeds(pageable, filter).map(UIFeed.class::cast)
            : getFeedSummaryData(pageable, filter).map(UIFeed.class::cast);
}
/**
 * Returns summaries for every feed.
 *
 * The page returned by {@link #getFeedSummaryData(Pageable, String)} already
 * contains {@code FeedSummary} elements, so the original per-element
 * {@code FeedSummary.class::cast} stream was redundant; a mutable copy of the
 * page content preserves the original's mutable return value.
 */
@Override
public List<FeedSummary> getFeedSummaryData() {
    return new ArrayList<>(getFeedSummaryData(PAGE_ALL, null).getContent());
}
/**
 * Returns one page of feed summaries matching the optional filter.
 * Requires the ACCESS_FEEDS service permission.
 */
public Page<FeedSummary> getFeedSummaryData(Pageable pageable, String filter) {
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final Page<Feed> page = feedProvider.findPage(pageable, filter);
        return page.map(domain -> feedModelTransform.domainToFeedSummary(domain));
    });
}
/**
 * Returns summaries of all feeds in the given category. An empty list is
 * returned when the caller lacks the ACCESS_FEEDS permission or the category
 * has no feeds.
 */
@Override
public List<FeedSummary> getFeedSummaryForCategory(final String categoryId) {
    return metadataAccess.read(() -> {
        if (!this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS)) {
            return new ArrayList<FeedSummary>();
        }
        final Category.ID categoryDomainId = categoryProvider.resolveId(categoryId);
        final List<? extends Feed> domainFeeds = feedProvider.findByCategoryId(categoryDomainId);
        if (domainFeeds == null || domainFeeds.isEmpty()) {
            return new ArrayList<FeedSummary>();
        }
        return feedModelTransform.domainToFeedMetadata(domainFeeds).stream()
                .map(FeedSummary::new)
                .collect(Collectors.toList());
    });
}
/**
 * Returns the metadata of every feed built from the given registered
 * template, or null when the provider yields no result list.
 * Requires the ACCESS_FEEDS service permission.
 */
@Override
public List<FeedMetadata> getFeedsWithTemplate(final String registeredTemplateId) {
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final FeedManagerTemplate.ID templateDomainId = templateProvider.resolveId(registeredTemplateId);
        final List<? extends Feed> domainFeeds = feedProvider.findByTemplateId(templateDomainId);
        return (domainFeeds == null) ? null : feedModelTransform.domainToFeedMetadata(domainFeeds);
    });
}
/**
 * Resolves an arbitrary serializable identifier to a domain feed id
 * inside a metadata read transaction.
 */
@Override
public Feed.ID resolveFeed(@Nonnull Serializable fid) {
return metadataAccess.read(() -> feedProvider.resolveFeed(fid));
}
/**
 * Create/Update a Feed in NiFi and save the metadata to the Kylo meta store.
 *
 * Requires the EDIT_FEEDS service permission. Defaults the feed state from
 * the active flag when no explicit state is provided, then delegates to
 * createAndSaveFeed; on success it either audits the state change (update)
 * or applies the requested role memberships (new feed).
 *
 * @param feedMetadata the feed metadata
 * @return an object indicating if the feed creation was successful or not
 */
public NifiFeed createFeed(final FeedMetadata feedMetadata) {
//functional access to be able to create a feed
this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS);
// Derive the persisted state from the active flag when none was supplied.
if (feedMetadata.getState() == null) {
if (feedMetadata.isActive()) {
feedMetadata.setState(Feed.State.ENABLED.name());
} else {
feedMetadata.setState(Feed.State.DISABLED.name());
}
}
NifiFeed feed = createAndSaveFeed(feedMetadata);
//register the audit for the update event
if (feed.isSuccess() && !feedMetadata.isNew()) {
Feed.State state = Feed.State.valueOf(feedMetadata.getState());
Feed.ID id = feedProvider.resolveId(feedMetadata.getId());
notifyFeedStateChange(feedMetadata, id, state, MetadataChange.ChangeType.UPDATE);
} else if (feed.isSuccess() && feedMetadata.isNew()) {
//update the access control
feedMetadata.toRoleMembershipChangeList().stream().forEach(roleMembershipChange -> securityService.changeFeedRoleMemberships(feed.getFeedMetadata().getId(), roleMembershipChange));
}
return feed;
}
/**
* Create/Update a Feed in NiFi
* Save the metadata to Kylo meta store
*
* @param feedMetadata the feed metadata
* @return an object indicating if the feed creation was successful or not
*/
private NifiFeed createAndSaveFeed(FeedMetadata feedMetadata) {
NifiFeed feed = null;
if (StringUtils.isBlank(feedMetadata.getId())) {
feedMetadata.setIsNew(true);
//If the feed is New we need to ensure the user has CREATE_FEED entity permission
if (accessController.isEntityAccessControlled()) {
metadataAccess.read(() -> {
//ensure the user has rights to create feeds under the category
Category domainCategory = categoryProvider.findById(categoryProvider.resolveId(feedMetadata.getCategory().getId()));
if (domainCategory == null) {
//throw exception
throw new MetadataRepositoryException("Unable to find the category " + feedMetadata.getCategory().getSystemName());
}
domainCategory.getAllowedActions().checkPermission(CategoryAccessControl.CREATE_FEED);
//ensure the user has rights to create feeds using the template
FeedManagerTemplate domainTemplate = templateProvider.findById(templateProvider.resolveId(feedMetadata.getTemplateId()));
if (domainTemplate == null) {
throw new MetadataRepositoryException("Unable to find the template " + feedMetadata.getTemplateId());
}
domainTemplate.getAllowedActions().checkPermission(TemplateAccessControl.CREATE_FEED);
});
}
} else if (accessController.isEntityAccessControlled()) {
metadataAccess.read(() -> {
//perform explict entity access check here as we dont want to modify the NiFi flow unless user has access to edit the feed
Feed.ID domainId = feedProvider.resolveId(feedMetadata.getId());
Feed domainFeed = feedProvider.findById(domainId);
if (domainFeed != null) {
domainFeed.getAllowedActions().checkPermission(FeedAccessControl.EDIT_DETAILS);
} else {
throw new NotFoundException("Feed not found for id " + feedMetadata.getId());
}
});
}
//replace expressions with values
if (feedMetadata.getTable() != null) {
feedMetadata.getTable().updateMetadataFieldValues();
}
if (feedMetadata.getProperties() == null) {
feedMetadata.setProperties(new ArrayList<NifiProperty>());
}
//store ref to the originalFeedProperties before resolving and merging with the template
List<NifiProperty> orignialFeedProperties = feedMetadata.getProperties();
//get all the properties for the metadata
RegisteredTemplate
registeredTemplate =
registeredTemplateService.findRegisteredTemplate(
new RegisteredTemplateRequest.Builder().templateId(feedMetadata.getTemplateId()).templateName(feedMetadata.getTemplateName()).isFeedEdit(true).includeSensitiveProperties(true)
.build());
//update the template properties with the feedMetadata properties
List<NifiProperty> matchedProperties =
NifiPropertyUtil
.matchAndSetPropertyByProcessorName(registeredTemplate.getProperties(), feedMetadata.getProperties(), NifiPropertyUtil.PROPERTY_MATCH_AND_UPDATE_MODE.UPDATE_ALL_PROPERTIES);
feedMetadata.setProperties(registeredTemplate.getProperties());
feedMetadata.setRegisteredTemplate(registeredTemplate);
//resolve any ${metadata.} properties
List<NifiProperty> resolvedProperties = propertyExpressionResolver.resolvePropertyExpressions(feedMetadata);
/*
//store all input related properties as well
List<NifiProperty> inputProperties = NifiPropertyUtil
.findInputProperties(registeredTemplate.getProperties());
///store only those matched and resolved in the final metadata store
Set<NifiProperty> updatedProperties = new HashSet<>();
//first get all those selected properties where the value differs from the template value
List<NifiProperty> modifiedProperties = registeredTemplate.findModifiedDefaultProperties();
if (modifiedProperties != null) {
propertyExpressionResolver.resolvePropertyExpressions(modifiedProperties,feedMetadata);
updatedProperties.addAll(modifiedProperties);
}
updatedProperties.addAll(matchedProperties);
updatedProperties.addAll(resolvedProperties);
updatedProperties.addAll(inputProperties);
feedMetadata.setProperties(new ArrayList<NifiProperty>(updatedProperties));
*/
//decrypt the metadata
feedModelTransform.decryptSensitivePropertyValues(feedMetadata);
FeedMetadata.STATE state = FeedMetadata.STATE.NEW;
try {
state = FeedMetadata.STATE.valueOf(feedMetadata.getState());
} catch (Exception e) {
//if the string isnt valid, disregard as it will end up disabling the feed.
}
boolean enabled = (FeedMetadata.STATE.NEW.equals(state) && feedMetadata.isActive()) || FeedMetadata.STATE.ENABLED.equals(state);
// flag to indicate to enable the feed later
//if this is the first time for this feed and it is set to be enabled, mark it to be enabled after we commit to the JCR store
boolean enableLater = false;
if (enabled && feedMetadata.isNew()) {
enableLater = true;
enabled = false;
feedMetadata.setState(FeedMetadata.STATE.DISABLED.name());
}
CreateFeedBuilder
feedBuilder =
CreateFeedBuilder.newFeed(nifiRestClient, nifiFlowCache, feedMetadata, registeredTemplate.getNifiTemplateId(), propertyExpressionResolver, propertyDescriptorTransform).enabled(enabled)
.removeInactiveVersionedProcessGroup(removeInactiveNifiVersionedFeedFlows);
if (registeredTemplate.isReusableTemplate()) {
feedBuilder.setReusableTemplate(true);
feedMetadata.setIsReusableFeed(true);
} else {
feedBuilder.inputProcessorType(feedMetadata.getInputProcessorType())
.feedSchedule(feedMetadata.getSchedule()).properties(feedMetadata.getProperties());
if (registeredTemplate.usesReusableTemplate()) {
for (ReusableTemplateConnectionInfo connection : registeredTemplate.getReusableTemplateConnections()) {
feedBuilder.addInputOutputPort(new InputOutputPort(connection.getReusableTemplateInputPortName(), connection.getFeedOutputPortName()));
}
}
}
NifiProcessGroup
entity = feedBuilder.build();
feed = new NifiFeed(feedMetadata, entity);
//set the original feedProperties back to the feed
feedMetadata.setProperties(orignialFeedProperties);
//encrypt the metadata properties
feedModelTransform.encryptSensitivePropertyValues(feedMetadata);
if (entity.isSuccess()) {
feedMetadata.setNifiProcessGroupId(entity.getProcessGroupEntity().getId());
try {
saveFeed(feedMetadata);
feed.setEnableAfterSave(enableLater);
feed.setSuccess(true);
feedBuilder.checkAndRemoveVersionedProcessGroup();
} catch (Exception e) {
feed.setSuccess(false);
feed.addErrorMessage(e);
}
} else {
feed.setSuccess(false);
}
if (!feed.isSuccess()) {
if (!entity.isRolledBack()) {
try {
feedBuilder.rollback();
} catch (FeedRollbackException rollbackException) {
log.error("Error rolling back feed {}. {} ", feedMetadata.getCategoryAndFeedName(), rollbackException.getMessage());
feed.addErrorMessage("Error occurred in rolling back the Feed.");
}
entity.setRolledBack(true);
}
}
return feed;
}
/**
 * Persists the feed metadata to the Kylo (JCR) metastore and syncs the result to the
 * operations-manager store.
 * <p>
 * A feed without an id is flagged as new and checked for a duplicate category/feed-name
 * combination. For an existing feed the previously saved security groups are captured so
 * Hadoop authorization policies are only rewritten when the groups actually changed.
 * If the surrounding commit fails, a compensating commit deletes the ops-manager entry
 * that was created for a new feed.
 *
 * @param feed the feed metadata to save; mutated via {@code setIsNew(true)} when it has no id
 */
private void saveFeed(final FeedMetadata feed) {
    if (StringUtils.isBlank(feed.getId())) {
        feed.setIsNew(true);
    }
    metadataAccess.commit(() -> {
        List<? extends HadoopSecurityGroup> previousSavedSecurityGroups = null;
        // Store the old security groups before saving because we need to compare afterward
        if (feed.isNew()) {
            Feed existing = feedProvider.findBySystemName(feed.getCategory().getSystemName(), feed.getSystemFeedName());
            // Since we know this is expected to be new, check if the category/feed name combo is already being used.
            if (existing != null) {
                throw new DuplicateFeedNameException(feed.getCategoryName(), feed.getFeedName());
            }
        } else {
            Feed previousStateBeforeSaving = feedProvider.findById(feedProvider.resolveId(feed.getId()));
            // NOTE(review): removed an unused local that only read previousStateBeforeSaving.getUserProperties()
            previousSavedSecurityGroups = previousStateBeforeSaving.getSecurityGroups();
        }

        // If this is the first time saving this feed, create a new domain object
        Feed domainFeed = feedModelTransform.feedToDomain(feed);
        if (domainFeed.getState() == null) {
            domainFeed.setState(Feed.State.ENABLED);
        }

        // Initially save the feed so it has a domain id for the steps below
        if (feed.isNew()) {
            domainFeed = feedProvider.update(domainFeed);
        }

        final String domainId = domainFeed.getId().toString();
        final String feedName = FeedNameUtil.fullName(domainFeed.getCategory().getName(), domainFeed.getName());

        // Build preconditions
        assignFeedDependencies(feed, domainFeed);

        // Assign the datasources
        assignFeedDatasources(feed, domainFeed);

        boolean isStream = feed.getRegisteredTemplate() != null ? feed.getRegisteredTemplate().isStream() : false;
        Long timeBetweenBatchJobs = feed.getRegisteredTemplate() != null ? feed.getRegisteredTemplate().getTimeBetweenStartingBatchJobs() : 0L;

        // Sync the feed information to ops manager (nested, separate commit)
        metadataAccess.commit(() -> opsManagerFeedProvider.save(opsManagerFeedProvider.resolveId(domainId), feedName, isStream, timeBetweenBatchJobs));

        // Update hadoop security group policies only if the groups changed
        if (!feed.isNew() && !ListUtils.isEqualList(previousSavedSecurityGroups, domainFeed.getSecurityGroups())) {
            List<? extends HadoopSecurityGroup> securityGroups = domainFeed.getSecurityGroups();
            List<String> groupsAsCommaList = securityGroups.stream().map(group -> group.getName()).collect(Collectors.toList());
            hadoopAuthorizationService.updateSecurityGroupsForAllPolicies(feed.getSystemCategoryName(), feed.getSystemFeedName(), groupsAsCommaList, domainFeed.getProperties());
        }

        // Update Hive metastore column descriptions; failures here are non-fatal
        try {
            feedHiveTableService.updateColumnDescriptions(feed);
        } catch (final DataAccessException e) {
            log.warn("Failed to update column descriptions for feed: {}", feed.getCategoryAndFeedDisplayName(), e);
        }

        // Update Kylo metastore
        domainFeed = feedProvider.update(domainFeed);

        // Return result
        return feed;
    }, (e) -> {
        // Error callback: roll back the ops-manager insert if the feed was newly created
        if (feed.isNew() && StringUtils.isNotBlank(feed.getId())) {
            metadataAccess.commit(() -> {
                opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feed.getId()));
                return null;
            });
        }
    });
}
/**
 * Looks for the Feed Preconditions and assigns the Feed Dependencies.
 * <p>
 * Builds a precondition SLA from the transformed obligation groups, then reconciles the
 * feed's dependent-feed relationships: dependents named by {@code DependentFeedPrecondition}
 * policies are added if missing, and previously stored dependents that are no longer named
 * are removed.
 *
 * @param feed       the incoming feed metadata carrying the schedule/preconditions
 * @param domainFeed the persisted domain feed being updated
 */
private void assignFeedDependencies(FeedMetadata feed, Feed domainFeed) {
    final Feed.ID domainFeedId = domainFeed.getId();
    List<PreconditionRule> preconditions = feed.getSchedule().getPreconditions();
    if (preconditions != null) {
        PreconditionPolicyTransformer transformer = new PreconditionPolicyTransformer(preconditions);
        transformer.applyFeedNameToCurrentFeedProperties(feed.getCategory().getSystemName(), feed.getSystemFeedName());
        List<com.thinkbiganalytics.metadata.rest.model.sla.ObligationGroup> transformedPreconditions = transformer.getPreconditionObligationGroups();
        // Build one SLA named after this feed; each obligation group contributes its metrics
        ServiceLevelAgreementBuilder
            preconditionBuilder =
            feedProvider.buildPrecondition(domainFeed.getId()).name("Precondition for feed " + feed.getCategoryAndFeedName() + " (" + domainFeed.getId() + ")");
        for (com.thinkbiganalytics.metadata.rest.model.sla.ObligationGroup precondition : transformedPreconditions) {
            for (Obligation group : precondition.getObligations()) {
                preconditionBuilder.obligationGroupBuilder(ObligationGroup.Condition.valueOf(precondition.getCondition())).obligationBuilder().metric(group.getMetrics()).build();
            }
        }
        preconditionBuilder.build();
        //add in the lineage dependency relationships
        //will the feed exist in the jcr store here if it is new??
        //store the existing list of dependent feeds to track and delete those that dont match
        Set<Feed.ID> oldDependentFeedIds = new HashSet<Feed.ID>();
        Set<Feed.ID> newDependentFeedIds = new HashSet<Feed.ID>();
        List<Feed> dependentFeeds = domainFeed.getDependentFeeds();
        if (dependentFeeds != null && !dependentFeeds.isEmpty()) {
            dependentFeeds.stream().forEach(dependentFeed -> {
                oldDependentFeedIds.add(dependentFeed.getId());
            });
        }
        //find those preconditions that are marked as dependent feed types
        List<Precondition> preconditionPolicies = transformer.getPreconditionPolicies();
        preconditionPolicies.stream().filter(precondition -> precondition instanceof DependentFeedPrecondition).forEach(dependentFeedPrecondition -> {
            DependentFeedPrecondition feedPrecondition = (DependentFeedPrecondition) dependentFeedPrecondition;
            List<String> dependentFeedNames = feedPrecondition.getDependentFeedNames();
            if (dependentFeedNames != null && !dependentFeedNames.isEmpty()) {
                //find the feed by its system name
                for (String dependentFeedName : dependentFeedNames) {
                    Feed dependentFeed = feedProvider.findBySystemName(dependentFeedName);
                    if (dependentFeed != null) {
                        Feed.ID newDependentFeedId = dependentFeed.getId();
                        newDependentFeedIds.add(newDependentFeedId);
                        //add and persist it only if it doesn't already exist
                        if (!oldDependentFeedIds.contains(newDependentFeedId)) {
                            feedProvider.addDependent(domainFeedId, dependentFeed.getId());
                        }
                    }
                }
            }
        });
        //delete any of those dependent feed ids from the oldDependentFeeds that are not part of the newDependentFeedIds
        oldDependentFeedIds.stream().filter(oldFeedId -> !newDependentFeedIds.contains(oldFeedId))
            .forEach(dependentFeedToDelete -> feedProvider.removeDependent(domainFeedId, dependentFeedToDelete));
    }
}
/**
 * Populates and reconciles the feed's source and destination datasources.
 * <p>
 * Derives the datasource ids from the registered template (fetching the template if the
 * metadata does not already carry it), removes the feed's existing sources when the
 * derived set differs, then ensures every derived source/destination is linked to the feed.
 *
 * @param feed       the incoming feed metadata
 * @param domainFeed the persisted domain feed being updated
 */
private void assignFeedDatasources(FeedMetadata feed, Feed domainFeed) {
    final Feed.ID domainFeedId = domainFeed.getId();
    Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> sources = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>();
    Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> destinations = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>();
    // NOTE(review): uniqueName is computed but never used in this method
    String uniqueName = FeedNameUtil.fullName(feed.getCategory().getSystemName(), feed.getSystemFeedName());
    RegisteredTemplate template = feed.getRegisteredTemplate();
    if (template == null) {
        //fetch it for checks
        template = templateRestProvider.getRegisteredTemplate(feed.getTemplateId());
    }
    //find Definition registration; fills the sources/destinations sets as a side effect
    derivedDatasourceFactory.populateDatasources(feed, template, sources, destinations);
    //remove the older sources only if they have changed
    if (domainFeed.getSources() != null) {
        // assumes getSources() is backed by a List<FeedSource> — TODO confirm against the provider implementation
        Set<Datasource.ID>
            existingSourceIds =
            ((List<FeedSource>) domainFeed.getSources()).stream().filter(source -> source.getDatasource() != null).map(source1 -> source1.getDatasource().getId()).collect(Collectors.toSet());
        if (!sources.containsAll(existingSourceIds) || (sources.size() != existingSourceIds.size())) {
            //remove older sources
            //cant do it here for some reason.. need to do it in a separate transaction
            feedProvider.removeFeedSources(domainFeedId);
        }
    }
    sources.stream().forEach(sourceId -> feedProvider.ensureFeedSource(domainFeedId, sourceId));
    destinations.stream().forEach(sourceId -> feedProvider.ensureFeedDestination(domainFeedId, sourceId));
    //TODO deal with inputs changing sources?
}
/**
 * Deletes a feed from both the Kylo metastore and the operations-manager store,
 * unscheduling any SLAs tied to it first. Requires feed-admin rights.
 *
 * @param feedId the feed id to delete
 */
@Override
public void deleteFeed(@Nonnull final String feedId) {
    metadataAccess.commit(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);
        final Feed.ID resolvedId = feedProvider.resolveFeed(feedId);
        final Feed feed = feedProvider.getFeed(resolvedId);
        final Feed.ID domainId = feed.getId();
        // Unschedule any SLAs, then remove the feed from both stores
        serviceLevelAgreementService.unscheduleServiceLevelAgreement(domainId);
        feedProvider.deleteFeed(domainId);
        opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feedId));
        return true;
    });
}
/**
 * Turns on the cleanup flag in the feed's properties.
 *
 * @param feedId the feed id
 */
@Override
public void enableFeedCleanup(@Nonnull String feedId) {
    metadataAccess.commit(() -> {
        final Feed.ID domainId = feedProvider.resolveFeed(feedId);
        final Map<String, String> cleanupFlag = ImmutableMap.of(FeedProperties.CLEANUP_ENABLED, "true");
        return feedProvider.mergeFeedProperties(domainId, cleanupFlag);
    });
}
/**
 * Enables the feed in the metastore and refreshes its serialized JSON metadata
 * to reflect the ENABLED state; publishes a state-change event when enabling succeeded.
 *
 * @param feedId the domain feed id
 * @return true if the feed was enabled
 */
private boolean enableFeed(final Feed.ID feedId) {
    return metadataAccess.commit(() -> {
        final boolean enabled = feedProvider.enableFeed(feedId);
        final Feed domainFeed = feedProvider.findById(feedId);
        FeedMetadata feedMetadata = null;
        if (domainFeed != null) {
            // Keep the serialized JSON copy of the metadata in sync with the new state
            feedMetadata = feedModelTransform.deserializeFeedMetadata(domainFeed, true);
            feedMetadata.setState(FeedMetadata.STATE.ENABLED.name());
            domainFeed.setJson(ObjectMapperSerializer.serialize(feedMetadata));
            feedProvider.update(domainFeed);
        }
        if (enabled) {
            notifyFeedStateChange(feedMetadata, feedId, Feed.State.ENABLED, MetadataChange.ChangeType.UPDATE);
        }
        return enabled;
    });
}
/**
 * Disables the feed in the metastore and refreshes its serialized JSON metadata
 * to reflect the DISABLED state; publishes a state-change event when disabling succeeded.
 *
 * @param feedId the domain feed id
 * @return true if the feed was disabled
 */
private boolean disableFeed(final Feed.ID feedId) {
    return metadataAccess.commit(() -> {
        final boolean disabled = feedProvider.disableFeed(feedId);
        final Feed domainFeed = feedProvider.findById(feedId);
        FeedMetadata feedMetadata = null;
        if (domainFeed != null) {
            // Keep the serialized JSON copy of the metadata in sync with the new state
            feedMetadata = feedModelTransform.deserializeFeedMetadata(domainFeed, false);
            feedMetadata.setState(FeedMetadata.STATE.DISABLED.name());
            domainFeed.setJson(ObjectMapperSerializer.serialize(feedMetadata));
            feedProvider.update(domainFeed);
        }
        if (disabled) {
            notifyFeedStateChange(feedMetadata, feedId, Feed.State.DISABLED, MetadataChange.ChangeType.UPDATE);
        }
        return disabled;
    });
}
/**
 * Enables a feed by its string id and starts its SLA schedule.
 *
 * @param feedId the feed id; blank ids yield {@code null}
 * @return a summary of the (possibly updated) feed, or null for a blank id
 */
public FeedSummary enableFeed(final String feedId) {
    return metadataAccess.commit(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS);
        if (StringUtils.isBlank(feedId)) {
            return null;
        }
        final FeedMetadata feedMetadata = getFeedById(feedId);
        final Feed.ID domainId = feedProvider.resolveFeed(feedId);
        if (enableFeed(domainId)) {
            feedMetadata.setState(Feed.State.ENABLED.name());
            // Start any SLAs for the newly enabled feed
            serviceLevelAgreementService.enableServiceLevelAgreementSchedule(domainId);
        }
        return new FeedSummary(feedMetadata);
    });
}
/**
 * Disables a feed by its string id and pauses its SLA schedule.
 *
 * @param feedId the feed id; blank ids yield {@code null}
 * @return a summary of the (possibly updated) feed, or null for a blank id
 */
public FeedSummary disableFeed(final String feedId) {
    return metadataAccess.commit(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS);
        if (StringUtils.isBlank(feedId)) {
            return null;
        }
        final FeedMetadata feedMetadata = getFeedById(feedId);
        final Feed.ID domainId = feedProvider.resolveFeed(feedId);
        if (disableFeed(domainId)) {
            feedMetadata.setState(Feed.State.DISABLED.name());
            // Pause any SLAs for the newly disabled feed
            serviceLevelAgreementService.disableServiceLevelAgreementSchedule(domainId);
        }
        return new FeedSummary(feedMetadata);
    });
}
/**
 * Applies a new LabelValue array to the FieldProperty.selectableValues
 * {label = Category.Display Feed Name, value = category.system_feed_name}.
 *
 * @param properties the field rule properties to populate with selectable feed options
 */
@Override
public void applyFeedSelectOptions(List<FieldRuleProperty> properties) {
    if (properties != null && !properties.isEmpty()) {
        List<FeedSummary> feedSummaries = getFeedSummaryData();
        List<LabelValue> feedSelection = new ArrayList<>();
        for (FeedSummary feedSummary : feedSummaries) {
            // BUG FIX: the original compared the state strings with '==' (reference identity),
            // so feeds were effectively never flagged as disabled. Use equals().
            boolean isDisabled = Feed.State.DISABLED.name().equals(feedSummary.getState());
            boolean
                canEditDetails =
                accessController.isEntityAccessControlled() ? feedSummary.hasAction(FeedAccessControl.EDIT_DETAILS.getSystemName())
                                                            : accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_FEEDS);
            Map<String, Object> labelValueProperties = new HashMap<>();
            labelValueProperties.put("feed:disabled", isDisabled);
            labelValueProperties.put("feed:editDetails", canEditDetails);
            feedSelection.add(new LabelValue(feedSummary.getCategoryAndFeedDisplayName() + (isDisabled ? " (DISABLED) " : ""), feedSummary.getCategoryAndFeedSystemName(),
                                             isDisabled ? "This feed is currently disabled" : "", labelValueProperties));
        }
        for (FieldRuleProperty property : properties) {
            property.setSelectableValues(feedSelection);
            if (property.getValues() == null) {
                property.setValues(new ArrayList<>()); // reset the initial values to be an empty arraylist
            }
        }
    }
}
/**
 * Returns the global user fields defined for feeds, or an empty set when the
 * caller lacks feed-access rights.
 */
@Nonnull
@Override
public Set<UserField> getUserFields() {
    return metadataAccess.read(() -> {
        // Only expose the fields when the caller may access feeds at all
        final boolean allowed = this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        if (!allowed) {
            return Collections.emptySet();
        }
        return UserPropertyTransform.toUserFields(feedProvider.getUserFields());
    });
}
/**
 * Replaces the global user fields for feeds. The update is silently skipped
 * when the caller lacks feed-admin rights.
 *
 * @param userFields the new set of user fields
 */
@Override
public void setUserFields(@Nonnull final Set<UserField> userFields) {
    if (this.accessController.hasPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS)) {
        feedProvider.setUserFields(UserPropertyTransform.toUserFieldDescriptors(userFields));
    }
}
/**
 * Returns the user properties applicable to feeds of the given category: the union
 * of the global feed user fields and the category's feed user fields. Empty when
 * the category defines no feed user fields.
 *
 * @param categoryId the category id
 * @return the combined user properties, or {@link Optional#empty()}
 */
@Nonnull
@Override
public Optional<Set<UserProperty>> getUserFields(@Nonnull final String categoryId) {
    return metadataAccess.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_FEEDS);
        final Optional<Set<UserFieldDescriptor>> categoryUserFields = categoryProvider.getFeedUserFields(categoryProvider.resolveId(categoryId));
        if (!categoryUserFields.isPresent()) {
            return Optional.empty();
        }
        final Set<UserFieldDescriptor> globalUserFields = feedProvider.getUserFields();
        return Optional.of(UserPropertyTransform.toUserProperties(Collections.emptyMap(), Sets.union(globalUserFields, categoryUserFields.get())));
    });
}
/**
 * Listens for {@link FeedPropertyChangeEvent}s and removes the NiFi properties
 * flagged for deletion from the corresponding feed in the metadata store.
 */
private class FeedPropertyChangeDispatcher implements MetadataEventListener<FeedPropertyChangeEvent> {

    @Override
    public void notify(@Nonnull final FeedPropertyChangeEvent metadataEvent) {
        final Properties propertiesToDelete = metadataEvent.getData().getNifiPropertiesToDelete();
        // Run as the service principal: this is a system-driven cleanup, not a user action
        metadataAccess.commit(() -> {
            final Feed feed = feedProvider.getFeed(feedProvider.resolveFeed(metadataEvent.getData().getFeedId()));
            for (final Object key : propertiesToDelete.keySet()) {
                feed.removeProperty((String) key);
            }
        }, MetadataAccess.SERVICE);
    }
}
/**
 * Publishes an audit event for a feed state change.
 *
 * @param feedMetadata the feed metadata; may be null when the feed could not be loaded
 * @param feedId       the feed id
 * @param state        the new state
 * @param changeType   the event type
 */
private void notifyFeedStateChange(FeedMetadata feedMetadata, Feed.ID feedId, Feed.State state, MetadataChange.ChangeType changeType) {
    // Authentication implements Principal; this is null when no security context is bound
    final Principal principal = SecurityContextHolder.getContext().getAuthentication();
    final String feedName = feedMetadata != null ? feedMetadata.getCategoryAndFeedName() : "";
    final FeedChange change = new FeedChange(changeType, feedName, feedName, feedId, state);
    final FeedChangeEvent event = new FeedChangeEvent(change, DateTime.now(), principal);
    metadataEventService.notify(event);
}
}
| |
package org.folio.marccat.dao;
import net.sf.hibernate.Hibernate;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.Session;
import net.sf.hibernate.type.Type;
import org.folio.marccat.business.common.SortFormException;
import org.folio.marccat.business.descriptor.SortFormParameters;
import org.folio.marccat.config.log.Log;
import org.folio.marccat.dao.common.TransactionalHibernateOperation;
import org.folio.marccat.dao.persistence.CPY_ID;
import org.folio.marccat.dao.persistence.SHLF_LIST;
import org.folio.marccat.exception.DataAccessException;
import org.folio.marccat.exception.RecordNotFoundException;
import java.sql.*;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
@SuppressWarnings("unchecked")
public class DAOCopy extends AbstractDAO {

    private static Log logger = new Log(DAOCopy.class);

    /** Copies a CPY_ID row into the BND_CPY (bound copy) table. */
    private String INSERT_BND_CPY = "INSERT INTO BND_CPY a "
        + "SELECT BIB_ITM_NBR," + "CPY_ID_NBR," + "SHLF_LIST_KEY_NBR,"
        + "ORG_NBR," + "BRNCH_ORG_NBR," + "ORGNL_ORG_NBR," + "BRCDE_NBR,"
        + "DYNIX_SRL_ID_NBR," + "TRSTN_DTE," + "CRTN_DTE," + "ILL_CDE,"
        + "HLDG_SBCPT_STUS_CDE," + "HLDG_RTNTN_CDE," + "LOAN_PRD_CDE,"
        + "HLDG_SRS_TRMT_CDE," + "HLDG_STUS_TYP_CDE," + "LCTN_NME_CDE,"
        + "HLDG_LVL_OF_DTL_CDE," + "HLDG_ACSN_LIST_CDE," + "CPY_NBR_DSC,"
        + "CPY_RMRK_NTE," + "CPY_STMT_TXT," + "CPY_RMRK_NTE_SRT_FORM,"
        + "TMP_LCTN_ORG_NBR," + "TMP_LCTN_NME_CDE," + "MTRL_DESC,"
        + "CST, " + "CURCY_TYP_CDE, " + "CURCY_XCHNG_RTE,"
        + "TRSFR_CSTDY_NBR, " + "PHSCL_CPY_TPE,"
        + "MTHD_ACQ FROM cpy_id b WHERE b.cpy_id_nbr=?";

    /** Copies a SHLF_LIST row into the BND_SHLF_LIST (bound shelf list) table. */
    private String INSERT_BND_SHLF_LIST = "INSERT INTO BND_SHLF_LIST "
        + "SELECT ORG_NBR,SHLF_LIST_KEY_NBR,SHLF_LIST_TYP_CDE,SHLF_LIST_STRNG_TEXT,SHLF_LIST_SRT_FORM FROM SHLF_LIST A WHERE A.SHLF_LIST_KEY_NBR=?";

    /**
     * Loads a copy by copy number, attaching its shelf list when present.
     *
     * @param session    -- the hibernate session associated to request.
     * @param copyNumber -- the copy number.
     * @return the {@link CPY_ID}, or {@code null} when not found.
     * @throws DataAccessException in case of hibernate exception.
     */
    public CPY_ID load(final Session session, final int copyNumber) throws DataAccessException {
        CPY_ID c = null;
        try {
            c = (CPY_ID) session.get(CPY_ID.class, copyNumber);
            if (c != null && c.getShelfListKeyNumber() != null) {
                c.setShelfList(new ShelfListDAO().load(c.getShelfListKeyNumber(), session));
            }
            // todo: to manage from external configuration module based on orgNumber
            /*if ((new DAOGlobalVariable ( ).getValueByName ("barrcode")).equals ("1")) {
            c.setBarcodeAssigned (true);
            } else {
            c.setBarcodeAssigned (false);
            }*/
        } catch (HibernateException e) {
            throw new DataAccessException(e);
        }
        return c;
    }

    /**
     * Gets the amicus number associated to a copy.
     *
     * @param session -- the hibernate session associated to request.
     * @param barCode -- the barcode associated to copy.
     * @return the amicus (bib item) number of the last matching copy, or 0 when no copy matches.
     * @throws DataAccessException in case of hibernate exception.
     */
    public int getBibItemNumber(final Session session, final String barCode) throws DataAccessException {
        try {
            // BUG FIX: bind the barcode as a query parameter instead of concatenating it
            // into the HQL string (prevents HQL injection and quoting errors).
            final List<CPY_ID> copies = session.find(
                "from CPY_ID ci where ci.barCodeNumber = ?", barCode, Hibernate.STRING);
            // Keep the original semantics (use the LAST matching copy), but return 0 when
            // nothing matches instead of failing on Optional.get().
            return copies.stream()
                .filter(Objects::nonNull)
                .reduce((first, second) -> second)
                .map(CPY_ID::getBibItemNumber)
                .orElse(0);
        } catch (HibernateException e) {
            throw new DataAccessException(e);
        }
    }

    /**
     * Gets the amicus (bib item) number for a copy id.
     *
     * @param copyIdNumber is the copyIdNumber of the copy
     * @return the BibItemNumber from the CPY_ID table, or 0 when not found.
     * @since 1.0
     */
    public int getBibItemNumber(int copyIdNumber) throws DataAccessException {
        int result = 0;
        try {
            Session s = currentSession();
            // Parameter binding instead of string concatenation
            List listAllCopies = s.find(
                "from CPY_ID ci where ci.copyIdNumber = ?",
                Integer.valueOf(copyIdNumber), Hibernate.INTEGER);
            for (Object raw : listAllCopies) {
                result = ((CPY_ID) raw).getBibItemNumber();
            }
        } catch (HibernateException e) {
            logAndWrap(e);
        }
        return result;
    }

    /**
     * Deletes a copy, detaching its shelf list, recording the acting user and
     * removing the related summary holding record.
     *
     * @param copyNumber the copy number to delete
     * @param userName   the user performing the deletion (audited in CPY_ID_AGENT)
     * @throws DataAccessException in case of data access failure
     */
    public void delete(final int copyNumber, final String userName) throws DataAccessException {
        new TransactionalHibernateOperation() {
            public void doInHibernateTransaction(Session s)
                throws HibernateException, DataAccessException {
                // TODO make sure no circulation records (AMICUS doesn't)
                CPY_ID copy = (CPY_ID) s.get(CPY_ID.class, Integer.valueOf(copyNumber));
                // BUG FIX: check for existence BEFORE dereferencing; the original called
                // copy.getShelfListKeyNumber() first, turning "not found" into an NPE.
                if (copy == null) {
                    throw new RecordNotFoundException();
                }
                if (copy.getShelfListKeyNumber() != null) {
                    // TODO: pass the session through instead of null
                    copy.setShelfList(new ShelfListDAO().load(copy.getShelfListKeyNumber().intValue(), null));
                }
                // detach the shelflist
                detachShelfList(copy, copy.getShelfList());
                saveCpyIdAgent(userName, copy.getCopyIdNumber());
                // delete the copy itself
                s.delete(copy);
                DAOSummaryHolding ds = new DAOSummaryHolding();
                ds.deleteRecord(copy.getBibItemNumber(), copy.getOrganisationNumber());
            }
        }.execute();
    }

    /**
     * Detaches the shelf list from the given copy: removes the SHLF_LIST_ACS_PNT
     * entry when this copy's bib item is its last user, and deletes the SHLF_LIST
     * row itself when no other copy references it.
     *
     * @param copy  the copy being detached
     * @param shelf the shelf list to detach; no-op when {@code null}
     * @throws DataAccessException in case of data access failure
     */
    public void detachShelfList(CPY_ID copy, SHLF_LIST shelf) throws DataAccessException {
        Session s = currentSession();
        if (shelf == null) {
            return;
        }
        try {
            /*
             * If only our copy's bib_item is using this shelf list then remove
             * the entry from SHLF_LIST_ACS_PNT
             */
            if (countShelfFromCopyUses(copy, shelf) != 0
                && countShelfListAccessPointUses(copy, shelf) == 1) {
                logger.info("Cancella SHLF_LIST_ACS_PNT");
                s.delete(
                    "from SHLF_LIST_ACS_PNT as c where c.shelfListKeyNumber = ?"
                        + " and c.bibItemNumber = ?",
                    new Object[]{
                        Integer.valueOf(shelf.getShelfListKeyNumber()),
                        Integer.valueOf(copy.getBibItemNumber())},
                    new Type[]{Hibernate.INTEGER, Hibernate.INTEGER});
                /*
                 * AND if only our copy is using this shelf list number then
                 * delete the shelf list number
                 */
                List l = find(
                    "select count(*) from CPY_ID as c where c.shelfListKeyNumber = ?",
                    new Object[]{Integer.valueOf(shelf.getShelfListKeyNumber())},
                    new Type[]{Hibernate.INTEGER});
                if (firstIntOrZero(l) == 1) {
                    s.delete(shelf);
                }
            }
        } catch (HibernateException e) {
            logAndWrap(e);
        }
    }

    /**
     * Counts the number of copies using this shelf list and bib_itm.
     *
     * @since 1.0
     */
    public int countShelfListAccessPointUses(CPY_ID copy, SHLF_LIST shelf)
        throws DataAccessException {
        List l = find(
            "select count(*) from CPY_ID as c where c.shelfListKeyNumber = ?"
                + " and c.bibItemNumber = ?",
            new Object[]{
                Integer.valueOf(shelf.getShelfListKeyNumber()),
                Integer.valueOf(copy.getBibItemNumber())},
            new Type[]{Hibernate.INTEGER, Hibernate.INTEGER});
        return firstIntOrZero(l);
    }

    /**
     * Counts the number of copies using this shelf list, bib_itm and copy id.
     *
     * @since 1.0
     */
    public int countShelfFromCopyUses(CPY_ID copy, SHLF_LIST shelf)
        throws DataAccessException {
        List l = find(
            "select count(*) from CPY_ID as c where c.shelfListKeyNumber = ?"
                + " and c.bibItemNumber = ?"
                + " and c.copyIdNumber = ?",
            new Object[]{
                Integer.valueOf(shelf.getShelfListKeyNumber()),
                Integer.valueOf(copy.getBibItemNumber()),
                Integer.valueOf(copy.getCopyIdNumber())},
            new Type[]{Hibernate.INTEGER, Hibernate.INTEGER, Hibernate.INTEGER});
        return firstIntOrZero(l);
    }

    /** Returns the first row of a count query as an int, or 0 when the list is empty. */
    private static int firstIntOrZero(List l) {
        return l.isEmpty() ? 0 : ((Integer) l.get(0)).intValue();
    }

    /**
     * Computes the sort form for a text by invoking the AMICUS.PACK_SORTFORM
     * PL/SQL package: first SF_PREPROCESS, then SF_BUILDSRTFRM on its output.
     *
     * @param text  the text to normalize
     * @param parms the sort-form parameters
     * @return the computed sort form, or "" when the database call failed
     * @throws DataAccessException in case of data access failure
     */
    public String calculateSortForm(String text, SortFormParameters parms)
        throws DataAccessException {
        String result = "";
        Session s = currentSession();
        try {
            Connection connection = s.connection();
            // Two-phase computation: preprocess the raw text, then build the sort form.
            result = callSortFormFunction(connection, "SF_PREPROCESS", text, parms);
            result = callSortFormFunction(connection, "SF_BUILDSRTFRM", result, parms);
        } catch (HibernateException e) {
            logAndWrap(e);
        } catch (SQLException e) {
            logAndWrap(e);
        }
        return result;
    }

    /**
     * Invokes one AMICUS.PACK_SORTFORM function with the standard parameter layout
     * and returns its VARCHAR output. The two production calls only differ in the
     * function name and input string, so the duplicated setup lives here.
     *
     * @throws SortFormException when the function returns a non-zero status code
     */
    private String callSortFormFunction(Connection connection, String functionName,
                                        String input, SortFormParameters parms)
        throws SQLException, DataAccessException {
        final int bufSize = 600;
        CallableStatement proc = null;
        try {
            proc = connection.prepareCall(
                "{ ? = call AMICUS.PACK_SORTFORM." + functionName + "(?, ?, ?, ?, ?, ?, ?, ?) }");
            proc.registerOutParameter(1, Types.INTEGER);
            proc.setString(2, input);
            proc.registerOutParameter(3, Types.VARCHAR);
            proc.setInt(4, bufSize);
            proc.setInt(5, parms.getSortFormMainType());
            proc.setInt(6, parms.getSortFormSubType());
            proc.setInt(7, parms.getNameTitleOrSubjectType());
            proc.setInt(8, parms.getNameSubtype());
            proc.setInt(9, parms.getSkipInFiling());
            proc.execute();
            int rc = proc.getInt(1);
            if (rc != 0) {
                throw new SortFormException(String.valueOf(rc));
            }
            return proc.getString(3);
        } finally {
            if (proc != null) {
                try {
                    proc.close();
                } catch (SQLException e) {
                    // best effort close; the original also swallowed this
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Records the user who last modified a copy in CPY_ID_AGENT: locks the row,
     * updates it if present, otherwise inserts a new row.
     * <p>
     * Commit/rollback is handled by hibernate together with the subsequent CPY_ID
     * operations: if everything on CPY_ID succeeds the whole unit commits.
     *
     * @param userName the acting user
     * @param cpyIdNbr the copy id number
     * @throws DataAccessException in case of data access failure
     */
    public void saveCpyIdAgent(String userName, int cpyIdNbr) throws DataAccessException {
        PreparedStatement lockStmt = null;
        PreparedStatement updateStmt = null;
        PreparedStatement insertStmt = null;
        ResultSet lockedRow = null;
        Session session = currentSession();
        try {
            Connection connection = session.connection();
            // Lock the row (SELECT ... FOR UPDATE).
            // BUG FIX: the original prepared this statement but never executed it,
            // so the advertised row lock was never actually taken.
            lockStmt = connection.prepareStatement("SELECT * FROM CPY_ID_AGENT WHERE CPY_ID_NBR = ? FOR UPDATE");
            lockStmt.setInt(1, cpyIdNbr);
            lockedRow = lockStmt.executeQuery();

            updateStmt = connection.prepareStatement("UPDATE CPY_ID_AGENT SET USERNAME = ?, TRSTN_DTE = SYSDATE , AGENT_ID = 1 WHERE CPY_ID_NBR = ?");
            updateStmt.setString(1, userName);
            updateStmt.setInt(2, cpyIdNbr);
            int count = updateStmt.executeUpdate();
            if (count == 0) {
                // No existing audit row: insert one
                insertStmt = connection.prepareStatement("INSERT INTO CPY_ID_AGENT (CPY_ID_NBR, USERNAME, AGENT_ID, TRSTN_DTE) VALUES (?,?,?,SYSDATE)");
                insertStmt.setInt(1, cpyIdNbr);
                insertStmt.setString(2, userName);
                insertStmt.setInt(3, 1);
                insertStmt.execute();
            }
        } catch (HibernateException e) {
            logAndWrap(e);
        } catch (SQLException e) {
            logAndWrap(e);
        } finally {
            try {
                if (lockedRow != null) {
                    lockedRow.close();
                }
                if (lockStmt != null) {
                    lockStmt.close();
                }
                if (updateStmt != null) {
                    updateStmt.close();
                }
                if (insertStmt != null) {
                    insertStmt.close();
                }
            } catch (SQLException e) {
                logAndWrap(e);
            }
        }
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.impl.jdbc.exec;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSetMetaData;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement;
import org.jkiss.dbeaver.model.impl.AbstractResultSet;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCTrace;
import org.jkiss.dbeaver.model.qm.QMUtils;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.*;
import java.util.Calendar;
import java.util.Map;
/**
* Managable result set
*/
public class JDBCResultSetImpl extends AbstractResultSet<JDBCSession, JDBCStatement> implements JDBCResultSet {
private static final Log log = Log.getLog(JDBCResultSetImpl.class);
private ResultSet original;
private final String description;
private JDBCResultSetMetaData metaData;
private long rowsFetched;
private long maxRows = -1;
private boolean fake;
private boolean disableLogging;
/**
 * Creates a managed result set through the data source's JDBC factory.
 *
 * @param session        the owning session
 * @param statement      the source statement, or {@code null} for a "fake" result set
 * @param original       the underlying JDBC result set
 * @param description    human-readable description (used as commented SQL for fake statements)
 * @param disableLogging true to skip QM logging for this result set
 */
public static JDBCResultSet makeResultSet(@NotNull JDBCSession session, @Nullable JDBCStatement statement, @NotNull ResultSet original, String description, boolean disableLogging)
    throws SQLException
{
    return session.getDataSource().getJdbcFactory()
        .createResultSet(session, statement, original, description, disableLogging);
}
/**
 * Wraps a JDBC result set. When logging is enabled, the QM handler is notified of the
 * open immediately; the open is also dumped when JDBC API tracing is on.
 */
protected JDBCResultSetImpl(@NotNull JDBCSession session, @Nullable JDBCStatement statement, @NotNull ResultSet original, String description, boolean disableLogging)
{
    super(session, statement);
    this.original = original;
    this.disableLogging = disableLogging;
    this.description = description;
    // A "fake" result set has no source statement; getSourceStatement() creates one lazily
    this.fake = statement == null;
    if (!disableLogging) {
        // Notify handler
        QMUtils.getDefaultHandler().handleResultSetOpen(this);
    }
    if (JDBCTrace.isApiTraceEnabled()) {
        JDBCTrace.dumpResultSetOpen(this.original);
    }
}
/*
protected JDBCResultSetImpl(JDBCStatementImpl statement, ResultSet original)
{
this.session = statement.getSession();
this.statement = statement;
this.original = original;
this.fake = false;
if (this.statement.isQMLoggingEnabled()) {
// Notify handler
QMUtils.getDefaultHandler().handleResultSetOpen(this);
}
}
*/
    /**
     * Hook invoked right before each row fetch (see next()). Currently a no-op;
     * the commented-out calls show the originally intended progress/QM reporting.
     */
    protected void beforeFetch()
    {
        // FIXME: start/end block. Do we need them here?
        //this.session.getProgressMonitor().startBlock(statement, null);
        //QMUtils.getDefaultHandler().handleResultSetFetch(this);
    }

    /**
     * Hook invoked right after each row fetch (see next()). Currently a no-op.
     */
    protected void afterFetch()
    {
        //this.session.getProgressMonitor().endBlock();
    }
    /** Returns the wrapped JDBC result set. */
    @Override
    public ResultSet getOriginal()
    {
        return original;
    }

    /** Returns the owning session. */
    @Override
    public JDBCSession getSession()
    {
        return session;
    }
    /**
     * Returns the statement this result set originates from.
     * For a "fake" result set (created without a statement) a surrogate
     * statement is fabricated lazily on first call, using the description
     * as its commented-out SQL text.
     */
    @Override
    public JDBCStatement getSourceStatement()
    {
        if (fake && statement == null) {
            // Make fake statement
            JDBCFakeStatementImpl fakeStatement = new JDBCFakeStatementImpl(
                session,
                this,
                "-- " + description, // Set description as commented SQL
                disableLogging);
            this.statement = fakeStatement;
            // Fire the execute notifications so listeners observe a complete statement lifecycle
            fakeStatement.beforeExecute();
            fakeStatement.afterExecute();
        }
        return statement;
    }

    /** JDBC-style accessor; equivalent to getSourceStatement(). */
    @Override
    public JDBCStatement getStatement()
    {
        return getSourceStatement();
    }
@Override
public Object getAttributeValue(int index)
throws DBCException
{
checkNotEmpty();
try {
// JDBC uses 1-based indexes
return original.getObject(index + 1);
}
catch (SQLException e) {
throw new DBCException(e, session.getExecutionContext());
}
}
@Override
public Object getAttributeValue(String name) throws DBCException {
checkNotEmpty();
try {
return original.getObject(name);
}
catch (SQLException e) {
throw new DBCException(e, session.getExecutionContext());
}
}
private void checkNotEmpty()
{
if (original == null) {
throw new IllegalStateException();
}
}
    /**
     * Advances the cursor to the next row, translating SQLException into DBCException.
     *
     * @return true if a row was fetched; false for an empty wrapper or end of data
     */
    @Override
    public boolean nextRow()
        throws DBCException
    {
        if (this.original == null) {
            return false;
        }
        try {
            // Delegate to next() so maxRows/tracing logic applies
            return this.next();
        }
        catch (SQLException e) {
            throw new DBCException(e, session.getExecutionContext());
        }
    }

    /**
     * Moves the cursor to the given absolute position (JDBC absolute()),
     * translating SQLException into DBCException.
     *
     * @return true if the cursor landed on a valid row
     */
    @Override
    public boolean moveTo(int position) throws DBCException
    {
        if (this.original == null) {
            return false;
        }
        try {
            return this.absolute(position);
        }
        catch (SQLException e) {
            throw new DBCException(e, session.getExecutionContext());
        }
    }
    /**
     * Returns the metadata wrapper, creating it lazily on first access.
     * SQLException from metadata creation is translated into DBCException.
     */
    @NotNull
    @Override
    public JDBCResultSetMetaData getMeta()
        throws DBCException
    {
        if (metaData == null) {
            try {
                metaData = createMetaDataImpl();
            } catch (SQLException e) {
                throw new DBCException(e, session.getExecutionContext());
            }
        }
        return metaData;
    }
@Nullable
@Override
public String getResultSetName() throws DBCException {
if (this.original == null) {
return null;
}
try {
return original.getCursorName();
}
catch (SQLException e) {
throw new DBCException(e, session.getExecutionContext());
}
}
@Override
public Object getFeature(String name) {
if (FEATURE_NAME_JDBC.equals(name)) {
return true;
}
return super.getFeature(name);
}
@Override
public ResultSetMetaData getMetaData()
throws SQLException
{
try {
return getMeta();
} catch (DBCException e) {
if (e.getCause() instanceof SQLException) {
throw (SQLException)e.getCause();
} else {
throw new SQLException(e);
}
}
}
    /**
     * Sets the maximum number of rows next() will fetch; a negative value means unlimited.
     */
    public void setMaxRows(long maxRows) {
        this.maxRows = maxRows;
    }
    /**
     * Advances the underlying cursor, honoring the maxRows limit and
     * invoking the before/after fetch hooks around the actual fetch.
     *
     * @return true if a row was fetched; false for an empty wrapper,
     *         exhausted data, or when the maxRows limit has been reached
     */
    @Override
    public boolean next()
        throws SQLException
    {
        if (this.original == null) {
            return false;
        }
        // Check max rows
        if (maxRows >= 0 && rowsFetched >= maxRows) {
            return false;
        }
        this.beforeFetch();
        try {
            // Fetch next row
            boolean fetched = original.next();
            if (fetched) {
                rowsFetched++;
            }
            if (fetched && JDBCTrace.isApiTraceEnabled()) {
                JDBCTrace.dumpResultSetRow(this.original);
            }
            return fetched;
        }
        finally {
            this.afterFetch();
        }
    }
    /**
     * Closes the underlying result set (errors are logged, not thrown),
     * reports the close to the QM handler unless logging is disabled,
     * and closes the fabricated fake statement if one was created.
     */
    @Override
    public void close()
    {
        if (original != null) {
            /*
            // Check for warnings
            try {
                JDBCUtils.reportWarnings(session, getOriginal().getWarnings());
                getOriginal().clearWarnings();
            } catch (Throwable e) {
                log.debug("Can't check for resultset warnings", e);
            }
            */
            if (!disableLogging) {
                // Handle close
                QMUtils.getDefaultHandler().handleResultSetClose(this, rowsFetched);
            }
            // Close result set
            try {
                original.close();
            }
            catch (SQLException e) {
                // Deliberately swallowed: close() must not throw
                log.error("Can't close result set", e);
            }
        }
        // A fake statement exists only if getSourceStatement() fabricated one
        if (fake && statement != null) {
            statement.close();
        }
        if (JDBCTrace.isApiTraceEnabled()) {
            JDBCTrace.dumpResultSetClose();
        }
    }
    /** Delegates to the underlying result set. */
    @Override
    public boolean wasNull()
        throws SQLException
    {
        checkNotEmpty();
        return original.wasNull();
    }

    /** Delegates to the underlying result set. */
    @Override
    public String getString(int columnIndex)
        throws SQLException
    {
        checkNotEmpty();
        return original.getString(columnIndex);
    }

    // Intentionally empty tracing hook; kept as a seam for debugging builds.
    private static void traceGetValue(int columnIndex, String value) {
    }
@Override
public boolean getBoolean(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getBoolean(columnIndex);
}
@Override
public byte getByte(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getByte(columnIndex);
}
@Override
public short getShort(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getShort(columnIndex);
}
@Override
public int getInt(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getInt(columnIndex);
}
@Override
public long getLong(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getLong(columnIndex);
}
@Override
public float getFloat(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getFloat(columnIndex);
}
@Override
public double getDouble(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getDouble(columnIndex);
}
@Override
@Deprecated
public BigDecimal getBigDecimal(int columnIndex, int scale)
throws SQLException
{
checkNotEmpty();
return original.getBigDecimal(columnIndex, scale);
}
@Override
public byte[] getBytes(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getBytes(columnIndex);
}
@Override
public Date getDate(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getDate(columnIndex);
}
@Override
public Time getTime(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getTime(columnIndex);
}
@Override
public Timestamp getTimestamp(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getTimestamp(columnIndex);
}
@Override
public InputStream getAsciiStream(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getAsciiStream(columnIndex);
}
@Override
@Deprecated
public InputStream getUnicodeStream(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getUnicodeStream(columnIndex);
}
@Override
public InputStream getBinaryStream(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getBinaryStream(columnIndex);
}
@Override
public String getString(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getString(columnLabel);
}
@Override
public boolean getBoolean(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getBoolean(columnLabel);
}
@Override
public byte getByte(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getByte(columnLabel);
}
@Override
public short getShort(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getShort(columnLabel);
}
@Override
public int getInt(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getInt(columnLabel);
}
@Override
public long getLong(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getLong(columnLabel);
}
@Override
public float getFloat(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getFloat(columnLabel);
}
@Override
public double getDouble(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getDouble(columnLabel);
}
@Override
@Deprecated
public BigDecimal getBigDecimal(String columnLabel, int scale)
throws SQLException
{
checkNotEmpty();
return original.getBigDecimal(columnLabel, scale);
}
@Override
public byte[] getBytes(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getBytes(columnLabel);
}
@Override
public Date getDate(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getDate(columnLabel);
}
@Override
public Time getTime(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getTime(columnLabel);
}
@Override
public Timestamp getTimestamp(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getTimestamp(columnLabel);
}
@Override
public InputStream getAsciiStream(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getAsciiStream(columnLabel);
}
@Override
@Deprecated
public InputStream getUnicodeStream(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getUnicodeStream(columnLabel);
}
@Override
public InputStream getBinaryStream(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getBinaryStream(columnLabel);
}
@Override
public SQLWarning getWarnings()
throws SQLException
{
checkNotEmpty();
return original.getWarnings();
}
@Override
public void clearWarnings()
throws SQLException
{
if (original == null) {
return;
}
original.clearWarnings();
}
@Nullable
@Override
public String getCursorName()
throws SQLException
{
if (original == null) {
return null;
}
return original.getCursorName();
}
@Override
public Object getObject(int columnIndex)
throws SQLException
{
checkNotEmpty();
return original.getObject(columnIndex);
}
@Override
public Object getObject(String columnLabel)
throws SQLException
{
checkNotEmpty();
return original.getObject(columnLabel);
}
@Override
public int findColumn(String columnLabel)
throws SQLException
{
return original.findColumn(columnLabel);
}
@Override
public Reader getCharacterStream(int columnIndex)
throws SQLException
{
return original.getCharacterStream(columnIndex);
}
@Override
public Reader getCharacterStream(String columnLabel)
throws SQLException
{
return original.getCharacterStream(columnLabel);
}
@Override
public BigDecimal getBigDecimal(int columnIndex)
throws SQLException
{
return original.getBigDecimal(columnIndex);
}
@Override
public BigDecimal getBigDecimal(String columnLabel)
throws SQLException
{
return original.getBigDecimal(columnLabel);
}
@Override
public boolean isBeforeFirst()
throws SQLException
{
return original.isBeforeFirst();
}
@Override
public boolean isAfterLast()
throws SQLException
{
return original.isAfterLast();
}
@Override
public boolean isFirst()
throws SQLException
{
return original.isFirst();
}
@Override
public boolean isLast()
throws SQLException
{
return original.isLast();
}
@Override
public void beforeFirst()
throws SQLException
{
original.beforeFirst();
}
@Override
public void afterLast()
throws SQLException
{
original.afterLast();
}
@Override
public boolean first()
throws SQLException
{
return original.first();
}
@Override
public boolean last()
throws SQLException
{
return original.last();
}
@Override
public int getRow()
throws SQLException
{
return original.getRow();
}
@Override
public boolean absolute(int row)
throws SQLException
{
return original.absolute(row);
}
@Override
public boolean relative(int rows)
throws SQLException
{
return original.relative(rows);
}
@Override
public boolean previous()
throws SQLException
{
return original.previous();
}
@Override
public void setFetchDirection(int direction)
throws SQLException
{
original.setFetchDirection(direction);
}
@Override
public int getFetchDirection()
throws SQLException
{
return original.getFetchDirection();
}
@Override
public void setFetchSize(int rows)
throws SQLException
{
original.setFetchSize(rows);
}
@Override
public int getFetchSize()
throws SQLException
{
return original.getFetchSize();
}
@Override
public int getType()
throws SQLException
{
return original.getType();
}
@Override
public int getConcurrency()
throws SQLException
{
return original.getConcurrency();
}
@Override
public boolean rowUpdated()
throws SQLException
{
return original.rowUpdated();
}
@Override
public boolean rowInserted()
throws SQLException
{
return original.rowInserted();
}
@Override
public boolean rowDeleted()
throws SQLException
{
return original.rowDeleted();
}
@Override
public void updateNull(int columnIndex)
throws SQLException
{
original.updateNull(columnIndex);
}
@Override
public void updateBoolean(int columnIndex, boolean x)
throws SQLException
{
original.updateBoolean(columnIndex, x);
}
@Override
public void updateByte(int columnIndex, byte x)
throws SQLException
{
original.updateByte(columnIndex, x);
}
@Override
public void updateShort(int columnIndex, short x)
throws SQLException
{
original.updateShort(columnIndex, x);
}
@Override
public void updateInt(int columnIndex, int x)
throws SQLException
{
original.updateInt(columnIndex, x);
}
@Override
public void updateLong(int columnIndex, long x)
throws SQLException
{
original.updateLong(columnIndex, x);
}
@Override
public void updateFloat(int columnIndex, float x)
throws SQLException
{
original.updateFloat(columnIndex, x);
}
@Override
public void updateDouble(int columnIndex, double x)
throws SQLException
{
original.updateDouble(columnIndex, x);
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x)
throws SQLException
{
original.updateBigDecimal(columnIndex, x);
}
@Override
public void updateString(int columnIndex, String x)
throws SQLException
{
original.updateString(columnIndex, x);
}
@Override
public void updateBytes(int columnIndex, byte[] x)
throws SQLException
{
original.updateBytes(columnIndex, x);
}
@Override
public void updateDate(int columnIndex, Date x)
throws SQLException
{
original.updateDate(columnIndex, x);
}
@Override
public void updateTime(int columnIndex, Time x)
throws SQLException
{
original.updateTime(columnIndex, x);
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x)
throws SQLException
{
original.updateTimestamp(columnIndex, x);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length)
throws SQLException
{
original.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length)
throws SQLException
{
original.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length)
throws SQLException
{
original.updateCharacterStream(columnIndex, x, length);
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength)
throws SQLException
{
original.updateObject(columnIndex, x, scaleOrLength);
}
@Override
public void updateObject(int columnIndex, Object x)
throws SQLException
{
original.updateObject(columnIndex, x);
}
@Override
public void updateNull(String columnLabel)
throws SQLException
{
original.updateNull(columnLabel);
}
@Override
public void updateBoolean(String columnLabel, boolean x)
throws SQLException
{
original.updateBoolean(columnLabel, x);
}
@Override
public void updateByte(String columnLabel, byte x)
throws SQLException
{
original.updateByte(columnLabel, x);
}
@Override
public void updateShort(String columnLabel, short x)
throws SQLException
{
original.updateShort(columnLabel, x);
}
@Override
public void updateInt(String columnLabel, int x)
throws SQLException
{
original.updateInt(columnLabel, x);
}
@Override
public void updateLong(String columnLabel, long x)
throws SQLException
{
original.updateLong(columnLabel, x);
}
@Override
public void updateFloat(String columnLabel, float x)
throws SQLException
{
original.updateFloat(columnLabel, x);
}
@Override
public void updateDouble(String columnLabel, double x)
throws SQLException
{
original.updateDouble(columnLabel, x);
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x)
throws SQLException
{
original.updateBigDecimal(columnLabel, x);
}
@Override
public void updateString(String columnLabel, String x)
throws SQLException
{
original.updateString(columnLabel, x);
}
@Override
public void updateBytes(String columnLabel, byte[] x)
throws SQLException
{
original.updateBytes(columnLabel, x);
}
@Override
public void updateDate(String columnLabel, Date x)
throws SQLException
{
original.updateDate(columnLabel, x);
}
@Override
public void updateTime(String columnLabel, Time x)
throws SQLException
{
original.updateTime(columnLabel, x);
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x)
throws SQLException
{
original.updateTimestamp(columnLabel, x);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length)
throws SQLException
{
original.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, int length)
throws SQLException
{
original.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, int length)
throws SQLException
{
original.updateCharacterStream(columnLabel, reader, length);
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength)
throws SQLException
{
original.updateObject(columnLabel, x, scaleOrLength);
}
@Override
public void updateObject(String columnLabel, Object x)
throws SQLException
{
original.updateObject(columnLabel, x);
}
@Override
public void insertRow()
throws SQLException
{
original.insertRow();
}
@Override
public void updateRow()
throws SQLException
{
original.updateRow();
}
@Override
public void deleteRow()
throws SQLException
{
original.deleteRow();
}
@Override
public void refreshRow()
throws SQLException
{
original.refreshRow();
}
@Override
public void cancelRowUpdates()
throws SQLException
{
original.cancelRowUpdates();
}
@Override
public void moveToInsertRow()
throws SQLException
{
original.moveToInsertRow();
}
@Override
public void moveToCurrentRow()
throws SQLException
{
original.moveToCurrentRow();
}
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map)
throws SQLException
{
return original.getObject(columnIndex, map);
}
@Override
public Ref getRef(int columnIndex)
throws SQLException
{
return original.getRef(columnIndex);
}
@Override
public Blob getBlob(int columnIndex)
throws SQLException
{
return original.getBlob(columnIndex);
}
@Override
public Clob getClob(int columnIndex)
throws SQLException
{
return original.getClob(columnIndex);
}
@Override
public Array getArray(int columnIndex)
throws SQLException
{
return original.getArray(columnIndex);
}
@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map)
throws SQLException
{
return original.getObject(columnLabel, map);
}
@Override
public Ref getRef(String columnLabel)
throws SQLException
{
return original.getRef(columnLabel);
}
@Override
public Blob getBlob(String columnLabel)
throws SQLException
{
return original.getBlob(columnLabel);
}
@Override
public Clob getClob(String columnLabel)
throws SQLException
{
return original.getClob(columnLabel);
}
@Override
public Array getArray(String columnLabel)
throws SQLException
{
return original.getArray(columnLabel);
}
@Override
public Date getDate(int columnIndex, Calendar cal)
throws SQLException
{
return original.getDate(columnIndex, cal);
}
@Override
public Date getDate(String columnLabel, Calendar cal)
throws SQLException
{
return original.getDate(columnLabel, cal);
}
@Override
public Time getTime(int columnIndex, Calendar cal)
throws SQLException
{
return original.getTime(columnIndex, cal);
}
@Override
public Time getTime(String columnLabel, Calendar cal)
throws SQLException
{
return original.getTime(columnLabel, cal);
}
@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal)
throws SQLException
{
return original.getTimestamp(columnIndex, cal);
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal)
throws SQLException
{
return original.getTimestamp(columnLabel, cal);
}
@Override
public URL getURL(int columnIndex)
throws SQLException
{
return original.getURL(columnIndex);
}
@Override
public URL getURL(String columnLabel)
throws SQLException
{
return original.getURL(columnLabel);
}
@Override
public void updateRef(int columnIndex, Ref x)
throws SQLException
{
original.updateRef(columnIndex, x);
}
@Override
public void updateRef(String columnLabel, Ref x)
throws SQLException
{
original.updateRef(columnLabel, x);
}
@Override
public void updateBlob(int columnIndex, Blob x)
throws SQLException
{
original.updateBlob(columnIndex, x);
}
@Override
public void updateBlob(String columnLabel, Blob x)
throws SQLException
{
original.updateBlob(columnLabel, x);
}
@Override
public void updateClob(int columnIndex, Clob x)
throws SQLException
{
original.updateClob(columnIndex, x);
}
@Override
public void updateClob(String columnLabel, Clob x)
throws SQLException
{
original.updateClob(columnLabel, x);
}
@Override
public void updateArray(int columnIndex, Array x)
throws SQLException
{
original.updateArray(columnIndex, x);
}
@Override
public void updateArray(String columnLabel, Array x)
throws SQLException
{
original.updateArray(columnLabel, x);
}
@Override
public RowId getRowId(int columnIndex)
throws SQLException
{
return original.getRowId(columnIndex);
}
@Override
public RowId getRowId(String columnLabel)
throws SQLException
{
return original.getRowId(columnLabel);
}
@Override
public void updateRowId(int columnIndex, RowId x)
throws SQLException
{
original.updateRowId(columnIndex, x);
}
@Override
public void updateRowId(String columnLabel, RowId x)
throws SQLException
{
original.updateRowId(columnLabel, x);
}
@Override
public int getHoldability()
throws SQLException
{
return original.getHoldability();
}
@Override
public boolean isClosed()
throws SQLException
{
return original.isClosed();
}
@Override
public void updateNString(int columnIndex, String nString)
throws SQLException
{
original.updateNString(columnIndex, nString);
}
@Override
public void updateNString(String columnLabel, String nString)
throws SQLException
{
original.updateNString(columnLabel, nString);
}
@Override
public void updateNClob(int columnIndex, NClob nClob)
throws SQLException
{
original.updateNClob(columnIndex, nClob);
}
@Override
public void updateNClob(String columnLabel, NClob nClob)
throws SQLException
{
original.updateNClob(columnLabel, nClob);
}
@Override
public NClob getNClob(int columnIndex)
throws SQLException
{
return original.getNClob(columnIndex);
}
@Override
public NClob getNClob(String columnLabel)
throws SQLException
{
return original.getNClob(columnLabel);
}
@Override
public SQLXML getSQLXML(int columnIndex)
throws SQLException
{
return original.getSQLXML(columnIndex);
}
@Override
public SQLXML getSQLXML(String columnLabel)
throws SQLException
{
return original.getSQLXML(columnLabel);
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject)
throws SQLException
{
original.updateSQLXML(columnIndex, xmlObject);
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject)
throws SQLException
{
original.updateSQLXML(columnLabel, xmlObject);
}
@Override
public String getNString(int columnIndex)
throws SQLException
{
return original.getNString(columnIndex);
}
@Override
public String getNString(String columnLabel)
throws SQLException
{
return original.getNString(columnLabel);
}
@Override
public Reader getNCharacterStream(int columnIndex)
throws SQLException
{
return original.getNCharacterStream(columnIndex);
}
@Override
public Reader getNCharacterStream(String columnLabel)
throws SQLException
{
return original.getNCharacterStream(columnLabel);
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length)
throws SQLException
{
original.updateNCharacterStream(columnIndex, x, length);
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader, long length)
throws SQLException
{
original.updateNCharacterStream(columnLabel, reader, length);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length)
throws SQLException
{
original.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length)
throws SQLException
{
original.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length)
throws SQLException
{
original.updateCharacterStream(columnIndex, x, length);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length)
throws SQLException
{
original.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length)
throws SQLException
{
original.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length)
throws SQLException
{
original.updateCharacterStream(columnLabel, reader, length);
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length)
throws SQLException
{
original.updateBlob(columnIndex, inputStream, length);
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length)
throws SQLException
{
original.updateBlob(columnLabel, inputStream, length);
}
@Override
public void updateClob(int columnIndex, Reader reader, long length)
throws SQLException
{
original.updateClob(columnIndex, reader, length);
}
@Override
public void updateClob(String columnLabel, Reader reader, long length)
throws SQLException
{
original.updateClob(columnLabel, reader, length);
}
@Override
public void updateNClob(int columnIndex, Reader reader, long length)
throws SQLException
{
original.updateNClob(columnIndex, reader, length);
}
@Override
public void updateNClob(String columnLabel, Reader reader, long length)
throws SQLException
{
original.updateNClob(columnLabel, reader, length);
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x)
throws SQLException
{
original.updateNCharacterStream(columnIndex, x);
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader)
throws SQLException
{
original.updateNCharacterStream(columnLabel, reader);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x)
throws SQLException
{
original.updateAsciiStream(columnIndex, x);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x)
throws SQLException
{
original.updateBinaryStream(columnIndex, x);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x)
throws SQLException
{
original.updateCharacterStream(columnIndex, x);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x)
throws SQLException
{
original.updateAsciiStream(columnLabel, x);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x)
throws SQLException
{
original.updateBinaryStream(columnLabel, x);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader)
throws SQLException
{
original.updateCharacterStream(columnLabel, reader);
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream)
throws SQLException
{
original.updateBlob(columnIndex, inputStream);
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream)
throws SQLException
{
original.updateBlob(columnLabel, inputStream);
}
@Override
public void updateClob(int columnIndex, Reader reader)
throws SQLException
{
original.updateClob(columnIndex, reader);
}
@Override
public void updateClob(String columnLabel, Reader reader)
throws SQLException
{
original.updateClob(columnLabel, reader);
}
@Override
public void updateNClob(int columnIndex, Reader reader)
throws SQLException
{
original.updateNClob(columnIndex, reader);
}
@Override
public void updateNClob(String columnLabel, Reader reader)
throws SQLException
{
original.updateNClob(columnLabel, reader);
}
@Nullable
@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
return original.getObject(columnIndex, type);
}
@Nullable
@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
return original.getObject(columnLabel, type);
}
@Override
public <T> T unwrap(Class<T> iface)
throws SQLException
{
return original.unwrap(iface);
}
@Override
public boolean isWrapperFor(Class<?> iface)
throws SQLException
{
return original.isWrapperFor(iface);
}
    /**
     * Creates the metadata wrapper for this result set via the data source's
     * JDBC factory; overridable by driver-specific subclasses.
     */
    protected JDBCResultSetMetaData createMetaDataImpl() throws SQLException
    {
        return session.getDataSource().getJdbcFactory().createResultSetMetaData(this);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.cluster.*;
import org.apache.ignite.internal.processors.affinity.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.cache.distributed.*;
import org.apache.ignite.internal.processors.cache.distributed.near.*;
import org.apache.ignite.internal.processors.cache.version.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.future.*;
import org.apache.ignite.internal.util.tostring.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.IgniteSystemProperties.*;
/**
* Colocated get future.
*/
public class GridPartitionedGetFuture<K, V> extends GridCompoundIdentityFuture<Map<K, V>>
implements GridCacheFuture<Map<K, V>> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Default max remap count value. */
    public static final int DFLT_MAX_REMAP_CNT = 3;

    /** Logger reference (shared holder used for one-time lazy initialization). */
    private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>();

    /** Logger (assigned once, in the constructor of the first instance). */
    private static IgniteLogger log;

    /** Maximum number of attempts to remap key to the same primary node. */
    private static final int MAX_REMAP_CNT = IgniteSystemProperties.getInteger(IGNITE_NEAR_GET_MAX_REMAPS,
        DFLT_MAX_REMAP_CNT);

    /** Cache context. */
    private GridCacheContext<K, V> cctx;

    /** Keys to fetch. */
    private Collection<KeyCacheObject> keys;

    /** Topology version to map keys on (may be refreshed in init()). */
    private AffinityTopologyVersion topVer;

    /** Reload flag. */
    private boolean reload;

    /** Read-through flag. */
    private boolean readThrough;

    /** Force primary flag. */
    private boolean forcePrimary;

    /** Future ID. */
    private IgniteUuid futId;

    /** Version. */
    private GridCacheVersion ver;

    /** Trackable flag. */
    private volatile boolean trackable;

    /** Remap count. */
    private AtomicInteger remapCnt = new AtomicInteger();

    /** Subject ID. */
    private UUID subjId;

    /** Task name. */
    private String taskName;

    /** Whether to deserialize portable objects. */
    private boolean deserializePortable;

    /** Expiry policy. */
    private IgniteCacheExpiryPolicy expiryPlc;

    /** Skip values flag. */
    private boolean skipVals;
    /**
     * @param cctx Context.
     * @param keys Keys.
     * @param topVer Topology version.
     * @param readThrough Read through flag.
     * @param reload Reload flag.
     * @param forcePrimary If {@code true} then will force network trip to primary node even
     *        if called on backup node.
     * @param subjId Subject ID.
     * @param taskName Task name.
     * @param deserializePortable Deserialize portable flag.
     * @param expiryPlc Expiry policy.
     * @param skipVals Skip values flag.
     */
    public GridPartitionedGetFuture(
        GridCacheContext<K, V> cctx,
        Collection<KeyCacheObject> keys,
        AffinityTopologyVersion topVer,
        boolean readThrough,
        boolean reload,
        boolean forcePrimary,
        @Nullable UUID subjId,
        String taskName,
        boolean deserializePortable,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean skipVals
    ) {
        // Reducer merges per-node result maps into a single result map
        super(cctx.kernalContext(), CU.<K, V>mapsReducer(keys.size()));

        assert !F.isEmpty(keys);

        this.cctx = cctx;
        this.keys = keys;
        this.topVer = topVer;
        this.readThrough = readThrough;
        this.reload = reload;
        this.forcePrimary = forcePrimary;
        this.subjId = subjId;
        this.deserializePortable = deserializePortable;
        this.taskName = taskName;
        this.expiryPlc = expiryPlc;
        this.skipVals = skipVals;

        futId = IgniteUuid.randomUuid();

        ver = cctx.versions().next();

        // Lazily initialize the shared static logger on first instance creation
        if (log == null)
            log = U.logger(cctx.kernalContext(), logRef, GridPartitionedGetFuture.class);
    }
/**
* Initializes future.
*/
public void init() {
AffinityTopologyVersion topVer = this.topVer.topologyVersion() > 0 ? this.topVer : cctx.affinity().affinityTopologyVersion();
map(keys, Collections.<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>>emptyMap(), topVer);
markInitialized();
}
/** {@inheritDoc} */
@Override public boolean trackable() {
return trackable;
}
/** {@inheritDoc} */
@Override public void markNotTrackable() {
// Should not flip trackable flag from true to false since get future can be remapped.
}
/** {@inheritDoc} */
@Override public IgniteUuid futureId() {
return futId;
}
/** {@inheritDoc} */
@Override public GridCacheVersion version() {
return ver;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public Collection<? extends ClusterNode> nodes() {
return
F.viewReadOnly(futures(), new IgniteClosure<IgniteInternalFuture<Map<K, V>>, ClusterNode>() {
@Nullable @Override public ClusterNode apply(IgniteInternalFuture<Map<K, V>> f) {
if (isMini(f))
return ((MiniFuture)f).node();
return cctx.discovery().localNode();
}
});
}
/** {@inheritDoc} */
@Override public boolean onNodeLeft(UUID nodeId) {
for (IgniteInternalFuture<Map<K, V>> fut : futures())
if (isMini(fut)) {
MiniFuture f = (MiniFuture)fut;
if (f.node().id().equals(nodeId)) {
f.onResult(new ClusterTopologyCheckedException("Remote node left grid (will retry): " + nodeId));
return true;
}
}
return false;
}
/**
* @param nodeId Sender.
* @param res Result.
*/
public void onResult(UUID nodeId, GridNearGetResponse res) {
for (IgniteInternalFuture<Map<K, V>> fut : futures())
if (isMini(fut)) {
MiniFuture f = (MiniFuture)fut;
if (f.futureId().equals(res.miniId())) {
assert f.node().id().equals(nodeId);
f.onResult(res);
}
}
}
/** {@inheritDoc} */
@Override public boolean onDone(Map<K, V> res, Throwable err) {
if (super.onDone(res, err)) {
// Don't forget to clean up.
if (trackable)
cctx.mvcc().removeFuture(this);
cache().sendTtlUpdateRequest(expiryPlc);
return true;
}
return false;
}
/**
* @param f Future.
* @return {@code True} if mini-future.
*/
private boolean isMini(IgniteInternalFuture<Map<K, V>> f) {
return f.getClass().equals(MiniFuture.class);
}
/**
* @param keys Keys.
* @param mapped Mappings to check for duplicates.
* @param topVer Topology version on which keys should be mapped.
*/
private void map(
Collection<KeyCacheObject> keys,
Map<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>> mapped,
AffinityTopologyVersion topVer
) {
if (CU.affinityNodes(cctx, topVer).isEmpty()) {
onDone(new ClusterTopologyCheckedException("Failed to map keys for cache " +
"(all partition nodes left the grid)."));
return;
}
Map<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>> mappings =
U.newHashMap(CU.affinityNodes(cctx, topVer).size());
final int keysSize = keys.size();
Map<K, V> locVals = U.newHashMap(keysSize);
boolean hasRmtNodes = false;
// Assign keys to primary nodes.
for (KeyCacheObject key : keys)
hasRmtNodes |= map(key, mappings, locVals, topVer, mapped);
if (isDone())
return;
if (!locVals.isEmpty())
add(new GridFinishedFuture<>(locVals));
if (hasRmtNodes) {
trackable = true;
cctx.mvcc().addFuture(this);
}
// Create mini futures.
for (Map.Entry<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>> entry : mappings.entrySet()) {
final ClusterNode n = entry.getKey();
final LinkedHashMap<KeyCacheObject, Boolean> mappedKeys = entry.getValue();
assert !mappedKeys.isEmpty();
// If this is the primary or backup node for the keys.
if (n.isLocal()) {
final GridDhtFuture<Collection<GridCacheEntryInfo>> fut =
cache().getDhtAsync(n.id(),
-1,
mappedKeys,
readThrough,
reload,
topVer,
subjId,
taskName == null ? 0 : taskName.hashCode(),
expiryPlc,
skipVals);
final Collection<Integer> invalidParts = fut.invalidPartitions();
if (!F.isEmpty(invalidParts)) {
Collection<KeyCacheObject> remapKeys = new ArrayList<>(keysSize);
for (KeyCacheObject key : keys) {
if (key != null && invalidParts.contains(cctx.affinity().partition(key)))
remapKeys.add(key);
}
AffinityTopologyVersion updTopVer = new AffinityTopologyVersion(cctx.discovery().topologyVersion());
assert updTopVer.compareTo(topVer) > 0 : "Got invalid partitions for local node but topology version did " +
"not change [topVer=" + topVer + ", updTopVer=" + updTopVer +
", invalidParts=" + invalidParts + ']';
// Remap recursively.
map(remapKeys, mappings, updTopVer);
}
// Add new future.
add(fut.chain(new C1<IgniteInternalFuture<Collection<GridCacheEntryInfo>>, Map<K, V>>() {
@Override public Map<K, V> apply(IgniteInternalFuture<Collection<GridCacheEntryInfo>> fut) {
try {
return createResultMap(fut.get());
}
catch (Exception e) {
U.error(log, "Failed to get values from dht cache [fut=" + fut + "]", e);
onDone(e);
return Collections.emptyMap();
}
}
}));
}
else {
MiniFuture fut = new MiniFuture(n, mappedKeys, topVer);
GridCacheMessage req = new GridNearGetRequest(
cctx.cacheId(),
futId,
fut.futureId(),
ver,
mappedKeys,
readThrough,
reload,
topVer,
subjId,
taskName == null ? 0 : taskName.hashCode(),
expiryPlc != null ? expiryPlc.forAccess() : -1L,
skipVals);
add(fut); // Append new future.
try {
cctx.io().send(n, req, cctx.ioPolicy());
}
catch (IgniteCheckedException e) {
// Fail the whole thing.
if (e instanceof ClusterTopologyCheckedException)
fut.onResult((ClusterTopologyCheckedException)e);
else
fut.onResult(e);
}
}
}
}
/**
* @param mappings Mappings.
* @param key Key to map.
* @param locVals Local values.
* @param topVer Topology version.
* @param mapped Previously mapped.
* @return {@code True} if has remote nodes.
*/
@SuppressWarnings("ConstantConditions")
private boolean map(
KeyCacheObject key,
Map<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>> mappings,
Map<K, V> locVals,
AffinityTopologyVersion topVer,
Map<ClusterNode, LinkedHashMap<KeyCacheObject, Boolean>> mapped
) {
GridDhtCacheAdapter<K, V> colocated = cache();
boolean remote = false;
// Allow to get cached value from the local node.
boolean allowLocRead = !forcePrimary || cctx.affinity().primary(cctx.localNode(), key, topVer);
while (true) {
GridCacheEntryEx entry = null;
try {
if (!reload && allowLocRead) {
try {
entry = colocated.context().isSwapOrOffheapEnabled() ? colocated.entryEx(key) :
colocated.peekEx(key);
// If our DHT cache do has value, then we peek it.
if (entry != null) {
boolean isNew = entry.isNewLocked();
CacheObject v = entry.innerGet(null,
/*swap*/true,
/*read-through*/false,
/*fail-fast*/true,
/*unmarshal*/true,
/**update-metrics*/false,
/*event*/!skipVals,
/*temporary*/false,
subjId,
null,
taskName,
expiryPlc);
colocated.context().evicts().touch(entry, topVer);
// Entry was not in memory or in swap, so we remove it from cache.
if (v == null) {
if (isNew && entry.markObsoleteIfEmpty(ver))
colocated.removeIfObsolete(key);
}
else {
cctx.addResult(locVals, key, v, skipVals, false, deserializePortable, true);
return false;
}
}
}
catch (GridDhtInvalidPartitionException ignored) {
// No-op.
}
}
ClusterNode node = cctx.affinity().primary(key, topVer);
remote = !node.isLocal();
LinkedHashMap<KeyCacheObject, Boolean> keys = mapped.get(node);
if (keys != null && keys.containsKey(key)) {
if (remapCnt.incrementAndGet() > MAX_REMAP_CNT) {
onDone(new ClusterTopologyCheckedException("Failed to remap key to a new node after " +
MAX_REMAP_CNT + " attempts (key got remapped to the same node) [key=" + key + ", node=" +
U.toShortString(node) + ", mappings=" + mapped + ']'));
return false;
}
}
LinkedHashMap<KeyCacheObject, Boolean> old = mappings.get(node);
if (old == null)
mappings.put(node, old = new LinkedHashMap<>(3, 1f));
old.put(key, false);
break;
}
catch (IgniteCheckedException e) {
onDone(e);
break;
}
catch (GridCacheEntryRemovedException ignored) {
// No-op, will retry.
}
catch (GridCacheFilterFailedException e) {
if (log.isDebugEnabled())
log.debug("Filter validation failed for entry: " + e);
colocated.context().evicts().touch(entry, topVer);
break;
}
}
return remote;
}
/**
* @return Near cache.
*/
private GridDhtCacheAdapter<K, V> cache() {
return cctx.dht();
}
/**
* @param infos Entry infos.
* @return Result map.
*/
private Map<K, V> createResultMap(Collection<GridCacheEntryInfo> infos) {
int keysSize = infos.size();
if (keysSize != 0) {
Map<K, V> map = new GridLeanMap<>(keysSize);
for (GridCacheEntryInfo info : infos)
cctx.addResult(map, info.key(), info.value(), skipVals, false, deserializePortable, false);
return map;
}
return Collections.emptyMap();
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridPartitionedGetFuture.class, this, super.toString());
}
/**
* Mini-future for get operations. Mini-futures are only waiting on a single
* node as opposed to multiple nodes.
*/
private class MiniFuture extends GridFutureAdapter<Map<K, V>> {
/** */
private static final long serialVersionUID = 0L;
/** */
private final IgniteUuid futId = IgniteUuid.randomUuid();
/** Node ID. */
private ClusterNode node;
/** Keys. */
@GridToStringInclude
private LinkedHashMap<KeyCacheObject, Boolean> keys;
/** Topology version on which this future was mapped. */
private AffinityTopologyVersion topVer;
/**
* @param node Node.
* @param keys Keys.
* @param topVer Topology version.
*/
MiniFuture(ClusterNode node, LinkedHashMap<KeyCacheObject, Boolean> keys, AffinityTopologyVersion topVer) {
this.node = node;
this.keys = keys;
this.topVer = topVer;
}
/**
* @return Future ID.
*/
IgniteUuid futureId() {
return futId;
}
/**
* @return Node ID.
*/
public ClusterNode node() {
return node;
}
/**
* @return Keys.
*/
public Collection<KeyCacheObject> keys() {
return keys.keySet();
}
/**
* @param e Error.
*/
void onResult(Throwable e) {
if (log.isDebugEnabled())
log.debug("Failed to get future result [fut=" + this + ", err=" + e + ']');
// Fail.
onDone(e);
}
/**
* @param e Failure exception.
*/
@SuppressWarnings("UnusedParameters")
void onResult(ClusterTopologyCheckedException e) {
if (log.isDebugEnabled())
log.debug("Remote node left grid while sending or waiting for reply (will retry): " + this);
final AffinityTopologyVersion updTopVer = new AffinityTopologyVersion(cctx.discovery().topologyVersion());
final GridFutureRemapTimeoutObject timeout = new GridFutureRemapTimeoutObject(this,
cctx.kernalContext().config().getNetworkTimeout(),
updTopVer,
e);
cctx.affinity().affinityReadyFuture(updTopVer).listen(
new CI1<IgniteInternalFuture<AffinityTopologyVersion>>() {
@Override public void apply(IgniteInternalFuture<AffinityTopologyVersion> fut) {
if (timeout.finish()) {
cctx.kernalContext().timeout().removeTimeoutObject(timeout);
// Remap.
map(keys.keySet(), F.t(node, keys), updTopVer);
onDone(Collections.<K, V>emptyMap());
}
}
}
);
cctx.kernalContext().timeout().addTimeoutObject(timeout);
}
/**
* @param res Result callback.
*/
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
void onResult(final GridNearGetResponse res) {
final Collection<Integer> invalidParts = res.invalidPartitions();
// If error happened on remote node, fail the whole future.
if (res.error() != null) {
onDone(res.error());
return;
}
// Remap invalid partitions.
if (!F.isEmpty(invalidParts)) {
AffinityTopologyVersion rmtTopVer = res.topologyVersion();
assert !rmtTopVer.equals(AffinityTopologyVersion.ZERO);
if (rmtTopVer.compareTo(topVer) <= 0) {
// Fail the whole get future.
onDone(new IgniteCheckedException("Failed to process invalid partitions response (remote node reported " +
"invalid partitions but remote topology version does not differ from local) " +
"[topVer=" + topVer + ", rmtTopVer=" + rmtTopVer + ", invalidParts=" + invalidParts +
", nodeId=" + node.id() + ']'));
return;
}
if (log.isDebugEnabled())
log.debug("Remapping mini get future [invalidParts=" + invalidParts + ", fut=" + this + ']');
// Need to wait for next topology version to remap.
IgniteInternalFuture<Long> topFut = cctx.discovery().topologyFuture(rmtTopVer.topologyVersion());
topFut.listen(new CIX1<IgniteInternalFuture<Long>>() {
@SuppressWarnings("unchecked")
@Override public void applyx(IgniteInternalFuture<Long> fut) throws IgniteCheckedException {
AffinityTopologyVersion topVer = new AffinityTopologyVersion(fut.get());
// This will append new futures to compound list.
map(F.view(keys.keySet(), new P1<KeyCacheObject>() {
@Override public boolean apply(KeyCacheObject key) {
return invalidParts.contains(cctx.affinity().partition(key));
}
}), F.t(node, keys), topVer);
onDone(createResultMap(res.entries()));
}
});
}
else
onDone(createResultMap(res.entries()));
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(MiniFuture.class, this);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.services;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.apache.usergrid.persistence.Entity;
import org.apache.usergrid.persistence.EntityManager;
import org.apache.usergrid.persistence.EntityRef;
import org.apache.usergrid.persistence.Query;
import org.apache.usergrid.persistence.Results;
import org.apache.usergrid.persistence.Schema;
import org.apache.usergrid.security.shiro.utils.SubjectUtils;
import org.apache.usergrid.services.ServiceParameter.IdParameter;
import org.apache.usergrid.services.ServiceParameter.NameParameter;
import org.apache.usergrid.services.ServiceParameter.QueryParameter;
import org.apache.usergrid.services.ServiceResults.Type;
import org.apache.usergrid.services.exceptions.ServiceInvocationException;
import org.apache.usergrid.services.exceptions.ServiceResourceNotFoundException;
import org.apache.usergrid.services.exceptions.UnsupportedServiceOperationException;
import org.apache.commons.lang.NotImplementedException;
import org.apache.shiro.subject.Subject;
import static org.apache.usergrid.security.shiro.utils.SubjectUtils.getPermissionFromPath;
import static org.apache.usergrid.services.ServiceParameter.filter;
import static org.apache.usergrid.services.ServiceParameter.mergeQueries;
import static org.apache.usergrid.utils.ClassUtils.cast;
import static org.apache.usergrid.utils.InflectionUtils.pluralize;
import static org.apache.usergrid.utils.ListUtils.dequeueCopy;
import static org.apache.usergrid.utils.ListUtils.isEmpty;
/**
 * Implementation of the Service interface. Builds a method-call fanout from the invoke() method so that overriding
 * specific behaviors can be done easily.
*
* @author edanuff
*/
public abstract class AbstractService implements Service {
    private static final Logger logger = LoggerFactory.getLogger( AbstractService.class );

    // Static configuration for this service: name, item type, collection name, root-ness.
    private ServiceInfo info;

    // Entity manager for the application this service instance is bound to.
    protected EntityManager em;
    // Owning service manager; also the delegate for non-root operations.
    protected ServiceManager sm;

    // Connection types hidden from clients (removed from metadata link maps).
    protected Set<String> privateConnections;
    // Connection types explicitly exposed by this service.
    protected Set<String> declaredConnections;

    // Collection names hidden from clients.
    protected Set<String> privateCollections;
    // Virtual collection names exposed in addition to the schema-defined ones.
    protected Set<String> declaredCollections;

    // Parameter-path rewrite rules applied by ServiceParameter.filter().
    protected Map<List<String>, List<String>> replaceParameters;

    // Command names handled at the service (collection) level.
    protected Set<String> serviceCommands;

    // Dictionaries ("sets") exposed on each entity.
    protected Set<EntityDictionaryEntry> entityDictionaries;
    // Metadata path segments recognized on this service.
    protected Set<String> metadataTypes;
    // Command names handled at the entity level.
    protected Set<String> entityCommands;

    // Metadata merged into every entity returned by importEntity().
    protected Map<String, Object> defaultEntityMetadata;

    public AbstractService() {
    }
    /** Wires in the owning service manager and caches its entity manager. */
    public void setServiceManager( ServiceManager sm ) {
        this.sm = sm;
        em = sm.getEntityManager();
    }

    /** @return the Spring application context exposed by the service manager. */
    public ApplicationContext getApplicationContext() {
        return sm.getApplicationContext();
    }

    /** Stores the static service configuration; called once during service setup. */
    public void init( ServiceInfo info ) {
        this.info = info;
    }

    /** @return the static service configuration, or {@code null} before {@link #init}. */
    public ServiceInfo getServiceInfo() {
        return info;
    }
@Override
public String getServiceType() {
if ( info == null ) {
return null;
}
return info.getName();
}
    /**
     * Stub: always returns {@code null}. Subclasses that bind to a concrete
     * entity class are expected to override this.
     */
    @Override
    public Class<? extends Entity> getEntityClass() {
        // TODO Auto-generated method stub
        return null;
    }
@Override
public String getEntityType() {
if ( info == null ) {
return null;
}
return info.getItemType();
}
@Override
public boolean isRootService() {
if ( info == null ) {
return false;
}
return info.isRootService();
}
public boolean isCollectionReversed( ServiceContext context ) {
if ( info == null ) {
return false;
}
if ( "application".equals( context.getOwner().getType() ) ) {
return Schema.getDefaultSchema().isCollectionReversed( "application", pluralize( info.getItemType() ) );
}
return Schema.getDefaultSchema().isCollectionReversed( info.getContainerType(), info.getCollectionName() );
}
public String getCollectionSort( ServiceContext context ) {
if ( info == null ) {
return null;
}
if ( "application".equals( context.getOwner().getType() ) ) {
return Schema.getDefaultSchema().getCollectionSort( "application", pluralize( info.getItemType() ) );
}
return Schema.getDefaultSchema().getCollectionSort( info.getContainerType(), info.getCollectionName() );
}
    // ---------------------------------------------------------------------
    // Declaration/visibility mutators. All follow the same lazy-init pattern:
    // the backing LinkedHash* collection is created on first use. Note the
    // lazy initialization is not synchronized; these are intended to be called
    // during single-threaded service setup.
    // ---------------------------------------------------------------------

    /** Hides a single connection type from clients. */
    public void makeConnectionPrivate( String connection ) {
        if ( privateConnections == null ) {
            privateConnections = new LinkedHashSet<String>();
        }
        privateConnections.add( connection );
    }

    /** Hides several connection types from clients. */
    public void makeConnectionsPrivate( List<String> connections ) {
        if ( privateConnections == null ) {
            privateConnections = new LinkedHashSet<String>();
        }
        privateConnections.addAll( connections );
    }

    /** Exposes a connection type on this service. */
    public void declareConnection( String connection ) {
        if ( declaredConnections == null ) {
            declaredConnections = new LinkedHashSet<String>();
        }
        declaredConnections.add( connection );
    }

    /** Exposes several connection types on this service. */
    public void declareConnections( List<String> connections ) {
        if ( declaredConnections == null ) {
            declaredConnections = new LinkedHashSet<String>();
        }
        declaredConnections.addAll( connections );
    }

    /** Hides a single collection from clients. */
    public void makeCollectionPrivate( String collection ) {
        if ( privateCollections == null ) {
            privateCollections = new LinkedHashSet<String>();
        }
        privateCollections.add( collection );
    }

    /** Hides several collections from clients. */
    public void makeCollectionsPrivate( List<String> collections ) {
        if ( privateCollections == null ) {
            privateCollections = new LinkedHashSet<String>();
        }
        privateCollections.addAll( collections );
    }

    /** Exposes a virtual (non-schema) collection on this service. */
    public void declareVirtualCollection( String collection ) {
        if ( declaredCollections == null ) {
            declaredCollections = new LinkedHashSet<String>();
        }
        declaredCollections.add( collection );
    }

    /** Exposes several virtual collections on this service. */
    public void declareVirtualCollections( List<String> collections ) {
        if ( declaredCollections == null ) {
            declaredCollections = new LinkedHashSet<String>();
        }
        declaredCollections.addAll( collections );
    }

    /** Registers a parameter-path rewrite rule (see ServiceParameter.filter). */
    public void addReplaceParameters( List<String> find, List<String> replace ) {
        if ( replaceParameters == null ) {
            replaceParameters = new LinkedHashMap<List<String>, List<String>>();
        }
        replaceParameters.put( find, replace );
    }

    /** Registers a service-level command name. */
    public void declareServiceCommands( String command ) {
        if ( serviceCommands == null ) {
            serviceCommands = new LinkedHashSet<String>();
        }
        serviceCommands.add( command );
    }

    /** Registers several service-level command names. */
    public void declareServiceCommands( List<String> commands ) {
        if ( serviceCommands == null ) {
            serviceCommands = new LinkedHashSet<String>();
        }
        serviceCommands.addAll( commands );
    }

    /** Registers an entity dictionary ("set") exposed on each entity. */
    public void declareEntityDictionary( EntityDictionaryEntry dictionary ) {
        if ( entityDictionaries == null ) {
            entityDictionaries = new LinkedHashSet<EntityDictionaryEntry>();
        }
        entityDictionaries.add( dictionary );
    }

    /** Registers an entity dictionary by name. */
    public void declareEntityDictionary( String dictionary ) {
        if ( entityDictionaries == null ) {
            entityDictionaries = new LinkedHashSet<EntityDictionaryEntry>();
        }
        entityDictionaries.add( new EntityDictionaryEntry( dictionary ) );
    }

    /** Registers several entity dictionaries by name. */
    public void declareEntityDictionaries( List<String> dictionaries ) {
        if ( entityDictionaries == null ) {
            entityDictionaries = new LinkedHashSet<EntityDictionaryEntry>();
        }
        for ( String dict : dictionaries ) {
            entityDictionaries.add( new EntityDictionaryEntry( dict ) );
        }
    }

    /** Registers a metadata path segment recognized by this service. */
    public void declareMetadataType( String type ) {
        if ( metadataTypes == null ) {
            metadataTypes = new LinkedHashSet<String>();
        }
        metadataTypes.add( type );
    }

    /** Registers several metadata path segments. */
    public void declareMetadataTypes( List<String> typeList ) {
        if ( metadataTypes == null ) {
            metadataTypes = new LinkedHashSet<String>();
        }
        metadataTypes.addAll( typeList );
    }

    /** Registers an entity-level command name. */
    public void declareEntityCommand( String command ) {
        if ( entityCommands == null ) {
            entityCommands = new LinkedHashSet<String>();
        }
        entityCommands.add( command );
    }

    /** Registers several entity-level command names. */
    public void declareEntityCommands( List<String> commands ) {
        if ( entityCommands == null ) {
            entityCommands = new LinkedHashSet<String>();
        }
        entityCommands.addAll( commands );
    }
    /**
     * Stub: always returns {@code null}. Subclasses supporting direct entity
     * lookup by UUID are expected to override this.
     */
    @Override
    public Entity getEntity( ServiceRequest request, UUID uuid ) throws Exception {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * Stub: always returns {@code null}. Subclasses supporting direct entity
     * lookup by name are expected to override this.
     */
    @Override
    public Entity getEntity( ServiceRequest request, String name ) throws Exception {
        // TODO Auto-generated method stub
        return null;
    }
    /** Convenience overload: imports {@code entity} using the context's request. */
    public Entity importEntity( ServiceContext context, Entity entity ) throws Exception {
        return importEntity( context.getRequest(), entity );
    }
@Override
public Entity importEntity( ServiceRequest request, Entity entity ) throws Exception {
if ( entity == null ) {
return null;
}
if ( !isRootService() ) {
return sm.importEntity( request, entity );
}
String path = request.getPath() + "/" + entity.getUuid();
Map<String, Object> metadata = new LinkedHashMap<String, Object>();
metadata.put( "path", path );
if ( defaultEntityMetadata != null ) {
metadata.putAll( defaultEntityMetadata );
}
Set<Object> connections = getConnectedTypesSet( entity );
if ( connections != null ) {
Map<String, Object> m = new LinkedHashMap<String, Object>();
for ( Object n : connections ) {
m.put( n.toString(), path + "/" + n );
}
metadata.put( "connections", m );
}
Set<Object> connecting = getConnectingTypesSet( entity );
if ( connecting != null ) {
Map<String, Object> m = new LinkedHashMap<String, Object>();
for ( Object n : connecting ) {
m.put( n.toString(), path + "/connecting/" + n );
}
metadata.put( "connecting", m );
}
Set<String> collections = getCollectionSet( entity );
if ( collections != null ) {
Map<String, Object> m = new LinkedHashMap<String, Object>();
for ( Object n : collections ) {
m.put( n.toString(), path + "/" + n );
}
metadata.put( "collections", m );
}
if ( entityDictionaries != null ) {
Map<String, Object> m = new LinkedHashMap<String, Object>();
for ( EntityDictionaryEntry dict : entityDictionaries ) {
m.put( dict.getName(), path + "/" + dict.getPath() );
}
metadata.put( "sets", m );
}
if ( metadata.size() > 0 ) {
entity.mergeMetadata( metadata );
}
return entity;
}
public void importEntities( ServiceRequest request, Results results ) throws Exception {
List<Entity> entities = results.getEntities();
if ( entities != null ) {
for ( Entity entity : entities ) {
Entity imported = importEntity( request, entity );
if ( imported != entity ) {
logger.debug( "Import returned new entity instace for {} replacing in results set",
entity.getUuid() );
results.replace( imported );
}
}
}
}
    /** Convenience overload: imports {@code results} using the context's request. */
    public void importEntities( ServiceContext context, Results results ) throws Exception {
        importEntities( context.getRequest(), results );
    }
@Override
public Entity writeEntity( ServiceRequest request, Entity entity ) throws Exception {
if ( !isRootService() ) {
return sm.writeEntity( request, entity );
}
return entity;
}
public void writeEntities( ServiceRequest request, Results results ) throws Exception {
List<Entity> entities = results.getEntities();
if ( entities != null ) {
for ( Entity entity : entities ) {
writeEntity( request, entity );
}
}
}
    /** Convenience overload: updates {@code ref} with an explicit payload via the context's request. */
    public Entity updateEntity( ServiceContext context, EntityRef ref, ServicePayload payload ) throws Exception {
        return updateEntity( context.getRequest(), ref, payload );
    }

    /** Convenience overload: updates {@code ref} using the context's own payload. */
    public Entity updateEntity( ServiceContext context, EntityRef ref ) throws Exception {
        return updateEntity( context.getRequest(), ref, context.getPayload() );
    }
    /**
     * Applies the payload's properties to an entity. Non-root services
     * delegate to the service manager. Root services require a full
     * {@link Entity} instance: the properties are persisted via the entity
     * manager and then mirrored onto the in-memory instance so the caller sees
     * the updated state. A bare entity reference is rejected.
     *
     * @throws NotImplementedException if {@code ref} is not a full entity
     */
    @Override
    public Entity updateEntity( ServiceRequest request, EntityRef ref, ServicePayload payload ) throws Exception {
        if ( !isRootService() ) {
            return sm.updateEntity( request, ref, payload );
        }
        if ( ref instanceof Entity ) {
            Entity entity = ( Entity ) ref;
            // Persist first, then update the in-memory copy to match.
            em.updateProperties( entity, payload.getProperties() );
            entity.addProperties( payload.getProperties() );
            return entity;
        }
        logger.error( "Attempted update of entity reference rather than full entity, currently unsupport - MUSTFIX" );
        throw new NotImplementedException();
    }
    /** Convenience overload: updates {@code results} with an explicit payload via the context's request. */
    public void updateEntities( ServiceContext context, Results results, ServicePayload payload ) throws Exception {
        updateEntities( context.getRequest(), results, payload );
    }

    /** Convenience overload: updates {@code results} using the context's own payload. */
    public void updateEntities( ServiceContext context, Results results ) throws Exception {
        updateEntities( context.getRequest(), results, context.getPayload() );
    }

    /**
     * Applies {@link #updateEntity(ServiceRequest, EntityRef, ServicePayload)}
     * to each entity in the result set; a result set with no entity list is a no-op.
     */
    public void updateEntities( ServiceRequest request, Results results, ServicePayload payload ) throws Exception {
        List<Entity> entities = results.getEntities();
        if ( entities != null ) {
            for ( Entity entity : entities ) {
                updateEntity( request, entity, payload );
            }
        }
    }
public Set<Object> getConnectedTypesSet( EntityRef ref ) throws Exception {
Set<Object> connections = em.getDictionaryAsSet( ref, Schema.DICTIONARY_CONNECTED_TYPES );
if ( connections == null ) {
return null;
}
if ( connections.size() > 0 ) {
connections.remove( "connection" );
if ( privateConnections != null ) {
connections.removeAll( privateConnections );
}
if ( connections.size() > 0 ) {
return new LinkedHashSet<Object>( connections );
}
}
return null;
}
public Set<Object> getConnectingTypesSet( EntityRef ref ) throws Exception {
Set<Object> connections = em.getDictionaryAsSet( ref, Schema.DICTIONARY_CONNECTING_TYPES );
if ( connections == null ) {
return null;
}
if ( connections.size() > 0 ) {
connections.remove( "connection" );
if ( privateConnections != null ) {
connections.removeAll( privateConnections );
}
if ( connections.size() > 0 ) {
return new LinkedHashSet<Object>( connections );
}
}
return null;
}
public Set<String> getCollectionSet( EntityRef ref ) {
Set<String> set = Schema.getDefaultSchema().getCollectionNames( ref.getType() );
set = new LinkedHashSet<String>( set );
if ( declaredCollections != null ) {
set.addAll( declaredCollections );
}
if ( privateCollections != null ) {
set.removeAll( privateCollections );
}
if ( set.size() > 0 ) {
return set;
}
return null;
}
    /**
     * Main entry point: builds a {@link ServiceContext} from the raw request
     * pieces and dispatches to {@link #invoke(ServiceContext)}.
     */
    @Override
    public ServiceResults invoke( ServiceAction action, ServiceRequest request, ServiceResults previousResults,
                                  ServicePayload payload ) throws Exception {
        ServiceContext context = getContext( action, request, previousResults, payload );
        return invoke( context );
    }
    /**
     * Create context from parameter queue. Returns context containing a query object that represents the parameters in
     * the queue. Remaining parameters are left for next service request to allow for request chaining.
     *
     * The first parameter determines the context shape: a name matching a
     * declared metadata type or service command produces a command/metadata
     * context; an ID, name, or query parameter produces a lookup context; no
     * parameter at all produces a bare collection context. Any query parameter
     * is merged with subsequent query parameters via mergeQueries().
     *
     * @return the constructed context, or {@code null} when the first
     *         parameter is of an unrecognized kind
     */
    public ServiceContext getContext( ServiceAction action, ServiceRequest request, ServiceResults previousResults,
                                      ServicePayload payload ) throws Exception {

        EntityRef owner = request.getOwner();
        // Application-owned services resolve their collection from the pluralized item type.
        String collectionName =
                "application".equals( owner.getType() ) ? pluralize( info.getItemType() ) : info.getCollectionName();

        // Apply any registered parameter rewrite rules, then pop the head parameter.
        List<ServiceParameter> parameters = filter( request.getParameters(), replaceParameters );

        ServiceParameter first_parameter = null;
        if ( !isEmpty( parameters ) ) {
            first_parameter = parameters.get( 0 );
            parameters = dequeueCopy( parameters );
        }

        // Metadata and service-command names take precedence over name lookups.
        if ( first_parameter instanceof NameParameter ) {
            if ( hasServiceMetadata( first_parameter.getName() ) ) {
                return new ServiceContext( this, action, request, previousResults, owner, collectionName, parameters,
                        payload ).withServiceMetadata( first_parameter.getName() );
            }
            else if ( hasServiceCommand( first_parameter.getName() ) ) {
                return new ServiceContext( this, action, request, previousResults, owner, collectionName, parameters,
                        payload ).withServiceCommand( first_parameter.getName() );
            }
        }

        Query query = null;
        if ( first_parameter instanceof QueryParameter ) {
            query = first_parameter.getQuery();
        }
        // Fold any following query parameters into the current query.
        parameters = mergeQueries( query, parameters );

        if ( first_parameter instanceof IdParameter ) {
            UUID id = first_parameter.getId();
            return new ServiceContext( this, action, request, previousResults, owner, collectionName,
                    Query.fromUUID( id ), parameters, payload );
        }
        else if ( first_parameter instanceof NameParameter ) {
            String name = first_parameter.getName();
            return new ServiceContext( this, action, request, previousResults, owner, collectionName,
                    Query.fromIdentifier( name ), parameters, payload );
        }
        else if ( query != null ) {
            return new ServiceContext( this, action, request, previousResults, owner, collectionName, query, parameters,
                    payload );
        }
        else if ( first_parameter == null ) {
            return new ServiceContext( this, action, request, previousResults, owner, collectionName, null, null,
                    payload );
        }

        return null;
    }
    /**
     * Dispatches a fully-built context. Service metadata and service commands
     * short-circuit the normal flow; otherwise the request is routed to the
     * query/name/uuid/collection handler and the result is post-processed by
     * any matching entity dictionary and entity command.
     */
    public ServiceResults invoke( ServiceContext context ) throws Exception {

        ServiceResults results = null;

        String metadataType = checkForServiceMetadata( context );
        if ( metadataType != null ) {
            return handleServiceMetadata( context, metadataType );
        }

        String serviceCommand = checkForServiceCommand( context );
        if ( serviceCommand != null ) {
            return handleServiceCommand( context, serviceCommand );
        }

        // Entity-level dictionary/command segments are detected up front and
        // applied to the results after the main invocation.
        EntityDictionaryEntry entityDictionary = checkForEntityDictionaries( context );
        String entityCommand = checkForEntityCommands( context );

        if ( context.isByQuery() ) {
            results = invokeItemsWithQuery( context, context.getQuery() );
        }
        else if ( context.isByName() ) {
            results = invokeItemWithName( context, context.getName() );
        }
        else if ( context.isByUuid() ) {
            results = invokeItemWithId( context, context.getUuid() );
        }
        else {
            results = invokeCollection( context );
        }

        results = handleEntityDictionary( context, results, entityDictionary );
        results = handleEntityCommand( context, results, entityCommand );

        return results;
    }
    /**
     * Routes a single-item-by-UUID request to the handler matching the HTTP-style action.
     *
     * @throws ServiceInvocationException for an unrecognized action
     */
    public ServiceResults invokeItemWithId( ServiceContext context, UUID id ) throws Exception {
        switch ( context.getAction() ) {
            case GET:
                return getItemById( context, id );

            case POST:
                return postItemById( context, id );

            case PUT:
                return putItemById( context, id );

            case DELETE:
                return deleteItemById( context, id );

            case HEAD:
                return headItemById( context, id );
        }

        throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
    }

    /**
     * Routes a single-item-by-name request to the handler matching the action.
     *
     * @throws ServiceInvocationException for an unrecognized action
     */
    public ServiceResults invokeItemWithName( ServiceContext context, String name ) throws Exception {
        switch ( context.getAction() ) {
            case GET:
                return getItemByName( context, name );

            case POST:
                return postItemByName( context, name );

            case PUT:
                return putItemByName( context, name );

            case DELETE:
                return deleteItemByName( context, name );

            case HEAD:
                return headItemByName( context, name );
        }

        throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
    }

    /**
     * Routes a query-based request to the handler matching the action.
     *
     * @throws ServiceInvocationException for an unrecognized action
     */
    public ServiceResults invokeItemsWithQuery( ServiceContext context, Query query ) throws Exception {
        switch ( context.getAction() ) {
            case GET:
                return getItemsByQuery( context, query );

            case POST:
                return postItemsByQuery( context, query );

            case PUT:
                return putItemsByQuery( context, query );

            case DELETE:
                return deleteItemsByQuery( context, query );

            case HEAD:
                return headItemsByQuery( context, query );
        }

        throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
    }

    /**
     * Routes a whole-collection request to the handler matching the action.
     *
     * @throws ServiceInvocationException for an unrecognized action
     */
    public ServiceResults invokeCollection( ServiceContext context ) throws Exception {
        switch ( context.getAction() ) {
            case GET:
                return getCollection( context );

            case POST:
                return postCollection( context );

            case PUT:
                return putCollection( context );

            case DELETE:
                return deleteCollection( context );

            case HEAD:
                return headCollection( context );
        }

        throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
    }
/*
 * Default CRUD handlers, intended to be selectively overridden.
 * GET handlers throw "resource not found" by default. PUT/POST/DELETE/HEAD
 * item handlers all delegate to the corresponding GET handler, so a
 * subclass that only overrides the GET path gets consistent addressing for
 * every verb. The collection-level PUT/POST/DELETE/HEAD handlers are
 * unsupported unless overridden.
 */

public ServiceResults getItemById( ServiceContext context, UUID id ) throws Exception {
    throw new ServiceResourceNotFoundException( context );
}

public ServiceResults getItemByName( ServiceContext context, String name ) throws Exception {
    throw new ServiceResourceNotFoundException( context );
}

public ServiceResults getItemsByQuery( ServiceContext context, Query query ) throws Exception {
    throw new ServiceResourceNotFoundException( context );
}

public ServiceResults getCollection( ServiceContext context ) throws Exception {
    throw new ServiceResourceNotFoundException( context );
}

public ServiceResults putItemById( ServiceContext context, UUID id ) throws Exception {
    return getItemById( context, id );
}

public ServiceResults putItemByName( ServiceContext context, String name ) throws Exception {
    return getItemByName( context, name );
}

public ServiceResults putItemsByQuery( ServiceContext context, Query query ) throws Exception {
    return getItemsByQuery( context, query );
}

public ServiceResults putCollection( ServiceContext context ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults postItemById( ServiceContext context, UUID id ) throws Exception {
    return getItemById( context, id );
}

public ServiceResults postItemByName( ServiceContext context, String name ) throws Exception {
    return getItemByName( context, name );
}

public ServiceResults postItemsByQuery( ServiceContext context, Query query ) throws Exception {
    return getItemsByQuery( context, query );
}

public ServiceResults postCollection( ServiceContext context ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults deleteItemById( ServiceContext context, UUID id ) throws Exception {
    return getItemById( context, id );
}

public ServiceResults deleteItemByName( ServiceContext context, String name ) throws Exception {
    return getItemByName( context, name );
}

public ServiceResults deleteItemsByQuery( ServiceContext context, Query query ) throws Exception {
    return getItemsByQuery( context, query );
}

public ServiceResults deleteCollection( ServiceContext context ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults headItemById( ServiceContext context, UUID id ) throws Exception {
    return getItemById( context, id );
}

public ServiceResults headItemByName( ServiceContext context, String name ) throws Exception {
    return getItemByName( context, name );
}

public ServiceResults headItemsByQuery( ServiceContext context, Query query ) throws Exception {
    return getItemsByQuery( context, query );
}

public ServiceResults headCollection( ServiceContext context ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}
/**
 * Returns true when the given command is one of this service's registered
 * service commands. Null command or an unconfigured command set yields false.
 */
public boolean hasServiceCommand( String command ) {
    if ( ( serviceCommands == null ) || ( command == null ) ) {
        return false;
    }
    return serviceCommands.contains( command );
}
/**
 * Inspects the request's first remaining path parameter and returns it if it
 * names a registered service command; otherwise returns null.
 *
 * @param context the request context; not consumed, only inspected
 * @return the matched command name, or null if none applies
 */
public String checkForServiceCommand( ServiceContext context ) {
    if ( serviceCommands == null ) {
        return null;
    }
    if ( !context.moreParameters() ) {
        return null;
    }
    String name = null;
    // only a name-type parameter can denote a command
    if ( context.firstParameterIsName() ) {
        name = context.firstParameter().getName();
        if ( serviceCommands.contains( name ) ) {
            return name;
        }
    }
    return null;
}
/**
 * Routes a detected service command to the verb-specific command handler.
 * POST and PUT forward the request payload; an unknown verb raises
 * ServiceInvocationException.
 */
public ServiceResults handleServiceCommand( ServiceContext context, String command ) throws Exception {
    switch ( context.getAction() ) {
        case GET:
            return getServiceCommand( context, command );
        case POST:
            return postServiceCommand( context, command, context.getPayload() );
        case PUT:
            return putServiceCommand( context, command, context.getPayload() );
        case DELETE:
            return deleteServiceCommand( context, command );
        case HEAD:
            return headServiceCommand( context, command );
    }
    throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
}
/*
 * Default service-command handlers: every verb is unsupported until a
 * subclass overrides it.
 */

public ServiceResults getServiceCommand( ServiceContext context, String command ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults putServiceCommand( ServiceContext context, String command, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults postServiceCommand( ServiceContext context, String command, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults deleteServiceCommand( ServiceContext context, String command ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults headServiceCommand( ServiceContext context, String command ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}
/**
 * Returns true when a registered entity dictionary matches the given name,
 * compared case-insensitively. False when no dictionaries are configured.
 */
public boolean hasEntityDictionary( String dictionary ) {
    if ( entityDictionaries != null ) {
        for ( EntityDictionaryEntry entry : entityDictionaries ) {
            if ( entry.getName().equalsIgnoreCase( dictionary ) ) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Inspects the request's first remaining path parameter and returns the
 * registered dictionary entry whose name matches it (case-insensitively),
 * or null when no dictionaries are configured or nothing matches.
 */
public EntityDictionaryEntry checkForEntityDictionaries( ServiceContext context ) {
    if ( entityDictionaries == null ) {
        return null;
    }
    if ( !context.moreParameters() ) {
        return null;
    }
    String name = null;
    // only a name-type parameter can denote a dictionary
    if ( context.firstParameterIsName() ) {
        name = context.firstParameter().getName();
        for ( EntityDictionaryEntry entry : entityDictionaries ) {
            if ( entry.getName().equalsIgnoreCase( name ) ) {
                return entry;
            }
        }
    }
    return null;
}
/**
 * Post-processes invocation results through an entity dictionary, if one was
 * detected. A no-op when dictionary is null or the results are empty;
 * otherwise re-dispatches on the single ref or the ref list.
 */
public ServiceResults handleEntityDictionary( ServiceContext context, ServiceResults results,
                                              EntityDictionaryEntry dictionary ) throws Exception {
    if ( dictionary != null ) {
        if ( results.size() == 1 ) {
            results = handleEntityDictionary( context, results.getRef(), dictionary );
        }
        else if ( results.size() > 1 ) {
            results = handleEntityDictionary( context, results.getRefs(), dictionary );
        }
    }
    return results;
}
/**
 * Single-entity convenience overload: wraps the ref in a one-element list
 * and delegates to the list-based dictionary handler.
 *
 * @throws UnsupportedServiceOperationException if ref is null
 */
public ServiceResults handleEntityDictionary( ServiceContext context, EntityRef ref,
                                              EntityDictionaryEntry dictionary ) throws Exception {
    if ( ref == null ) {
        throw new UnsupportedServiceOperationException( context );
    }
    List<EntityRef> single = new ArrayList<EntityRef>( 1 );
    single.add( ref );
    return handleEntityDictionary( context, single, dictionary );
}
/**
 * Dispatches a dictionary request against the given entity refs to the
 * verb-specific handler (GET/POST/PUT/DELETE/HEAD).
 *
 * @throws UnsupportedServiceOperationException if there are no refs to act on
 * @throws ServiceInvocationException if the action is unrecognized
 */
public ServiceResults handleEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                              EntityDictionaryEntry dictionary ) throws Exception {
    // isEmpty() is the idiomatic emptiness check (size() == 0 in the original)
    if ( ( refs == null ) || refs.isEmpty() ) {
        throw new UnsupportedServiceOperationException( context );
    }
    switch ( context.getAction() ) {
        case GET:
            return getEntityDictionary( context, refs, dictionary );
        case POST:
            return postEntityDictionary( context, refs, dictionary, context.getPayload() );
        case PUT:
            return putEntityDictionary( context, refs, dictionary, context.getPayload() );
        case DELETE:
            return deleteEntityDictionary( context, refs, dictionary );
        case HEAD:
            return headEntityDictionary( context, refs, dictionary );
    }
    throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
}
/**
 * Default GET handler for entity dictionaries: finds the matching registered
 * entry, checks permissions on the entity sub-path, and returns the
 * dictionary contents of the FIRST ref only as a generic result set.
 * Unsupported when the dictionary is not registered on this service.
 */
public ServiceResults getEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                           EntityDictionaryEntry dictionary ) throws Exception {
    for ( EntityDictionaryEntry entry : entityDictionaries ) {
        if ( entry.getName().equalsIgnoreCase( dictionary.getName() ) ) {
            // NOTE(review): only refs.get(0) is consulted even when multiple
            // refs were matched — confirm this is intentional
            EntityRef entityRef = refs.get( 0 );
            checkPermissionsForEntitySubPath( context, entityRef, entry.getPath() );
            Set<String> items = cast( em.getDictionaryAsSet( entityRef, entry.getName() ) );
            return new ServiceResults( this, context, Type.GENERIC, Results.fromData( items ), null, null );
        }
    }
    throw new UnsupportedServiceOperationException( context );
}
/*
 * Default entity-dictionary mutation/HEAD handlers: unsupported until a
 * subclass overrides them (only GET has a default implementation above).
 */

public ServiceResults putEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                           EntityDictionaryEntry dictionary, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults postEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                            EntityDictionaryEntry dictionary, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults deleteEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                              EntityDictionaryEntry dictionary ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults headEntityDictionary( ServiceContext context, List<EntityRef> refs,
                                            EntityDictionaryEntry dictionary ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}
/** Returns true when the given command is a registered entity command. */
public boolean hasEntityCommand( String command ) {
    return ( entityCommands != null ) && ( command != null ) && entityCommands.contains( command );
}

/**
 * Inspects the request's first remaining path parameter and returns it if it
 * names a registered entity command; otherwise returns null. Mirrors
 * checkForServiceCommand but against the entity command set.
 */
public String checkForEntityCommands( ServiceContext context ) {
    if ( entityCommands == null ) {
        return null;
    }
    if ( !context.moreParameters() ) {
        return null;
    }
    String name = null;
    // only a name-type parameter can denote a command
    if ( context.firstParameterIsName() ) {
        name = context.firstParameter().getName();
        if ( entityCommands.contains( name ) ) {
            return name;
        }
    }
    return null;
}
/**
 * Post-processes invocation results through an entity command, if one was
 * detected. A no-op when command is null or the results are empty.
 */
public ServiceResults handleEntityCommand( ServiceContext context, ServiceResults results, String command )
        throws Exception {
    if ( command != null ) {
        if ( results.size() == 1 ) {
            results = handleEntityCommand( context, results.getRef(), command );
        }
        else if ( results.size() > 1 ) {
            results = handleEntityCommand( context, results.getRefs(), command );
        }
    }
    return results;
}

/**
 * Single-entity convenience overload: wraps the ref in a one-element list
 * and delegates to the list-based command handler.
 *
 * @throws UnsupportedServiceOperationException if ref is null
 */
public ServiceResults handleEntityCommand( ServiceContext context, EntityRef ref, String command )
        throws Exception {
    if ( ref == null ) {
        throw new UnsupportedServiceOperationException( context );
    }
    List<EntityRef> refs = new ArrayList<EntityRef>();
    refs.add( ref );
    return handleEntityCommand( context, refs, command );
}
/**
 * Dispatches an entity command against the given refs to the verb-specific
 * handler.
 *
 * @throws UnsupportedServiceOperationException if there are no refs to act on
 * @throws ServiceInvocationException if the action is unrecognized
 */
public ServiceResults handleEntityCommand( ServiceContext context, List<EntityRef> refs, String command )
        throws Exception {
    if ( ( refs == null ) || ( refs.size() == 0 ) ) {
        throw new UnsupportedServiceOperationException( context );
    }
    switch ( context.getAction() ) {
        case GET:
            return getEntityCommand( context, refs, command );
        case POST:
            return postEntityCommand( context, refs, command, context.getPayload() );
        case PUT:
            return putEntityCommand( context, refs, command, context.getPayload() );
        case DELETE:
            return deleteEntityCommand( context, refs, command );
        case HEAD:
            return headEntityCommand( context, refs, command );
    }
    throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
}
/*
 * Default entity-command handlers: every verb is unsupported until a
 * subclass overrides it.
 */

public ServiceResults getEntityCommand( ServiceContext context, List<EntityRef> refs, String command )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults putEntityCommand( ServiceContext context, List<EntityRef> refs, String command,
                                        ServicePayload payload ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults postEntityCommand( ServiceContext context, List<EntityRef> refs, String command,
                                         ServicePayload payload ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults deleteEntityCommand( ServiceContext context, List<EntityRef> refs, String command )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults headEntityCommand( ServiceContext context, List<EntityRef> refs, String command )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}
/**
 * Returns true when the given type is one of this service's registered
 * metadata types. Null input or an unconfigured type set yields false.
 */
public boolean hasServiceMetadata( String metadata ) {
    if ( ( metadataTypes == null ) || ( metadata == null ) ) {
        return false;
    }
    return metadataTypes.contains( metadata );
}
/**
 * Returns the request's metadata type when it is registered on this service,
 * or null when no metadata types are configured, the request carries none,
 * or it is not registered.
 */
public String checkForServiceMetadata( ServiceContext context ) {
    if ( metadataTypes == null ) {
        return null;
    }
    // hoist the getter: the original called context.getServiceMetadata()
    // three times for the same value
    String metadata = context.getServiceMetadata();
    if ( metadata == null ) {
        return null;
    }
    return metadataTypes.contains( metadata ) ? metadata : null;
}
/**
 * Routes a detected metadata request to the verb-specific metadata handler.
 * POST and PUT forward the request payload; an unknown verb raises
 * ServiceInvocationException.
 */
public ServiceResults handleServiceMetadata( ServiceContext context, String metadataType ) throws Exception {
    switch ( context.getAction() ) {
        case GET:
            return getServiceMetadata( context, metadataType );
        case POST:
            return postServiceMetadata( context, metadataType, context.getPayload() );
        case PUT:
            return putServiceMetadata( context, metadataType, context.getPayload() );
        case DELETE:
            return deleteServiceMetadata( context, metadataType );
        case HEAD:
            return headServiceMetadata( context, metadataType );
    }
    throw new ServiceInvocationException( context, "Request action unhandled " + context.getAction() );
}
/*
 * Default metadata handlers: unsupported until overridden.
 */

public ServiceResults getServiceMetadata( ServiceContext context, String metadataType ) throws Exception {
    // NOTE(review): this branch is currently empty apart from commented-out
    // code, so the method always throws — apparently a stub awaiting a
    // real implementation
    if ( metadataTypes.contains( metadataType ) ) {
        // return new ServiceResults(this, context, Type.GENERIC,
        // Results.fromData(items), null, null);
    }
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults putServiceMetadata( ServiceContext context, String metadataType, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults postServiceMetadata( ServiceContext context, String metadataType, ServicePayload payload )
        throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults deleteServiceMetadata( ServiceContext context, String metadataType ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}

public ServiceResults headServiceMetadata( ServiceContext context, String metadataType ) throws Exception {
    throw new UnsupportedServiceOperationException( context );
}
/*
 * Permission-check convenience overloads: each builds the appropriate
 * request path and delegates to checkPermissionsForPath, which enforces
 * the permission (throwing on denial).
 */

public void checkPermissionsForCollection( ServiceContext context ) {
    String path = context.getPath();
    checkPermissionsForPath( context, path );
}

public void checkPermissionsForEntity( ServiceContext context, UUID entityId ) {
    String path = context.getPath( entityId );
    checkPermissionsForPath( context, path );
}

public void checkPermissionsForEntity( ServiceContext context, EntityRef entity ) {
    String path = context.getPath( entity );
    checkPermissionsForPath( context, path );
}

public void checkPermissionsForEntitySubPath( ServiceContext context, UUID entityId, String subPath ) {
    String path = context.getPath( entityId, subPath );
    checkPermissionsForPath( context, path );
}

public void checkPermissionsForEntitySubPath( ServiceContext context, EntityRef entity, String subPath ) {
    String path = context.getPath( entity, subPath );
    checkPermissionsForPath( context, path );
}
/**
 * Enforces the permission derived from the application, the request action,
 * and the given path. Enforcement is SubjectUtils.checkPermission(), which
 * throws on denial; everything else here is diagnostic logging. A request
 * with no current subject is allowed through unchecked.
 *
 * @param context the request context supplying the action
 * @param path    the resource path being accessed
 */
public void checkPermissionsForPath( ServiceContext context, String path ) {
    Subject currentUser = SubjectUtils.getSubject();
    if ( currentUser == null ) {
        // no authenticated subject; nothing to enforce
        return;
    }
    // NOTE(review): toLowerCase() uses the default locale — confirm
    // permission strings are locale-insensitive
    String perm =
            getPermissionFromPath( em.getApplicationRef().getUuid(), context.getAction().toString().toLowerCase(),
                    path );
    if ( logger.isDebugEnabled() ) {
        // the original evaluated isPermitted() unconditionally, paying for a
        // full permission evaluation even when debug logging was disabled;
        // it is only needed for this trace
        boolean permitted = currentUser.isPermitted( perm );
        logger.debug( PATH_MSG, new Object[] { path, context.getAction(), perm, permitted } );
    }
    SubjectUtils.checkPermission( perm );
    if ( logger.isDebugEnabled() ) {
        Subject subject = SubjectUtils.getSubject();
        logger.debug( "Checked subject {} for perm {}", subject != null ? subject.toString() : "", perm );
        logger.debug( "------------------------------------------------------------------------------" );
    }
}
// SLF4J-style template for the permission-check debug trace; filled with
// { path, action, permission, permitted }.
private static final String PATH_MSG =
        "---- Checked permissions for path --------------------------------------------\n" + "Requested path: {} \n"
                + "Requested action: {} \n" + "Requested permission: {} \n" + "Permitted: {} \n";

/** Purpose is to enable entity dictionary entries to have name not equal to path segment. */
protected static class EntityDictionaryEntry {
    // dictionary name as stored in the entity manager
    private String name;
    private String path; // path segment used in URL

    /** Entry whose URL path segment equals the dictionary name. */
    public EntityDictionaryEntry( String name ) {
        this.name = this.path = name;
    }

    /** Entry with an explicit URL path segment distinct from the name. */
    public EntityDictionaryEntry( String name, String path ) {
        this.name = name;
        this.path = path;
    }

    public String getName() {
        return name;
    }

    public String getPath() {
        return path;
    }
}
}
| |
package com.wordsaretoys.quencher.scores;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Region.Op;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Message;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.OnDoubleTapListener;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.View;
import android.widget.OverScroller;
import com.wordsaretoys.quencher.R;
import com.wordsaretoys.quencher.audio.Engine;
import com.wordsaretoys.quencher.common.Notifier;
import com.wordsaretoys.quencher.common.Notifier.NotificationListener;
import com.wordsaretoys.quencher.data.Note;
import com.wordsaretoys.quencher.data.Scale;
import com.wordsaretoys.quencher.data.Score;
import com.wordsaretoys.quencher.data.Track;
public class ComposerView extends View implements
OnGestureListener, OnDoubleTapListener, NotificationListener {
static final String TAG = "ComposerView";

// goto position targets for goTo(Goto)
public enum Goto {
    Start, End, Cursor, Marker
}

// basic drawing parameters (pixels)
static final int TimeBarHeight = 64;
static final int GridSize = 1;
static final int GapSize = 4;
static final int CursorSize = 8;
static final int AnnotationSize = 24;
static final int NoteButtonSize = 64;
static final int NoteButtonPad = 4;

// derived drawing parameters
static final int TimeLineTop = TimeBarHeight / 2;
static final int AnnotationTop = GapSize;
static final int NoteTop = AnnotationTop + AnnotationSize + GapSize;
static final int CursorTop = NoteTop + NoteButtonSize + 2 * GapSize;
static final int TrackHeight = CursorTop + CursorSize + GapSize + GridSize;
static final int NoteWidth = NoteButtonSize + 2 * GapSize + GridSize;
static final int BeatWidth = NoteWidth;
static final int NoteCenter = NoteButtonSize / 2;
static final int NoteTextArea = NoteButtonSize - 2 * NoteButtonPad;

final Resources Res = getResources();

// color resources
int colorWhite = Res.getColor(R.color.white);
int colorBlack = Res.getColor(R.color.black);
int colorClouds = Res.getColor(R.color.clouds);
int colorEmerald = Res.getColor(R.color.emerald);
int colorCarrot = Res.getColor(R.color.carrot);
int colorSunflower = Res.getColor(R.color.sunflower);
int colorLtGray0 = Res.getColor(R.color.ltgray0);
int colorLtGray1 = Res.getColor(R.color.ltgray1);
int colorLtGray2 = Res.getColor(R.color.ltgray2);
int colorLtGray3 = Res.getColor(R.color.ltgray3);
int colorMdGray1 = Res.getColor(R.color.mdgray1);
int colorFaintBlu = Res.getColor(R.color.faintblu);
int colorFaintYello = Res.getColor(R.color.faintyello);
int colorLtBlue1 = Res.getColor(R.color.ltblue1);

// score editor background color
final int BackingColor = colorClouds;
// note button background color
final int ButtonBackingColor = colorLtGray1;
// note button text color
final int NoteButtonTextColor = colorBlack;
// time bar background color
final int TimeBarBackingColor = colorFaintYello;
// time bar text color
final int TimeBarTextColor = colorBlack;
// note focus highlight color
final int NoteFocusColor = colorBlack;
// focused track color
final int TrackFocusColor = colorFaintBlu;
// play marker color
final int PlayMarkerColor = colorBlack;
// annotation text color
final int AnnotationColor = colorBlack;
// grid line color
final int GridLineColor = colorLtGray2;
// note selection color
final int SelectionColor = colorCarrot;
// bar note backing color
final int BarBackingColor = colorLtGray3;
// note playing color
final int NotePlayingColor = colorEmerald;
// note button pressed color
final int NotePressedColor = colorLtBlue1;

// string resources
String stringMuted = Res.getString(R.string.trackMuted);

// used to generate strings for display without heap allocations;
// stringer builds text, charBuffer receives it for drawText(char[],...)
StringBuilder stringer = new StringBuilder();
char[] charBuffer = new char[256];

// gesture detectors
GestureDetector gestureDetector;
// overscroller handles fling animations and overscroll notifications
OverScroller scroller;
// paint objects for all drawing
Paint fillBrush, lineBrush, textBrush;
// beat marker bitmap
Bitmap beatMarkerBmp;
// scroll offsets (pixels)
float scrollX, scrollY;
// last track and slot touched
int trackTouched, slotTouched;
// rect object for drawing roundrects
RectF rect = new RectF();
// indicates audio is playing back
boolean playback;
// elapsed playback time in decimal seconds
float playbackTime;
// beat scaling factor across tracks
float beatScale;
// note undergoing press indicator flag
boolean pressing;
/**
 * Default constructor, required by the layout inflator. Builds the paint
 * objects, gesture detector, scroller and beat-marker bitmap, then syncs
 * playback state and settings.
 *
 * @param context view context
 * @param attrs   XML layout attributes
 */
public ComposerView(Context context, AttributeSet attrs) {
    super(context, attrs);
    setBackgroundColor(BackingColor);
    // set up the paint objects
    fillBrush = new Paint();
    fillBrush.setStyle(Style.FILL);
    lineBrush = new Paint();
    lineBrush.setStyle(Style.STROKE);
    textBrush = new Paint();
    textBrush.setTypeface(Typeface.SANS_SERIF);
    textBrush.setAntiAlias(true);
    gestureDetector = new GestureDetector(context, this);
    gestureDetector.setOnDoubleTapListener(this);
    scroller = new OverScroller(context);
    beatMarkerBmp = BitmapFactory.decodeResource(
            getResources(), R.drawable.ic_beat_marker);
    // seed playback flag from the audio engine in case we were recreated
    // mid-playback
    playback = Engine.INSTANCE.isPlaying();
    onSettingsChanged();
}
/**
 * Persists this view's scroll position into the given bundle under the
 * "composerView" key, for later restoration by loadState().
 *
 * @param b destination bundle
 */
public void saveState(Bundle b) {
    Bundle state = new Bundle();
    state.putFloat("scrollX", scrollX);
    state.putFloat("scrollY", scrollY);
    b.putBundle("composerView", state);
}
/**
 * Restores scroll position previously written by {@link #saveState}. Safe to
 * call with a bundle that contains no composer state.
 *
 * @param b source bundle
 */
public void loadState(Bundle b) {
    Bundle c = b.getBundle("composerView");
    if (c == null) {
        // the original dereferenced c unconditionally and would NPE when no
        // composer state had been saved; keep current scroll instead
        return;
    }
    scrollX = c.getFloat("scrollX");
    scrollY = c.getFloat("scrollY");
    enforceScrollLimits();
}
/**
 * React to a change in the persistent settings: re-clip scroll offsets and
 * schedule a redraw.
 */
private void onSettingsChanged() {
    enforceScrollLimits();
    postInvalidate();
}
/**
 * Adjust/check drawing parameters post-rotation: re-clip scroll offsets and
 * recompute the beat scale for the new dimensions.
 */
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
    // the original omitted both @Override and the superclass call;
    // View.onSizeChanged is part of the framework contract
    super.onSizeChanged(w, h, oldw, oldh);
    enforceScrollLimits();
    updateBeatScale();
}
/**
 * Generates a time string in the format HH:MM:SS.SSS, placing the result in
 * the stringer object and the char buffer (grown on demand).
 *
 * @param t decimal time in seconds
 */
void generateTimeString(float t) {
    ScoreCommon.buildTimeString(t, stringer);
    // grow the reusable buffer if the formatted text outsized it
    if (stringer.length() > charBuffer.length) {
        charBuffer = new char[stringer.length()];
    }
    stringer.getChars(0, stringer.length(), charBuffer, 0);
}
/**
 * Generates the left-hand track annotation ("scale / voice"), placing the
 * result in the stringer object and char buffer.
 *
 * @param track annotating track
 */
void buildLeftAnnotation(Track track) {
    stringer.setLength(0);
    stringer.append(track.getScale().getName());
    // only add the separator when the scale actually contributed a name
    if (stringer.length() > 0) {
        stringer.append(" / ");
    }
    stringer.append(track.getVoice().getName());
    if (stringer.length() > charBuffer.length) {
        charBuffer = new char[stringer.length()];
    }
    stringer.getChars(0, stringer.length(), charBuffer, 0);
}
/**
 * Generates the right-hand track annotation — a crude volume "meter" of one
 * "I" per tenth of full volume, or the muted label — placing the result in
 * the stringer object and char buffer.
 *
 * @param track annotating track
 */
void buildRightAnnotation(Track track) {
    stringer.setLength(0);
    if (track.isMuted()) {
        stringer.append(stringMuted);
    } else {
        int bars = (int) (track.getVolume() * 10);
        while (bars-- > 0) {
            stringer.append("I");
        }
    }
    if (stringer.length() > charBuffer.length) {
        charBuffer = new char[stringer.length()];
    }
    stringer.getChars(0, stringer.length(), charBuffer, 0);
}
/**
 * Check current scroll settings and clip when necessary. X is only clamped
 * at zero (the score can grow rightward indefinitely); Y is clamped between
 * zero and the bottom of the track list.
 *
 * @return true if scroll was clipped
 */
private boolean enforceScrollLimits() {
    ScoreCommon common = ScoreActivity.common;
    boolean clipped = false;
    if (scrollX < 0) {
        scrollX = 0;
        clipped = true;
    }
    if (scrollY < 0) {
        scrollY = 0;
        clipped = true;
    }
    // lowest allowed scrollY: bottom of the last track, accounting for the
    // time bar at the top of the viewport
    int tracks = common.getScore().getTrackCount();
    int ylimit = (int)(tracks * TrackHeight) - getHeight() + TimeBarHeight;
    ylimit = Math.max(ylimit, 0);
    if (scrollY > ylimit) {
        scrollY = ylimit;
        clipped = true;
    }
    return clipped;
}
/**
 * Converts a screen x coordinate to a decimal beat position, correcting for
 * the current horizontal scroll.
 *
 * @param x screen x coordinate
 * @return decimal position of left edge of beat
 */
private float screenToBeat(float x) {
    float beatPixels = BeatWidth * beatScale;
    return (x + scrollX) / beatPixels;
}
/**
 * find the screen coordinate of the specified beat
 * corrects for scroll and track listing
 *
 * @param b decimal beat
 * @return screen x coordinate of left edge
 */
private float beatToScreen(float b) {
    return b * BeatWidth * beatScale - scrollX;
}

/**
 * find the track index corresponding to the screen position
 * corrects for scroll and time bar
 *
 * @param y screen y coordinate
 * @return track index
 */
private int screenToTrack(float y) {
    return (int)((y + scrollY - TimeBarHeight) / TrackHeight);
}

/**
 * find the screen coordinate of the specified track index
 * corrects for scroll and time bar
 *
 * @param t track index
 * @return y coordinate of top edge of track
 */
private float trackToScreen(int t) {
    return t * TrackHeight - scrollY + TimeBarHeight;
}

/**
 * finds the note index corresponding to the screen position
 * corrects for scroll; note width depends on the track's timing scale
 *
 * @param x screen x coordinate
 * @param scale track scale
 * @return note index
 */
private int screenToNote(float x, float scale) {
    return (int)((x + scrollX) / (scale * beatScale * NoteWidth));
}

/**
 * find the screen coordinate of the specified note index
 * (inverse of screenToNote); corrects for scroll
 *
 * @param n note index
 * @param scale track scale
 * @return screen x coordinate of left edge
 */
private float noteToScreen(int n, float scale) {
    return n * scale * beatScale * NoteWidth - scrollX;
}

/**
 * get the vertical center for drawing a line of text
 * @param p paint object with valid text settings
 * @param y0 top of drawing region
 * @param y1 bottom of drawing region
 * @return y-coordinate of text center
 */
private float midText(Paint p, float y0, float y1) {
    // ascent is negative, descent positive; this centers the glyph box
    return (y0 + y1 - p.ascent() - p.descent()) * 0.5f;
}

/**
 * get editable state of composer: notes are editable only when not playing
 * back and not in selection mode
 * @return true if notes should be editable
 */
public boolean isEditable() {
    return !(playback || ScoreActivity.common.isSelecting());
}
/**
 * Scroll or jump to the specified standard position (start, end of score,
 * edit cursor, or beat marker). Any fling in progress is cancelled and the
 * target is centered horizontally where possible.
 *
 * @param where code for standard position
 */
public void goTo(Goto where) {
    scroller.forceFinished(true);
    ScoreCommon common = ScoreActivity.common;
    float x;
    switch(where) {
    case Start:
        scrollX = 0;
        break;
    case End:
        Note note = common.getLastNote();
        if (note != null) {
            // center on the slot just after the last note
            x = noteToScreen(
                    note.getIndex() + 1,
                    note.getTrack().getTiming());
            scrollX = Math.max(0, scrollX + x - getWidth() * 0.5f);
        } else {
            // no notes in scroll, go to start
            scrollX = 0;
        }
        break;
    case Cursor:
        x = noteToScreen(
                common.getNotePosition(),
                common.getFocusedTrack().getTiming());
        scrollX = Math.max(0, scrollX + x - getWidth() * 0.5f);
        break;
    case Marker:
        x = beatToScreen(common.getBeatMarker());
        scrollX = Math.max(0, scrollX + x - getWidth() * 0.5f);
        break;
    }
    postInvalidate();
}
/**
 * Scroll or jump to the specified time, converting seconds to beats via the
 * score tempo and centering the target horizontally.
 *
 * @param time decimal seconds to move to
 */
public void goTo(float time) {
    scroller.forceFinished(true);
    // beats = seconds * (beats per minute) / 60
    float b = time * (float) ScoreActivity.common.getScore().getTempo() / 60f;
    float x = beatToScreen(b);
    scrollX = Math.max(0, scrollX + x - getWidth() * 0.5f);
    postInvalidate();
}

/**
 * Get current X scroll as a time value (inverse of goTo(float)).
 *
 * @return scrollX in decimal seconds
 */
public float getScrollTime() {
    float b = screenToBeat(0);
    return 60f * (float) b / (float) ScoreActivity.common.getScore().getTempo();
}
/**
 * Draw the custom view: track focus highlight, time bar with timestamps and
 * beat lines, per-track annotations and note buttons, playback marker, and
 * any fling animation step. Heavily order-dependent; kept allocation-free
 * (reusable rect/stringer/charBuffer) since it runs per frame.
 *
 * NOTE(review): missing @Override; also uses Region.Op.REPLACE clipping,
 * which is restricted on modern Android API levels — confirm target SDK.
 */
protected void onDraw(Canvas canvas) {
    ScoreCommon common = ScoreActivity.common;
    Score score = common.getScore();
    // find beat/track drawing limits
    int firstBeat = (int) screenToBeat(0);
    int lastBeat = (int) screenToBeat(getWidth());
    int firstTrack = screenToTrack(0);
    int lastTrack = screenToTrack(getHeight());
    int trackCount = score.getTrackCount();
    // find focused elements
    int editTrackPos = common.getTrackPosition();
    int editNotePos = common.getNotePosition();
    // draw the track highlight under everything else
    if (!playback) {
        fillBrush.setColor(TrackFocusColor);
        float focusTop = trackToScreen(editTrackPos);
        canvas.drawRect(0, focusTop, getWidth(), focusTop + TrackHeight, fillBrush);
    }
    // fill in the time bar
    canvas.clipRect(0, 0, getWidth(), TimeBarHeight, Op.REPLACE);
    canvas.drawColor(TimeBarBackingColor);
    canvas.clipRect(0, 0, getWidth(), getHeight(), Op.REPLACE);
    lineBrush.setStrokeWidth(GridSize);
    textBrush.setColor(TimeBarTextColor);
    // for each beat (one extra on the left so partial beats still draw)
    for (int b = firstBeat - 1; b <= lastBeat; b++) {
        float x = beatToScreen(b) - GridSize;
        lineBrush.setColor(NoteFocusColor);
        // every four beats
        if (b % 4 == 0) {
            // draw the time line up to the top
            canvas.drawLine(x, 0, x, TimeBarHeight, lineBrush);
            // generate and draw the timestamp
            textBrush.setTextSize(TimeLineTop * 0.5f);
            textBrush.setTextAlign(Paint.Align.LEFT);
            float time = 60f * (float) b / (float) score.getTempo();
            generateTimeString(time);
            canvas.drawText(charBuffer, 0, stringer.length(), x,
                    midText(textBrush, 0, TimeLineTop), textBrush);
        } else {
            // only draw a time line and only half-height
            // to keep it from screwing up the timestamps
            canvas.drawLine(x, TimeLineTop, x, TimeBarHeight, lineBrush);
        }
        // draw beat lines
        lineBrush.setColor(GridLineColor);
        canvas.drawLine(x, TimeBarHeight, x, getHeight(), lineBrush);
        // draw the beat marker if it's present
        if (!playback && b == common.getBeatMarker()) {
            fillBrush.setColor(PlayMarkerColor);
            canvas.drawBitmap(beatMarkerBmp,
                    x - beatMarkerBmp.getWidth() * 0.5f,
                    TimeBarHeight - beatMarkerBmp.getHeight(), fillBrush);
        }
    }
    // clip out the time bar for the track area
    canvas.clipRect(0, TimeBarHeight, getWidth(), getHeight(), Op.REPLACE);
    textBrush.setColor(NoteButtonTextColor);
    // for each potential track
    for (int t = firstTrack; t <= lastTrack; t++) {
        float top = trackToScreen(t);
        float noteTop = top + NoteTop;
        // draw grid line
        lineBrush.setStrokeWidth(GridSize);
        lineBrush.setColor(GridLineColor);
        float y = top + TrackHeight - GridSize;
        canvas.drawLine(0, y, getWidth(), y, lineBrush);
        // if there's actually a track here
        if (t < trackCount) {
            Track track = score.getTrack(t);
            Scale scale = track.getScale();
            // draw annotations
            textBrush.setTextSize(AnnotationSize);
            textBrush.setColor(AnnotationColor);
            // NOTE(review): this null check is dead — track was already
            // dereferenced via getScale() above; an NPE would have fired first
            if (track != null) {
                float uy = midText(textBrush,
                        AnnotationTop, AnnotationTop + AnnotationSize);
                buildLeftAnnotation(track);
                textBrush.setTextAlign(Align.LEFT);
                canvas.drawText(charBuffer, 0, stringer.length(),
                        GapSize, top + uy, textBrush);
                buildRightAnnotation(track);
                textBrush.setTextAlign(Align.RIGHT);
                canvas.drawText(charBuffer, 0, stringer.length(),
                        getWidth() - GapSize, top + uy, textBrush);
            }
            // find the visible notes
            float timing = track.getTiming();
            int firstNote = screenToNote(0, timing);
            int lastNote = screenToNote(getWidth(), timing);
            int playbackIndex = track.timeToPosition(playbackTime);
            // for each note
            for (int n = firstNote; n <= lastNote; n++) {
                float left = noteToScreen(n, timing);
                float x0 = left + GapSize;
                float y0 = noteTop;
                float x1 = x0 + NoteButtonSize;
                float y1 = y0 + NoteButtonSize;
                rect.set(x0, y0, x1, y1);
                Note note = track.getNote(n);
                // draw the cursor highlight if it points here
                // and the score is currently editable
                if (isEditable() && t == editTrackPos && n == editNotePos) {
                    lineBrush.setStrokeWidth(CursorSize);
                    lineBrush.setColor(NoteFocusColor);
                    y = top + CursorTop;
                    canvas.drawLine(x0, y, x1, y, lineBrush);
                }
                // choose button back color based on playback,
                // selection state and position within the bar
                boolean selected = common.isSelecting() &&
                        track == common.getSelectionTrack() &&
                        common.isNoteSelected(n);
                boolean playnote = playback &&
                        playbackIndex == n &&
                        note != null;
                boolean barstart = (n % (track.getSlots() * track.getBeats())) == 0;
                boolean pressed = pressing &&
                        t == trackTouched &&
                        n == slotTouched;
                // priority: selection > press > playback > bar start > default
                if (selected) {
                    fillBrush.setColor(SelectionColor);
                } else if (pressed) {
                    fillBrush.setColor(NotePressedColor);
                } else if (playnote) {
                    fillBrush.setColor(NotePlayingColor);
                } else if (barstart) {
                    fillBrush.setColor(BarBackingColor);
                } else {
                    fillBrush.setColor(ButtonBackingColor);
                }
                canvas.drawRoundRect(rect, 8, 8, fillBrush);
                if (note != null) {
                    scale.drawNote(canvas, textBrush, rect, NoteButtonPad, note.getPitchNumber());
                }
            }
        }
    }
    // draw marker and scroll to current position during playback
    if (playback) {
        canvas.clipRect(0, 0, getWidth(), TimeBarHeight, Op.REPLACE);
        float beat = playbackTime * (float) common.getScore().getTempo() / 60f;
        int x = (int) beatToScreen(beat);
        canvas.drawBitmap(beatMarkerBmp,
                x - beatMarkerBmp.getWidth() * 0.5f,
                TimeBarHeight - beatMarkerBmp.getHeight(), fillBrush);
        if (scroller.isFinished()) {
            scrollToPosition(x);
        }
    }
    // handle any fling/scrolling animation in progress
    if (scroller.computeScrollOffset()) {
        scrollX = scroller.getCurrX();
        scrollY = scroller.getCurrY();
        // if we've reached scrolling limits, stop the fling
        if (enforceScrollLimits()) {
            scroller.forceFinished(true);
        }
        // to keep animation going, we need another draw (later)
        postInvalidate();
    }
}
/**
 * Animate the view toward a horizontal position. Targets within one
 * screen width of the viewport are paged toward with a smooth scroll;
 * anything farther away is jumped to directly.
 * @param x target position in view coordinates
 */
private void scrollToPosition(int x) {
    scroller.forceFinished(true);
    final int width = getWidth();
    if (x < 0) {
        if (x > -width) {
            // less than one page off the left edge: animate one page back
            scroller.startScroll((int) scrollX, (int) scrollY, -width, 0);
        } else {
            // too far to animate: jump straight there and clamp
            scrollX += x;
            enforceScrollLimits();
        }
    } else if (x > width) {
        if (x < 2 * width) {
            // less than one page off the right edge: animate one page forward
            scroller.startScroll((int) scrollX, (int) scrollY, width, 0);
        } else {
            scrollX += x;
            enforceScrollLimits();
        }
    }
    postInvalidate();
}
/**
 * Scroll the view so the given note comes into view.
 * @param trackPos index of the track containing the note
 * @param notePos index of the note within that track
 */
private void scrollToPosition(int trackPos, int notePos) {
    // convert the note index to screen coordinates using the track's timing
    Track target = ScoreActivity.common.getScore().getTrack(trackPos);
    scrollToPosition((int) noteToScreen(notePos, target.getTiming()));
}
/**
 * React to a cursor position change by scrolling the cursor's note
 * into view.
 */
private void onCursorChange() {
    ScoreCommon state = ScoreActivity.common;
    scrollToPosition(state.getTrackPosition(), state.getNotePosition());
}
/**
 * Recompute the beat scaling factor from the fastest (smallest) track
 * timing in the score. With no tracks, the scale defaults to 1.
 */
private void updateBeatScale() {
    Score score = ScoreActivity.common.getScore();
    float fastest = 1;
    final int trackCount = score.getTrackCount();
    for (int i = 0; i < trackCount; i++) {
        fastest = Math.min(fastest, score.getTrack(i).getTiming());
    }
    // smaller timings mean more slots per beat, hence a larger scale
    beatScale = 1f / fastest;
}
/**
 * Treat the most recent touch as a selection gesture: forward the
 * touched track/slot to the shared state, broadcast the change, and
 * request a redraw.
 */
private void attemptSelection() {
    ScoreCommon state = ScoreActivity.common;
    state.handleSelection(trackTouched, slotTouched);
    Notifier.INSTANCE.send(Notifier.SelectionChange);
    postInvalidate();
}
/**
 * Handle the start of audio playback: enter playback mode and redraw
 * so the view shows the play marker instead of the edit cursor.
 */
private void onAudioPlaying() {
    playback = true;
    postInvalidate();
}
/**
 * Handle an audio marker event during playback.
 * @param ms elapsed playback time in milliseconds
 */
private void onAudioMarker(int ms) {
    // stored in seconds for use by the drawing code
    playbackTime = (float) ms / 1000f;
    postInvalidate();
}
/**
 * Handle the end of score playback: leave playback mode and redraw.
 */
private void onAudioStopped() {
    playback = false;
    postInvalidate();
}
/**
 * Handle the end of the audio tail; this view takes no action.
 */
private void onAudioOff() {
}
/**
 * Route motion events through the gesture detector while tracking the
 * pressed state used to highlight the touched note button.
 * @param e the motion event
 * @return true if the gesture detector or superclass consumed the event
 */
@Override
public boolean onTouchEvent(MotionEvent e) {
    int action = e.getActionMasked();
    if (action == MotionEvent.ACTION_DOWN) {
        pressing = true;
        postInvalidate();
    } else if (action == MotionEvent.ACTION_UP
            || action == MotionEvent.ACTION_CANCEL) {
        // ACTION_CANCEL must also end the press: otherwise an interrupted
        // gesture (e.g. a parent view intercepting the stream) would leave
        // a note button stuck drawn in its pressed state
        pressing = false;
        postInvalidate();
    }
    // let the gesture detector evaluate first
    boolean handled = gestureDetector.onTouchEvent(e);
    // only pass to the superclass if gestures won't take it
    return handled || super.onTouchEvent(e);
}
/**
 * Record where a touch sequence begins: either moves the beat marker
 * (time bar) or remembers the touched track/slot (track area).
 * Touches are ignored entirely during playback.
 */
@Override
public boolean onDown(MotionEvent e) {
    if (playback) {
        return true;
    }
    ScoreCommon state = ScoreActivity.common;
    scroller.forceFinished(true);
    float x = e.getX();
    float y = e.getY();
    if (y <= TimeBarHeight) {
        // touch landed on the time bar: move the beat marker there
        trackTouched = -1; // flag that no track was hit
        state.setBeatMarker((int) (screenToBeat(x) + 0.5f));
        postInvalidate();
    } else {
        // touch landed in the track area: remember which slot was hit
        Score score = state.getScore();
        trackTouched = screenToTrack(y);
        if (trackTouched >= 0 && trackTouched < score.getTrackCount()) {
            slotTouched = screenToNote(x, score.getTrack(trackTouched).getTiming());
        }
    }
    return true;
}
/**
 * Pan the view by the drag distance, clamped to the score bounds.
 */
@Override
public boolean onScroll(MotionEvent down, MotionEvent move, float distanceX, float distanceY) {
    scrollX += distanceX;
    scrollY += distanceY;
    enforceScrollLimits();
    postInvalidate();
    return true;
}
/**
 * Start a fling animation opposing the finger velocity; scrolling is
 * limited to non-negative coordinates.
 */
@Override
public boolean onFling(MotionEvent down, MotionEvent up, float velocityX, float velocityY) {
    scroller.fling(
            (int) scrollX, (int) scrollY,
            (int) (-velocityX), (int) (-velocityY),
            0, Integer.MAX_VALUE,
            0, Integer.MAX_VALUE);
    postInvalidate();
    return true;
}
/**
 * A long press on an existing track begins (or extends) a selection,
 * unless playback is running.
 */
@Override
public void onLongPress(MotionEvent e) {
    Score score = ScoreActivity.common.getScore();
    boolean onTrack = trackTouched >= 0 && trackTouched < score.getTrackCount();
    if (!playback && onTrack) {
        attemptSelection();
    }
}
/**
 * A tap on an existing track either extends the active selection or
 * moves the edit cursor to the tapped note. Taps outside the tracks or
 * during playback are not consumed.
 */
@Override
public boolean onSingleTapUp(MotionEvent e) {
    ScoreCommon state = ScoreActivity.common;
    int trackCount = state.getScore().getTrackCount();
    boolean onTrack = trackTouched >= 0 && trackTouched < trackCount;
    if (playback || !onTrack) {
        return false;
    }
    // audible feedback for the accepted tap
    playSoundEffect(SoundEffectConstants.CLICK);
    if (state.isSelecting()) {
        // a selection is in progress: treat the tap as part of it
        attemptSelection();
    } else {
        // otherwise the tap point becomes the new cursor position
        state.setTrackPosition(trackTouched);
        state.setNotePosition(slotTouched);
        Notifier.INSTANCE.send(Notifier.CursorChange);
    }
    return true;
}
/**
 * Dispatch notifier messages to the matching handler. Unrecognized
 * message codes are ignored.
 */
@Override
public void handleMessage(Message msg) {
    final int what = msg.what;
    if (what == Notifier.NewScore) {
        // a brand new score: rescale, re-center on the cursor, redraw
        updateBeatScale();
        onCursorChange();
        postInvalidate();
    } else if (what == Notifier.ScoreChange) {
        // the score was edited: re-clamp scrolling and rescale
        enforceScrollLimits();
        updateBeatScale();
        postInvalidate();
    } else if (what == Notifier.CursorChange) {
        onCursorChange();
    } else if (what == Notifier.AudioPlaying) {
        onAudioPlaying();
    } else if (what == Notifier.AudioMarker) {
        // arg1 carries the elapsed time in milliseconds
        onAudioMarker(msg.arg1);
    } else if (what == Notifier.AudioStopped) {
        onAudioStopped();
    } else if (what == Notifier.AudioOff) {
        onAudioOff();
    } else if (what == Notifier.SettingChange) {
        onSettingsChanged();
    }
}
@Override
public void onShowPress(MotionEvent e) {
    // no extra feedback: the pressed highlight is tracked in onTouchEvent
}
// Double-tap gestures are not used by this view.
@Override
public boolean onDoubleTap(MotionEvent e) {
    return false;
}
@Override
public boolean onDoubleTapEvent(MotionEvent e) {
    return false;
}
// Taps are handled immediately in onSingleTapUp, so the confirmed
// (delayed) variant is deliberately not consumed.
@Override
public boolean onSingleTapConfirmed(MotionEvent e) {
    return false;
}
}
| |
package com.atlassian.httpclient.apache.httpcomponents;
import com.atlassian.fugue.Option;
import com.atlassian.httpclient.api.Response;
import com.google.common.base.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.InputStream;
import java.util.Map;
/**
 * Default implementation of an HTTP {@link Response}: a message (headers
 * plus entity stream) together with an HTTP status code and status text,
 * and convenience predicates for the common status classes and codes.
 */
public final class DefaultResponse extends DefaultMessage implements Response
{
    // One shared, immutable logger for the class rather than a mutable
    // field allocated per response instance.
    private static final Logger log = LoggerFactory.getLogger(DefaultResponse.class);

    private final int statusCode;
    private final String statusText;

    public DefaultResponse(Headers headers, InputStream entityStream, Option<Long> maxEntitySize, int statusCode, String statusText)
    {
        super(headers, entityStream, maxEntitySize);
        this.statusCode = statusCode;
        this.statusText = statusText;
    }

    /**
     * @return a fresh builder for assembling a response
     */
    public static DefaultResponseBuilder builder()
    {
        return new DefaultResponseBuilder();
    }

    @Override
    public int getStatusCode()
    {
        return statusCode;
    }

    @Override
    public String getStatusText()
    {
        return statusText;
    }

    // --- status class predicates (1xx / 2xx / 3xx / 4xx / 5xx) ---

    @Override
    public boolean isInformational()
    {
        return statusCode >= 100 && statusCode < 200;
    }

    @Override
    public boolean isSuccessful()
    {
        return statusCode >= 200 && statusCode < 300;
    }

    @Override
    public boolean isOk()
    {
        return statusCode == 200;
    }

    @Override
    public boolean isCreated()
    {
        return statusCode == 201;
    }

    @Override
    public boolean isNoContent()
    {
        return statusCode == 204;
    }

    @Override
    public boolean isRedirection()
    {
        return statusCode >= 300 && statusCode < 400;
    }

    @Override
    public boolean isSeeOther()
    {
        return statusCode == 303;
    }

    @Override
    public boolean isNotModified()
    {
        return statusCode == 304;
    }

    @Override
    public boolean isClientError()
    {
        return statusCode >= 400 && statusCode < 500;
    }

    @Override
    public boolean isBadRequest()
    {
        return statusCode == 400;
    }

    @Override
    public boolean isUnauthorized()
    {
        return statusCode == 401;
    }

    @Override
    public boolean isForbidden()
    {
        return statusCode == 403;
    }

    @Override
    public boolean isNotFound()
    {
        return statusCode == 404;
    }

    @Override
    public boolean isConflict()
    {
        return statusCode == 409;
    }

    @Override
    public boolean isServerError()
    {
        return statusCode >= 500 && statusCode < 600;
    }

    @Override
    public boolean isInternalServerError()
    {
        return statusCode == 500;
    }

    @Override
    public boolean isServiceUnavailable()
    {
        return statusCode == 503;
    }

    @Override
    public boolean isError()
    {
        return isClientError() || isServerError();
    }

    @Override
    public boolean isNotSuccessful()
    {
        return isInformational() || isRedirection() || isError();
    }

    /**
     * Parses the Content-Length header of this response.
     *
     * @return the declared content length, or none if the header is
     * absent, unparseable, or negative
     */
    @Override
    public Option<Long> getContentLength()
    {
        String lengthString = getHeader(Headers.Names.CONTENT_LENGTH);
        if (lengthString == null)
        {
            return Option.none();
        }
        try
        {
            long length = Long.parseLong(lengthString);
            if (length < 0)
            {
                log.warn("Unable to parse content length. Received out of range value {}", length);
                return Option.none();
            }
            return Option.some(length);
        }
        catch (NumberFormatException e)
        {
            log.warn("Unable to parse content length {}", lengthString);
            return Option.none();
        }
    }

    /**
     * Fluent builder for {@link DefaultResponse}. Message-level settings
     * (headers, entity) are delegated to a {@link CommonBuilder}.
     */
    public static class DefaultResponseBuilder implements Builder
    {
        private final CommonBuilder<DefaultResponse> commonBuilder;
        private String statusText;
        private int statusCode;
        private long maxEntitySize;

        private DefaultResponseBuilder()
        {
            this.commonBuilder = new CommonBuilder<DefaultResponse>();
        }

        @Override
        public DefaultResponseBuilder setContentType(final String contentType)
        {
            commonBuilder.setContentType(contentType);
            return this;
        }

        @Override
        public DefaultResponseBuilder setContentCharset(final String contentCharset)
        {
            commonBuilder.setContentCharset(contentCharset);
            return this;
        }

        @Override
        public DefaultResponseBuilder setHeaders(final Map<String, String> headers)
        {
            commonBuilder.setHeaders(headers);
            return this;
        }

        @Override
        public DefaultResponseBuilder setHeader(final String name, final String value)
        {
            commonBuilder.setHeader(name, value);
            return this;
        }

        @Override
        public DefaultResponseBuilder setEntity(final String entity)
        {
            commonBuilder.setEntity(entity);
            return this;
        }

        @Override
        public DefaultResponseBuilder setEntityStream(final InputStream entityStream, final String encoding)
        {
            commonBuilder.setEntityStream(entityStream);
            commonBuilder.setContentCharset(encoding);
            return this;
        }

        @Override
        public DefaultResponseBuilder setEntityStream(final InputStream entityStream)
        {
            commonBuilder.setEntityStream(entityStream);
            return this;
        }

        @Override
        public DefaultResponseBuilder setStatusText(final String statusText)
        {
            this.statusText = statusText;
            return this;
        }

        @Override
        public DefaultResponseBuilder setStatusCode(final int statusCode)
        {
            this.statusCode = statusCode;
            return this;
        }

        public DefaultResponseBuilder setMaxEntitySize(long maxEntitySize)
        {
            this.maxEntitySize = maxEntitySize;
            return this;
        }

        @Override
        public DefaultResponse build()
        {
            // NOTE(review): when setMaxEntitySize was never called this passes
            // 0 (not "unlimited") -- confirm Option.option(0L) is the intent.
            return new DefaultResponse(commonBuilder.getHeaders(), commonBuilder.getEntityStream(),
                    Option.option(maxEntitySize), statusCode, statusText);
        }
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.11.29 at 12:35:53 PM GMT
//
package org.mule.modules.hybris.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for tableBlockDTO complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="tableBlockDTO">
* <complexContent>
* <extension base="{}contentBlockDTO">
* <sequence>
* <element name="allTableTemplates" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="tableBlock" type="{}tableBlockDTO" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="cells" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="cellEntry" type="{}cellEntryDTO" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="columns" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="columnEntry" type="{}columnEntryDTO" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="footerRowCount" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="headerRowCount" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="isTemplate" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="locked" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="rows" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="rowEntry" type="{}rowEntryDTO" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="tableDataHolders" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="abstractTableDataHolder" type="{}abstractTableDataHolderDTO" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="tableStyle" type="{}tableStyleDTO" minOccurs="0"/>
* <element name="tableTemplate" type="{}tableBlockDTO" minOccurs="0"/>
* <element name="width" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/>
* <element name="widthType" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tableBlockDTO", propOrder = {
    "allTableTemplates",
    "cells",
    "columns",
    "footerRowCount",
    "headerRowCount",
    "isTemplate",
    "locked",
    "rows",
    "tableDataHolders",
    "tableStyle",
    "tableTemplate",
    "width",
    "widthType"
})
public class TableBlockDTO
    extends ContentBlockDTO
{

    protected TableBlockDTO.AllTableTemplates allTableTemplates;
    protected TableBlockDTO.Cells cells;
    protected TableBlockDTO.Columns columns;
    protected Integer footerRowCount;
    protected Integer headerRowCount;
    protected Boolean isTemplate;
    protected Boolean locked;
    protected TableBlockDTO.Rows rows;
    protected TableBlockDTO.TableDataHolders tableDataHolders;
    protected TableStyleDTO tableStyle;
    protected TableBlockDTO tableTemplate;
    protected Double width;
    protected String widthType;

    /** @return the wrapper of available table templates, possibly null */
    public TableBlockDTO.AllTableTemplates getAllTableTemplates() {
        return allTableTemplates;
    }

    /** @param value the wrapper of available table templates */
    public void setAllTableTemplates(TableBlockDTO.AllTableTemplates value) {
        allTableTemplates = value;
    }

    /** @return the wrapper of cell entries, possibly null */
    public TableBlockDTO.Cells getCells() {
        return cells;
    }

    /** @param value the wrapper of cell entries */
    public void setCells(TableBlockDTO.Cells value) {
        cells = value;
    }

    /** @return the wrapper of column entries, possibly null */
    public TableBlockDTO.Columns getColumns() {
        return columns;
    }

    /** @param value the wrapper of column entries */
    public void setColumns(TableBlockDTO.Columns value) {
        columns = value;
    }

    /** @return the number of footer rows, possibly null */
    public Integer getFooterRowCount() {
        return footerRowCount;
    }

    /** @param value the number of footer rows */
    public void setFooterRowCount(Integer value) {
        footerRowCount = value;
    }

    /** @return the number of header rows, possibly null */
    public Integer getHeaderRowCount() {
        return headerRowCount;
    }

    /** @param value the number of header rows */
    public void setHeaderRowCount(Integer value) {
        headerRowCount = value;
    }

    /** @return whether this block is itself a template, possibly null */
    public Boolean isIsTemplate() {
        return isTemplate;
    }

    /** @param value whether this block is itself a template */
    public void setIsTemplate(Boolean value) {
        isTemplate = value;
    }

    /** @return whether this block is locked, possibly null */
    public Boolean isLocked() {
        return locked;
    }

    /** @param value whether this block is locked */
    public void setLocked(Boolean value) {
        locked = value;
    }

    /** @return the wrapper of row entries, possibly null */
    public TableBlockDTO.Rows getRows() {
        return rows;
    }

    /** @param value the wrapper of row entries */
    public void setRows(TableBlockDTO.Rows value) {
        rows = value;
    }

    /** @return the wrapper of table data holders, possibly null */
    public TableBlockDTO.TableDataHolders getTableDataHolders() {
        return tableDataHolders;
    }

    /** @param value the wrapper of table data holders */
    public void setTableDataHolders(TableBlockDTO.TableDataHolders value) {
        tableDataHolders = value;
    }

    /** @return the table style, possibly null */
    public TableStyleDTO getTableStyle() {
        return tableStyle;
    }

    /** @param value the table style */
    public void setTableStyle(TableStyleDTO value) {
        tableStyle = value;
    }

    /** @return the template this table is based on, possibly null */
    public TableBlockDTO getTableTemplate() {
        return tableTemplate;
    }

    /** @param value the template this table is based on */
    public void setTableTemplate(TableBlockDTO value) {
        tableTemplate = value;
    }

    /** @return the table width, possibly null */
    public Double getWidth() {
        return width;
    }

    /** @param value the table width */
    public void setWidth(Double value) {
        width = value;
    }

    /** @return the unit/type of the width value, possibly null */
    public String getWidthType() {
        return widthType;
    }

    /** @param value the unit/type of the width value */
    public void setWidthType(String value) {
        widthType = value;
    }

    /**
     * Anonymous wrapper type for the repeated {@code tableBlock}
     * elements of {@code allTableTemplates}.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "tableBlock"
    })
    public static class AllTableTemplates {

        protected List<TableBlockDTO> tableBlock;

        /**
         * Returns the live backing list (created lazily on first access);
         * mutations are reflected directly in the JAXB object, which is
         * why no setter exists for this property.
         * @return the mutable list of {@link TableBlockDTO}, never null
         */
        public List<TableBlockDTO> getTableBlock() {
            if (tableBlock == null) {
                tableBlock = new ArrayList<TableBlockDTO>();
            }
            return tableBlock;
        }
    }

    /**
     * Anonymous wrapper type for the repeated {@code cellEntry}
     * elements of {@code cells}.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "cellEntry"
    })
    public static class Cells {

        protected List<CellEntryDTO> cellEntry;

        /**
         * Returns the live backing list (created lazily on first access);
         * mutations are reflected directly in the JAXB object, which is
         * why no setter exists for this property.
         * @return the mutable list of {@link CellEntryDTO}, never null
         */
        public List<CellEntryDTO> getCellEntry() {
            if (cellEntry == null) {
                cellEntry = new ArrayList<CellEntryDTO>();
            }
            return cellEntry;
        }
    }

    /**
     * Anonymous wrapper type for the repeated {@code columnEntry}
     * elements of {@code columns}.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "columnEntry"
    })
    public static class Columns {

        protected List<ColumnEntryDTO> columnEntry;

        /**
         * Returns the live backing list (created lazily on first access);
         * mutations are reflected directly in the JAXB object, which is
         * why no setter exists for this property.
         * @return the mutable list of {@link ColumnEntryDTO}, never null
         */
        public List<ColumnEntryDTO> getColumnEntry() {
            if (columnEntry == null) {
                columnEntry = new ArrayList<ColumnEntryDTO>();
            }
            return columnEntry;
        }
    }

    /**
     * Anonymous wrapper type for the repeated {@code rowEntry}
     * elements of {@code rows}.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "rowEntry"
    })
    public static class Rows {

        protected List<RowEntryDTO> rowEntry;

        /**
         * Returns the live backing list (created lazily on first access);
         * mutations are reflected directly in the JAXB object, which is
         * why no setter exists for this property.
         * @return the mutable list of {@link RowEntryDTO}, never null
         */
        public List<RowEntryDTO> getRowEntry() {
            if (rowEntry == null) {
                rowEntry = new ArrayList<RowEntryDTO>();
            }
            return rowEntry;
        }
    }

    /**
     * Anonymous wrapper type for the repeated
     * {@code abstractTableDataHolder} elements of {@code tableDataHolders}.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "abstractTableDataHolder"
    })
    public static class TableDataHolders {

        protected List<AbstractTableDataHolderDTO> abstractTableDataHolder;

        /**
         * Returns the live backing list (created lazily on first access);
         * mutations are reflected directly in the JAXB object, which is
         * why no setter exists for this property.
         * @return the mutable list of {@link AbstractTableDataHolderDTO}, never null
         */
        public List<AbstractTableDataHolderDTO> getAbstractTableDataHolder() {
            if (abstractTableDataHolder == null) {
                abstractTableDataHolder = new ArrayList<AbstractTableDataHolderDTO>();
            }
            return abstractTableDataHolder;
        }
    }
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.common;
/**
* <p>Represents a 2D matrix of bits. In function arguments below, and throughout the common
* module, x is the column position, and y is the row position. The ordering is always x, y.
* The origin is at the top-left.</p>
*
* <p>Internally the bits are represented in a 1-D array of 32-bit ints. However, each row begins
* with a new int. This is done intentionally so that we can copy out a row into a BitArray very
* efficiently.</p>
*
* <p>The ordering of bits is row-major. Within each int, the least significant bits are used first,
* meaning they represent lower x values. This is compatible with BitArray's implementation.</p>
*
* @author Sean Owen
* @author dswitkin@google.com (Daniel Switkin)
*/
public final class BitMatrix {
// TODO: Just like BitArray, these need to be public so ProGuard can inline them.
public final int width;
public final int height;
public final int rowSize;
public final int[] bits;
// A helper to construct a square matrix.
public BitMatrix(int dimension) {
this(dimension, dimension);
}
public BitMatrix(int width, int height) {
if (width < 1 || height < 1) {
throw new IllegalArgumentException("Both dimensions must be greater than 0");
}
this.width = width;
this.height = height;
this.rowSize = (width + 31) >> 5;
bits = new int[rowSize * height];
}
/**
* <p>Gets the requested bit, where true means black.</p>
*
* @param x The horizontal component (i.e. which column)
* @param y The vertical component (i.e. which row)
* @return value of given bit in matrix
*/
public boolean get(int x, int y) {
int offset = y * rowSize + (x >> 5);
return ((bits[offset] >>> (x & 0x1f)) & 1) != 0;
}
/**
* <p>Sets the given bit to true.</p>
*
* @param x The horizontal component (i.e. which column)
* @param y The vertical component (i.e. which row)
*/
public void set(int x, int y) {
int offset = y * rowSize + (x >> 5);
bits[offset] |= 1 << (x & 0x1f);
}
/**
* <p>Flips the given bit.</p>
*
* @param x The horizontal component (i.e. which column)
* @param y The vertical component (i.e. which row)
*/
public void flip(int x, int y) {
int offset = y * rowSize + (x >> 5);
bits[offset] ^= 1 << (x & 0x1f);
}
/**
* Clears all bits (sets to false).
*/
public void clear() {
int max = bits.length;
for (int i = 0; i < max; i++) {
bits[i] = 0;
}
}
/**
* <p>Sets a square region of the bit matrix to true.</p>
*
* @param left The horizontal position to begin at (inclusive)
* @param top The vertical position to begin at (inclusive)
* @param width The width of the region
* @param height The height of the region
*/
public void setRegion(int left, int top, int width, int height) {
if (top < 0 || left < 0) {
throw new IllegalArgumentException("Left and top must be nonnegative");
}
if (height < 1 || width < 1) {
throw new IllegalArgumentException("Height and width must be at least 1");
}
int right = left + width;
int bottom = top + height;
if (bottom > this.height || right > this.width) {
throw new IllegalArgumentException("The region must fit inside the matrix");
}
for (int y = top; y < bottom; y++) {
int offset = y * rowSize;
for (int x = left; x < right; x++) {
bits[offset + (x >> 5)] |= 1 << (x & 0x1f);
}
}
}
/**
* A fast method to retrieve one row of data from the matrix as a BitArray.
*
* @param y The row to retrieve
* @param row An optional caller-allocated BitArray, will be allocated if null or too small
* @return The resulting BitArray - this reference should always be used even when passing
* your own row
*/
public BitArray getRow(int y, BitArray row) {
if (row == null || row.getSize() < width) {
row = new BitArray(width);
}
int offset = y * rowSize;
for (int x = 0; x < rowSize; x++) {
row.setBulk(x << 5, bits[offset + x]);
}
return row;
}
/**
 * This is useful in detecting a corner of a 'pure' barcode.
 *
 * @return {x,y} coordinate of top-left-most 1 bit, or null if it is all white
 */
public int[] getTopLeftOnBit() {
  // Scan for the first nonzero 32-bit word in row-major order.
  int bitsOffset = 0;
  while (bitsOffset < bits.length && bits[bitsOffset] == 0) {
    bitsOffset++;
  }
  if (bitsOffset == bits.length) {
    // All white: no set bit anywhere.
    return null;
  }
  int y = bitsOffset / rowSize;
  // Base x of this word, plus the index of its lowest set bit.
  // numberOfTrailingZeros is equivalent to the former shift-and-test loop for a
  // nonzero word, but branch-free and self-describing.
  int x = ((bitsOffset % rowSize) << 5) + Integer.numberOfTrailingZeros(bits[bitsOffset]);
  return new int[] {x, y};
}
/**
 * @return The width of the matrix (number of columns)
 */
public int getWidth() {
  return width;
}
/**
 * @return The height of the matrix (number of rows)
 */
public int getHeight() {
  return height;
}
/**
 * Two matrices are equal iff they have the same dimensions and identical bit content.
 *
 * @param o the object to compare against (may be null or a non-BitMatrix)
 * @return true iff {@code o} is a BitMatrix with the same width, height and bits
 */
@Override
public boolean equals(Object o) {
  // Cheap identity short-circuit avoids scanning the whole bit array.
  if (o == this) {
    return true;
  }
  if (!(o instanceof BitMatrix)) {
    return false;
  }
  BitMatrix other = (BitMatrix) o;
  // Dimension mismatch rules out equality before any content comparison.
  if (width != other.width || height != other.height ||
      rowSize != other.rowSize || bits.length != other.bits.length) {
    return false;
  }
  for (int i = 0; i < bits.length; i++) {
    if (bits[i] != other.bits[i]) {
      return false;
    }
  }
  return true;
}
/**
 * Hash consistent with {@link #equals(Object)}: derived from the dimensions and
 * every word of the bit array.
 *
 * @return the hash code
 */
@Override
public int hashCode() {
  int hash = width;
  // NOTE(review): width is folded in twice (seed and first term). Looks
  // accidental, but it is kept as-is so existing hash values are unchanged;
  // the equals/hashCode contract holds either way.
  hash = 31 * hash + width;
  hash = 31 * hash + height;
  hash = 31 * hash + rowSize;
  for (int i = 0; i < bits.length; i++) {
    hash = 31 * hash + bits[i];
  }
  return hash;
}
/**
 * Renders the matrix as text: "X " for a set bit, two spaces for a clear bit,
 * one line per row.
 *
 * @return a multi-line string representation of the matrix
 */
@Override
public String toString() {
  // StringBuilder instead of StringBuffer: no synchronization needed for a local.
  // Presize correctly: every cell emits two characters plus one newline per row
  // (the old height * (width + 1) estimate forced mid-build reallocations).
  StringBuilder result = new StringBuilder(height * (2 * width + 1));
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      result.append(get(x, y) ? "X " : "  ");
    }
    result.append('\n');
  }
  return result.toString();
}
}
| |
package org.quinto.math;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Random;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.quinto.math.MathUtils.pow;
import static org.quinto.math.MathUtils.pow10;
import static org.quinto.math.MathUtils.powExact;
/**
 * Unit tests for {@code MathUtils.pow}, {@code MathUtils.pow10} and
 * {@code MathUtils.powExact}: casual values, the special cases enumerated in the
 * {@code Math.pow} javadoc, randomized inputs, and overflow detection.
 */
public class MathUtilsPowTest
{
    /** Relative tolerance for inexact floating-point comparisons. */
    private static final double DELTA = 0.0001;
    /**
     * Negative delta passed to assertEquals( double, double, double ): the
     * |expected - actual| <= delta fallback can never hold, so only bit-exact
     * equality (via Double.compare, which also matches NaN to NaN) passes.
     */
    private static final double EXACT = -1.0;
    private static final Random RANDOM = new Random( System.nanoTime() );
    /** Interesting doubles: signed zeros, infinities, NaN, type extremes and casual values. */
    private static final double DOUBLES[] = new double[]{ Double.NEGATIVE_INFINITY, -0.0, Double.NaN, 0.0, Double.POSITIVE_INFINITY,
                                                          Long.MIN_VALUE, Integer.MIN_VALUE, Short.MIN_VALUE, Byte.MIN_VALUE,
                                                          -(double)Long.MIN_VALUE, -(double)Integer.MIN_VALUE, -(double)Short.MIN_VALUE, -(double)Byte.MIN_VALUE,
                                                          Byte.MAX_VALUE, Short.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE,
                                                          -Byte.MAX_VALUE, -Short.MAX_VALUE, -Integer.MAX_VALUE, -Long.MAX_VALUE,
                                                          Double.MAX_VALUE, Float.MAX_VALUE, Double.MIN_VALUE, Float.MIN_VALUE, Double.MIN_NORMAL, Float.MIN_NORMAL,
                                                          -Double.MAX_VALUE, -Float.MAX_VALUE, -Double.MIN_VALUE, -Float.MIN_VALUE, -Double.MIN_NORMAL, -Float.MIN_NORMAL,
                                                          0.5, 0.1, 0.2, 0.8, 1.1, 1.2, 1.5, 1.8, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.3, 2.2, 2.5, 2.8, 33.0, 33.1, 33.5, 33.8, 10.0, 300.0, 400.0, 500.0,
                                                          -0.5, -0.1, -0.2, -0.8, -1.1, -1.2, -1.5, -1.8, -1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -1.3, -2.2, -2.5, -2.8, -33.0, -33.1, -33.5, -33.8, -10.0, -300.0, -400.0, -500.0 };
    /** Interesting int values, including extremes around overflow boundaries. */
    private static final int INTS[] = new int[]{ Integer.MAX_VALUE, Integer.MAX_VALUE - 1, Integer.MIN_VALUE, Integer.MIN_VALUE + 1, Integer.MIN_VALUE + 2, 0, 1, 2, 3, 5, 8, 10, 20, 100, 300, 500, -1, -2, -3, -5, -8, -10, -20, -100, -300, -500 };
    /** Interesting long values, including extremes around overflow boundaries. */
    private static final long LONGS[] = new long[]{ Long.MAX_VALUE, Long.MAX_VALUE - 1, Long.MIN_VALUE, Long.MIN_VALUE + 1, Long.MIN_VALUE + 2, Integer.MAX_VALUE, Integer.MAX_VALUE - 1, Integer.MIN_VALUE, Integer.MIN_VALUE + 1, Integer.MIN_VALUE + 2, 0, 1, 2, 3, 5, 8, 10, 20, 100, 300, 500, -1, -2, -3, -5, -8, -10, -20, -100, -300, -500 };
    public MathUtilsPowTest()
    {
    }
    @BeforeClass
    public static void setUpClass()
    {
    }
    @AfterClass
    public static void tearDownClass()
    {
    }
    @Before
    public void setUp()
    {
    }
    @After
    public void tearDown()
    {
    }
    /** pow10 on a handful of hand-picked exponents, positive and negative. */
    @Test( timeout = 5000L )
    public void pow10Casual()
    {
        assertEquals( 1.0, pow10( 0 ), EXACT );
        assertEquals( 10.0, pow10( 1 ), EXACT );
        assertEquals( 100.0, pow10( 2 ), EXACT );
        assertEquals( 1000.0, pow10( 3 ), EXACT );
        assertEquals( 1e5, pow10( 5 ), EXACT );
        assertEquals( 1e23, pow10( 23 ), EXACT );
        assertEquals( 1e105, pow10( 105 ), EXACT );
        assertEquals( 0.1, pow10( -1 ), EXACT );
        assertEquals( 0.01, pow10( -2 ), EXACT );
        assertEquals( 0.001, pow10( -3 ), EXACT );
        assertEquals( 1e-5, pow10( -5 ), EXACT );
        assertEquals( 1e-23, pow10( -23 ), EXACT );
        assertEquals( 1e-105, pow10( -105 ), EXACT );
    }
    /** pow10 over the whole representable decimal-exponent range, against two independent oracles. */
    @Test( timeout = 5000L )
    public void pow10Ten()
    {
        for ( int i = -325; i < 311; i++ ) assertEquals( Double.parseDouble( "1e" + i ), pow10( i ), EXACT );
        for ( int i = -325; i < 311; i++ ) assertEquals( BigDecimal.ONE.scaleByPowerOfTen( i ).doubleValue(), pow10( i ), EXACT );
    }
    /** pow10 underflow/overflow saturation at extreme exponents. */
    @Test( timeout = 5000L )
    public void pow10Special()
    {
        assertEquals( 0.0, pow10( Integer.MIN_VALUE ), EXACT );
        assertEquals( 0.0, pow10( -5001 ), EXACT );
        assertEquals( 0.1, pow10( -1 ), EXACT );
        assertEquals( 1.0, pow10( 0 ), EXACT );
        assertEquals( 10.0, pow10( 1 ), EXACT );
        assertEquals( Double.POSITIVE_INFINITY, pow10( 5000 ), EXACT );
        assertEquals( Double.POSITIVE_INFINITY, pow10( Integer.MAX_VALUE ), EXACT );
    }
    /** pow( 10.0, i ) must agree with pow10 oracles over the whole exponent range. */
    @Test( timeout = 5000L )
    public void powDoubleIntTen()
    {
        for ( int i = -325; i < 311; i++ ) assertEquals( Double.parseDouble( "1e" + i ), pow( 10.0, i ), EXACT );
        for ( int i = -325; i < 311; i++ ) assertEquals( BigDecimal.ONE.scaleByPowerOfTen( i ).doubleValue(), pow( 10.0, i ), EXACT );
    }
    /** Random double bases against Math.pow, with tolerance scaled to the result magnitude. */
    @Test( timeout = 5000L )
    public void powDoubleIntRandom()
    {
        for ( int k = 0; k < 500; k++ )
        {
            double d = RANDOM.nextDouble() * 400.0 - 150.0;
            for ( int i = 0; i < 30; i++ )
            {
                assertEquals( Math.pow( d, i ), pow( d, i ), Math.max( Math.pow( Math.abs( d ), i ), 1.0 ) * DELTA );
            }
        }
    }
    // NOTE(review): timeout commented out below — presumably a debugging leftover;
    // confirm and restore to match every other test in this class.
    @Test//( timeout = 5000L )
    public void powDoubleIntSpecial()
    {
        // Special cases from Math.pow javadoc:
        // If the second argument is positive or negative zero, then the result is 1.0.
        for ( double d : DOUBLES ) assertEquals( 1.0, pow( d, 0 ), EXACT );
        // If the second argument is 1.0, then the result is the same as the first argument.
        for ( double d : DOUBLES ) assertEquals( d, pow( d, 1 ), EXACT );
        // If the second argument is NaN, then the result is NaN. <- Impossible with int.
        // If the first argument is NaN and the second argument is nonzero, then the result is NaN.
        for ( int i : INTS ) if ( i != 0 ) assertEquals( Double.NaN, pow( Double.NaN, i ), EXACT );
        // If the absolute value of the first argument is greater than 1 and the second argument is positive infinity, or
        // the absolute value of the first argument is less than 1 and the second argument is negative infinity, then the result is positive infinity.
        for ( double d : DOUBLES ) if ( Math.abs( d ) > 1.0 ) assertEquals( Double.POSITIVE_INFINITY, pow( d, Integer.MAX_VALUE - 1 ), EXACT );
        for ( double d : DOUBLES ) if ( Math.abs( d ) < 1.0 ) assertEquals( Double.POSITIVE_INFINITY, pow( d, Integer.MIN_VALUE ), EXACT );
        // Note: Integer.MAX_VALUE isn't actually an infinity, so its parity affects the sign of resulting zero.
        for ( double d : DOUBLES ) if ( Math.abs( d ) > 1.0 ) assertTrue( Double.isInfinite( pow( d, Integer.MAX_VALUE ) ) );
        for ( double d : DOUBLES ) if ( Math.abs( d ) < 1.0 ) assertTrue( Double.isInfinite( pow( d, Integer.MIN_VALUE + 1 ) ) );
        // If the absolute value of the first argument is greater than 1 and the second argument is negative infinity, or
        // the absolute value of the first argument is less than 1 and the second argument is positive infinity, then the result is positive zero.
        for ( double d : DOUBLES ) if ( Math.abs( d ) > 1.0 ) assertEquals( 0.0, pow( d, Integer.MIN_VALUE ), EXACT );
        for ( double d : DOUBLES ) if ( Math.abs( d ) < 1.0 ) assertEquals( 0.0, pow( d, Integer.MAX_VALUE - 1 ), EXACT );
        // Note: Integer.MAX_VALUE isn't actually an infinity, so its parity affects the sign of resulting zero.
        for ( double d : DOUBLES ) if ( Math.abs( d ) > 1.0 ) assertTrue( pow( d, Integer.MIN_VALUE + 1 ) == 0.0 );
        for ( double d : DOUBLES ) if ( Math.abs( d ) < 1.0 ) assertTrue( pow( d, Integer.MAX_VALUE ) == 0.0 );
        // If the absolute value of the first argument equals 1 and the second argument is infinite, then the result is NaN. <- Impossible with int.
        // If the first argument is positive zero and the second argument is greater than zero, or
        // the first argument is positive infinity and the second argument is less than zero, then the result is positive zero.
        for ( int i : INTS ) if ( i > 0 ) assertEquals( 0.0, pow( 0.0, i ), EXACT );
        for ( int i : INTS ) if ( i < 0 ) assertEquals( 0.0, pow( Double.POSITIVE_INFINITY, i ), EXACT );
        // If the first argument is positive zero and the second argument is less than zero, or
        // the first argument is positive infinity and the second argument is greater than zero, then the result is positive infinity.
        for ( int i : INTS ) if ( i < 0 ) assertEquals( Double.POSITIVE_INFINITY, pow( 0.0, i ), EXACT );
        for ( int i : INTS ) if ( i > 0 ) assertEquals( Double.POSITIVE_INFINITY, pow( Double.POSITIVE_INFINITY, i ), EXACT );
        // If the first argument is negative zero and the second argument is greater than zero but not a finite odd integer, or
        // the first argument is negative infinity and the second argument is less than zero but not a finite odd integer, then the result is positive zero.
        for ( int i : INTS ) if ( i > 0 && ( i & 1 ) == 0 ) assertEquals( 0.0, pow( -0.0, i ), EXACT );
        for ( int i : INTS ) if ( i < 0 && ( i & 1 ) == 0 ) assertEquals( 0.0, pow( Double.NEGATIVE_INFINITY, i ), EXACT );
        // If the first argument is negative zero and the second argument is a positive finite odd integer, or
        // the first argument is negative infinity and the second argument is a negative finite odd integer, then the result is negative zero.
        for ( int i : INTS ) if ( i > 0 && ( i & 1 ) == 1 ) assertEquals( -0.0, pow( -0.0, i ), EXACT );
        for ( int i : INTS ) if ( i < 0 && ( i & 1 ) == 1 ) assertEquals( -0.0, pow( Double.NEGATIVE_INFINITY, i ), EXACT );
        // If the first argument is negative zero and the second argument is less than zero but not a finite odd integer, or
        // the first argument is negative infinity and the second argument is greater than zero but not a finite odd integer, then the result is positive infinity.
        for ( int i : INTS ) if ( i > 0 && ( i & 1 ) == 0 ) assertEquals( Double.POSITIVE_INFINITY, pow( Double.NEGATIVE_INFINITY, i ), EXACT );
        for ( int i : INTS ) if ( i < 0 && ( i & 1 ) == 0 ) assertEquals( Double.POSITIVE_INFINITY, pow( -0.0, i ), EXACT );
        // If the first argument is negative zero and the second argument is a negative finite odd integer, or
        // the first argument is negative infinity and the second argument is a positive finite odd integer, then the result is negative infinity.
        for ( int i : INTS ) if ( i > 0 && ( i & 1 ) == 1 ) assertEquals( Double.NEGATIVE_INFINITY, pow( Double.NEGATIVE_INFINITY, i ), EXACT );
        for ( int i : INTS ) if ( i < 0 && ( i & 1 ) == 1 ) assertEquals( Double.NEGATIVE_INFINITY, pow( -0.0, i ), EXACT );
        for ( double d : DOUBLES )
        {
            // If the first argument is finite and less than zero
            if ( d < 0.0 && Double.isFinite( d ) )
            {
                for ( int i : INTS )
                {
                    // if the second argument is a finite even integer, the result is equal to the result of raising the absolute value of the first argument to the power of the second argument
                    if ( ( i & 1 ) == 0 ) assertEquals( pow( -d, i ), pow( d, i ), EXACT );
                    // if the second argument is a finite odd integer, the result is equal to the negative of the result of raising the absolute value of the first argument to the power of the second argument
                    else assertEquals( -pow( -d, i ), pow( d, i ), EXACT );
                    // if the second argument is finite and not an integer, then the result is NaN. <- Impossible with int.
                }
            }
        }
        // If both arguments are integers, then the result is exactly equal to the mathematical result of raising the first argument to the power
        // of the second argument if that result can in fact be represented exactly as a {@code double} value. <- Casual test.
    }
    /** pow( double, int ) against Math.pow for the cross product of interesting values. */
    @Test( timeout = 5000L )
    public void powDoubleIntCasual()
    {
        assertEquals( 0.25, pow( 0.5, 2 ), EXACT );
        assertEquals( 1.0, pow( 1.0, 3 ), EXACT );
        assertEquals( 81.0, pow( 3.0, 4 ), EXACT );
        for ( int i : INTS ) for ( double d : DOUBLES ) assertEquals( Math.pow( d, i ), pow( d, i ), Math.min( Math.max( Math.pow( Math.abs( d ), i ), 1.0 ) * DELTA, 10e290 ) );
    }
    /** Random int bases/powers: pow must match BigInteger arithmetic modulo 2^32 (two's-complement wraparound). */
    @Test( timeout = 5000L )
    public void powIntIntRandom()
    {
        BigInteger mod = BigUtils.BI_MAX_INT.add( BigInteger.ONE ).shiftLeft( 1 );
        for ( int i = 0; i < 100; i++ )
        {
            int base = RANDOM.nextInt();
            for ( int j = 0; j < 100; j++ )
            {
                int power = RANDOM.nextInt( Integer.MAX_VALUE );
                BigInteger expected = BigInteger.valueOf( base ).modPow( BigInteger.valueOf( power ), mod );
                // Fixed: the failure message previously reported the loop counters i/j instead of the actual operands.
                assertEquals( base + " ^ " + power, expected.intValue(), pow( base, power ) );
            }
        }
    }
    /** Exhaustive int base/power pairs against the BigInteger oracle. */
    @Test( timeout = 5000L )
    public void powIntIntExact()
    {
        BigInteger mod = BigUtils.BI_MAX_INT.add( BigInteger.ONE ).shiftLeft( 1 );
        for ( int i : INTS ) for ( int j : INTS ) if ( j >= 0 )
        {
            BigInteger expected = BigInteger.valueOf( i ).modPow( BigInteger.valueOf( j ), mod );
            assertEquals( i + " ^ " + j, expected.intValue(), pow( i, j ) );
        }
    }
    /** Non-overflowing int pairs must match ( int )Math.pow exactly. */
    @Test( timeout = 5000L )
    public void powIntIntCasual()
    {
        assertEquals( 3, pow( 3, 1 ) );
        assertEquals( 25, pow( 5, 2 ) );
        assertEquals( 729, pow( 9, 3 ) );
        double upper = Math.log( Integer.MAX_VALUE );
        for ( int i : INTS ) if ( i > -10000 && i < 10000 )
        {
            // Largest power of |i| that stays inside int range (0 means "unbounded", see below).
            int up = ( int )( upper / Math.log( Math.abs( i ) ) );
            if ( up == 0 ) up = Integer.MAX_VALUE;
            for ( int j : INTS ) if ( j >= ( i == 0 ? 0 : -up ) && j <= up ) assertEquals( i + " ^ " + j, ( int )Math.pow( i, j ), pow( i, j ) );
        }
    }
    /** 0 raised to a negative power is a division by zero and must throw. */
    @Test( expected = ArithmeticException.class, timeout = 5000L )
    public void powIntIntException()
    {
        pow( 0, -1 );
    }
    /** Negative powers: only |base| == 1 survives; everything else truncates to 0. */
    @Test( timeout = 5000L )
    public void powIntIntSpecial()
    {
        for ( int i : INTS ) if ( i != 0 ) for ( int j : INTS ) if ( j < 0 ) assertEquals( i == 1 ? 1 : i == -1 ? ( ( j & 1 ) == 1 ? -1 : 1 ) : 0, pow( i, j ) );
    }
    /** Random long bases/powers: pow must match BigInteger arithmetic modulo 2^64. */
    @Test( timeout = 5000L )
    public void powLongIntRandom()
    {
        BigInteger mod = BigUtils.BI_MAX_LONG.add( BigInteger.ONE ).shiftLeft( 1 );
        for ( int i = 0; i < 100; i++ )
        {
            long base = RANDOM.nextLong();
            for ( int j = 0; j < 100; j++ )
            {
                int power = RANDOM.nextInt( Integer.MAX_VALUE );
                BigInteger expected = BigInteger.valueOf( base ).modPow( BigInteger.valueOf( power ), mod );
                // Fixed: the failure message previously reported the loop counters i/j instead of the actual operands.
                assertEquals( base + " ^ " + power, expected.longValue(), pow( base, power ) );
            }
        }
    }
    /** Exhaustive long base / int power pairs against the BigInteger oracle. */
    @Test( timeout = 5000L )
    public void powLongIntExact()
    {
        BigInteger mod = BigUtils.BI_MAX_LONG.add( BigInteger.ONE ).shiftLeft( 1 );
        for ( long i : LONGS ) for ( int j : INTS ) if ( j >= 0 )
        {
            BigInteger expected = BigInteger.valueOf( i ).modPow( BigInteger.valueOf( j ), mod );
            assertEquals( i + " ^ " + j, expected.longValue(), pow( i, j ) );
        }
    }
    /** Non-overflowing long pairs must match ( long )Math.pow exactly. */
    @Test( timeout = 5000L )
    public void powLongIntCasual()
    {
        assertEquals( 3L, pow( 3L, 1 ) );
        assertEquals( 25L, pow( 5L, 2 ) );
        assertEquals( 729L, pow( 9L, 3 ) );
        double upper = Math.log( Long.MAX_VALUE );
        for ( long i : LONGS ) if ( i > -10000L && i < 10000L )
        {
            int up = ( int )( upper / Math.log( Math.abs( i ) ) );
            if ( up == 0 ) up = Integer.MAX_VALUE;
            for ( int j : INTS ) if ( j >= ( i == 0L ? 0 : -up ) && j <= up ) assertEquals( i + " ^ " + j, ( long )Math.pow( i, j ), pow( i, j ) );
        }
    }
    /** 0L raised to a negative power is a division by zero and must throw. */
    @Test( expected = ArithmeticException.class, timeout = 5000L )
    public void powLongIntException()
    {
        pow( 0L, -1 );
    }
    /** Negative powers for long bases: only |base| == 1 survives. */
    @Test( timeout = 5000L )
    public void powLongIntSpecial()
    {
        for ( long i : LONGS ) if ( i != 0L ) for ( int j : INTS ) if ( j < 0 ) assertEquals( i == 1 ? 1L : i == -1 ? ( ( j & 1 ) == 1 ? -1L : 1L ) : 0L, pow( i, j ) );
    }
    /** powExact( int, int ) on random inputs: exact result or ArithmeticException on overflow. */
    @Test( timeout = 5000L )
    public void powExactIntIntRandom()
    {
        for ( int i = 0; i < 100; i++ )
        {
            int base = RANDOM.nextInt();
            for ( int j = 0; j < 100; j++ )
            {
                int power = RANDOM.nextInt( Integer.MAX_VALUE );
                // Cheap pre-check: bitLength( base ) * power > 32 means BigInteger.pow would be huge, skip computing it.
                boolean overflow = BigInteger.valueOf( 31 - Integer.numberOfLeadingZeros( Math.abs( base ) ) ).multiply( BigInteger.valueOf( power ) ).compareTo( BigInteger.valueOf( 32 ) ) > 0;
                BigInteger expected = overflow ? null : BigInteger.valueOf( base ).pow( power );
                if ( expected != null && expected.bitLength() <= 31 ) assertEquals( base + " ^ " + power, expected.intValue(), powExact( base, power ) );
                else
                {
                    try
                    {
                        powExact( base, power );
                        fail( "Should overflow: " + base + " ^ " + power );
                    }
                    catch ( ArithmeticException e )
                    {
                    }
                }
            }
        }
    }
    @Test( timeout = 5000L )
    public void powExactIntIntCasual()
    {
        assertEquals( 3, powExact( 3, 1 ) );
        assertEquals( 25, powExact( 5, 2 ) );
        assertEquals( 729, powExact( 9, 3 ) );
    }
    /** powExact( int, int ) on the exhaustive grid: must agree with BigInteger, Math.pow and pow, or throw. */
    @Test( timeout = 5000L )
    public void powExactIntIntExact()
    {
        for ( int base : INTS )
        {
            for ( int power : INTS )
            {
                if ( power < 0 ) continue;
                boolean overflow = BigInteger.valueOf( 31 - Integer.numberOfLeadingZeros( Math.abs( base ) ) ).multiply( BigInteger.valueOf( power ) ).compareTo( BigInteger.valueOf( 32 ) ) > 0;
                BigInteger expected = overflow ? null : BigInteger.valueOf( base ).pow( power );
                if ( expected != null && expected.bitLength() <= 31 )
                {
                    int value = powExact( base, power );
                    assertEquals( base + " ^ " + power, expected.intValue(), value );
                    assertEquals( base + " ^ " + power, ( int )Math.pow( base, power ), value );
                    assertEquals( base + " ^ " + power, pow( base, power ), value );
                }
                else
                {
                    try
                    {
                        powExact( base, power );
                        fail( "Should overflow: " + base + " ^ " + power );
                    }
                    catch ( ArithmeticException e )
                    {
                    }
                }
            }
        }
    }
    @Test( expected = ArithmeticException.class, timeout = 5000L )
    public void powExactIntIntException()
    {
        powExact( 0, -1 );
    }
    @Test( timeout = 5000L )
    public void powExactIntIntSpecial()
    {
        for ( int i : INTS ) if ( i != 0 ) for ( int j : INTS ) if ( j < 0 ) assertEquals( i == 1 ? 1 : i == -1 ? ( ( j & 1 ) == 1 ? -1 : 1 ) : 0, powExact( i, j ) );
    }
    /** powExact( long, int ) on random inputs: exact result or ArithmeticException on overflow. */
    @Test( timeout = 5000L )
    public void powExactLongIntRandom()
    {
        for ( int i = 0; i < 100; i++ )
        {
            long base = RANDOM.nextLong();
            for ( int j = 0; j < 100; j++ )
            {
                int power = RANDOM.nextInt( Integer.MAX_VALUE );
                boolean overflow = BigInteger.valueOf( 63 - Long.numberOfLeadingZeros( Math.abs( base ) ) ).multiply( BigInteger.valueOf( power ) ).compareTo( BigInteger.valueOf( 64 ) ) > 0;
                BigInteger expected = overflow ? null : BigInteger.valueOf( base ).pow( power );
                if ( expected != null && expected.bitLength() <= 63 ) assertEquals( base + " ^ " + power, expected.longValue(), powExact( base, power ) );
                else
                {
                    try
                    {
                        powExact( base, power );
                        fail( "Should overflow: " + base + " ^ " + power );
                    }
                    catch ( ArithmeticException e )
                    {
                    }
                }
            }
        }
    }
    @Test( timeout = 5000L )
    public void powExactLongIntCasual()
    {
        assertEquals( 3L, powExact( 3L, 1 ) );
        assertEquals( 25L, powExact( 5L, 2 ) );
        assertEquals( 729L, powExact( 9L, 3 ) );
    }
    /** powExact( long, int ) on the exhaustive grid: must agree with BigInteger, Math.pow (within double precision) and pow, or throw. */
    @Test( timeout = 5000L )
    public void powExactLongIntExact()
    {
        for ( long base : LONGS )
        {
            for ( int power : INTS )
            {
                if ( power < 0 ) continue;
                boolean overflow = BigInteger.valueOf( 63 - Long.numberOfLeadingZeros( Math.abs( base ) ) ).multiply( BigInteger.valueOf( power ) ).compareTo( BigInteger.valueOf( 64 ) ) > 0;
                BigInteger expected = overflow ? null : BigInteger.valueOf( base ).pow( power );
                if ( expected != null && expected.bitLength() <= 63 )
                {
                    long value = powExact( base, power );
                    assertEquals( base + " ^ " + power, expected.longValue(), value );
                    // Math.pow loses precision above 2^53; only compare while the value is exactly representable.
                    if ( value < 10000000000000000L ) assertEquals( base + " ^ " + power, power == 1 ? base : ( long )Math.pow( base, power ), value );
                    assertEquals( base + " ^ " + power, pow( base, power ), value );
                }
                else
                {
                    try
                    {
                        powExact( base, power );
                        fail( "Should overflow: " + base + " ^ " + power );
                    }
                    catch ( ArithmeticException e )
                    {
                    }
                }
            }
        }
    }
    @Test( expected = ArithmeticException.class, timeout = 5000L )
    public void powExactLongIntException()
    {
        powExact( 0L, -1 );
    }
    @Test( timeout = 5000L )
    public void powExactLongIntSpecial()
    {
        for ( long i : LONGS ) if ( i != 0 ) for ( int j : INTS ) if ( j < 0 ) assertEquals( i == 1L ? 1L : i == -1L ? ( ( j & 1 ) == 1 ? -1L : 1L ) : 0L, powExact( i, j ) );
    }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.MapMakerInternalMap.Strength.SOFT;
import static com.google.common.collect.MapMakerInternalMap.Strength.STRONG;
import static com.google.common.collect.MapMakerInternalMap.Strength.WEAK;
import static com.google.common.collect.testing.IteratorFeature.SUPPORTS_REMOVE;
import static com.google.common.testing.SerializableTester.reserializeAndAssert;
import static java.util.Arrays.asList;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.isA;
import com.google.common.base.Equivalences;
import com.google.common.collect.MapMaker.RemovalListener;
import com.google.common.collect.MapMaker.RemovalNotification;
import com.google.common.collect.Multiset.Entry;
import com.google.common.collect.testing.IteratorTester;
import junit.framework.TestCase;
import org.easymock.EasyMock;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Test case for {@link ConcurrentHashMultiset}.
*
* @author Cliff L. Biffle
* @author mike nonemacher
*/
public class ConcurrentHashMultisetTest extends TestCase {
private static final String KEY = "puppies";
ConcurrentMap<String, AtomicInteger> backingMap;
ConcurrentHashMultiset<String> multiset;
// Installs an EasyMock mock as the multiset's backing map so each test can
// script the exact sequence of ConcurrentMap operations it expects.
// NOTE(review): replay()/verify()/reset() are no-arg helpers not visible in this
// chunk — presumably class-level wrappers delegating to EasyMock for backingMap; confirm.
@SuppressWarnings("unchecked")
@Override protected void setUp() {
  backingMap = EasyMock.createMock(ConcurrentMap.class);
  // The ConcurrentHashMultiset constructor probes isEmpty() on the map it is handed.
  expect(backingMap.isEmpty()).andReturn(true);
  replay();
  multiset = new ConcurrentHashMultiset<String>(backingMap);
  verify();
  reset();
}
// count() of a key present in the backing map returns the stored counter value.
public void testCount_elementPresent() {
  final int storedCount = 12;
  expect(backingMap.get(KEY)).andReturn(new AtomicInteger(storedCount));
  replay();
  assertEquals(storedCount, multiset.count(KEY));
  verify();
}
// count() of a key missing from the backing map is zero.
public void testCount_elementAbsent() {
  expect(backingMap.get(KEY)).andReturn(null);
  replay();
  assertEquals(0, multiset.count(KEY));
  verify();
}
// add(key, 0) is a pure read: it reports the current count and mutates nothing.
public void testAdd_zero() {
  final int currentCount = 32;
  expect(backingMap.get(KEY)).andReturn(new AtomicInteger(currentCount));
  replay();
  assertEquals(currentCount, multiset.add(KEY, 0));
  verify();
}
// First add of an absent key: a fresh counter goes in via putIfAbsent and the prior count is 0.
public void testAdd_firstFewWithSuccess() {
  final int countToAdd = 400;
  expect(backingMap.get(KEY)).andReturn(null);
  expect(backingMap.putIfAbsent(eq(KEY), isA(AtomicInteger.class))).andReturn(null);
  replay();
  assertEquals(0, multiset.add(KEY, countToAdd));
  verify();
}
// Adding to an existing key bumps the shared AtomicInteger in place and returns the old count.
public void testAdd_laterFewWithSuccess() {
  final int priorCount = 32;
  final int addend = 400;
  AtomicInteger sharedCounter = new AtomicInteger(priorCount);
  expect(backingMap.get(KEY)).andReturn(sharedCounter);
  replay();
  assertEquals(priorCount, multiset.add(KEY, addend));
  assertEquals(priorCount + addend, sharedCounter.get());
  verify();
}
// An add that would push the count past Integer.MAX_VALUE must be rejected up front.
public void testAdd_laterFewWithOverflow() {
  final int priorCount = 92384930;
  // Exactly one more than the headroom left before Integer.MAX_VALUE.
  final int overflowingAddend = Integer.MAX_VALUE - priorCount + 1;
  expect(backingMap.get(KEY)).andReturn(new AtomicInteger(priorCount));
  replay();
  try {
    multiset.add(KEY, overflowingAddend);
    fail("Must reject arguments that would cause counter overflow.");
  } catch (IllegalArgumentException expected) {
    // Expected.
  }
  verify();
}
/**
 * Simulate some of the races that can happen on add. We can't easily simulate the race that
 * happens when an {@link AtomicInteger#compareAndSet} fails, but we can simulate the case where
 * the putIfAbsent returns a non-null value, and the case where the replace() of an observed
 * zero fails.
 */
public void testAdd_withFailures() {
  AtomicInteger existing = new AtomicInteger(12);
  AtomicInteger existingZero = new AtomicInteger(0);
  // initial map.get()
  expect(backingMap.get(KEY)).andReturn(null);
  // since get returned null, try a putIfAbsent; that fails due to a simulated race
  expect(backingMap.putIfAbsent(eq(KEY), isA(AtomicInteger.class))).andReturn(existingZero);
  // since the putIfAbsent returned a zero, we'll try to replace...
  expect(backingMap.replace(eq(KEY), eq(existingZero), isA(AtomicInteger.class)))
      .andReturn(false);
  // ...and then putIfAbsent. Simulate failure on both
  expect(backingMap.putIfAbsent(eq(KEY), isA(AtomicInteger.class))).andReturn(existing);
  // next map.get()
  expect(backingMap.get(KEY)).andReturn(existingZero);
  // since get returned zero, try a replace; that fails due to a simulated race
  expect(backingMap.replace(eq(KEY), eq(existingZero), isA(AtomicInteger.class)))
      .andReturn(false);
  expect(backingMap.putIfAbsent(eq(KEY), isA(AtomicInteger.class))).andReturn(existing);
  // another map.get()
  expect(backingMap.get(KEY)).andReturn(existing);
  // we shouldn't see any more map operations; CHM will now just update the AtomicInteger
  replay();
  // Fixed: expected/actual were swapped (JUnit's assertEquals takes expected first),
  // which would have produced a misleading failure message.
  assertEquals(12, multiset.add(KEY, 3));
  assertEquals(15, existing.get());
  verify();
}
// remove(key, 0) is a pure read when the key is present.
public void testRemove_zeroFromSome() {
  final int presentCount = 14;
  expect(backingMap.get(KEY)).andReturn(new AtomicInteger(presentCount));
  replay();
  assertEquals(presentCount, multiset.remove(KEY, 0));
  verify();
}
// remove(key, 0) on an absent key reports zero and touches nothing.
public void testRemove_zeroFromNone() {
  expect(backingMap.get(KEY)).andReturn(null);
  replay();
  assertEquals(0, multiset.remove(KEY, 0));
  verify();
}
// Removing from an absent key is a no-op that reports a prior count of zero.
public void testRemove_nonePresent() {
  expect(backingMap.get(KEY)).andReturn(null);
  replay();
  assertEquals(0, multiset.remove(KEY, 400));
  verify();
}
// A partial remove decrements the shared counter in place and leaves the entry in the map.
public void testRemove_someRemaining() {
  final int removed = 30;
  final int leftover = 1;
  AtomicInteger sharedCounter = new AtomicInteger(removed + leftover);
  expect(backingMap.get(KEY)).andReturn(sharedCounter);
  replay();
  assertEquals(removed + leftover, multiset.remove(KEY, removed));
  assertEquals(leftover, sharedCounter.get());
  verify();
}
// Removing the last occurrences drives the counter to zero and then tries to
// evict the map entry; a failed eviction is tolerated (another thread may have
// already removed it), so the mock deliberately returns false.
public void testRemove_noneRemaining() {
  int countToRemove = 30;
  AtomicInteger current = new AtomicInteger(countToRemove);
  expect(backingMap.get(KEY)).andReturn(current);
  // it's ok if removal fails: another thread may have done the remove
  expect(backingMap.remove(KEY, current)).andReturn(false);
  replay();
  assertEquals(countToRemove, multiset.remove(KEY, countToRemove));
  assertEquals(0, current.get());
  verify();
}
// Iterator.remove() against a real (unmocked) multiset drains every occurrence, duplicates included.
public void testIteratorRemove_actualMap() {
  // Override to avoid using mocks.
  multiset = ConcurrentHashMultiset.create();
  multiset.add(KEY);
  multiset.add(KEY + "_2");
  multiset.add(KEY);
  int removals = 0;
  Iterator<String> iterator = multiset.iterator();
  while (iterator.hasNext()) {
    iterator.next();
    iterator.remove();
    removals++;
  }
  assertTrue(multiset.isEmpty());
  assertEquals(3, removals);
}
// Exercises multiset.iterator() through Guava's IteratorTester, which runs all
// next()/remove() interleavings up to 5 steps and, after each run, checks the
// multiset still mirrors the elements the iterator reported.
public void testIterator() {
  // multiset.iterator
  List<String> expected = asList("a", "a", "b", "b", "b");
  new IteratorTester<String>(
      5, asList(SUPPORTS_REMOVE), expected, IteratorTester.KnownOrder.UNKNOWN_ORDER) {
    ConcurrentHashMultiset<String> multiset;
    @Override protected Iterator<String> newTargetIterator() {
      // A fresh multiset per interleaving, so removals from one run don't leak into the next.
      multiset = ConcurrentHashMultiset.create();
      multiset.add("a", 2);
      multiset.add("b", 3);
      return multiset.iterator();
    }
    @Override protected void verify(List<String> elements) {
      super.verify(elements);
      assertEquals(ImmutableMultiset.copyOf(elements), multiset);
    }
  }.test();
}
// Exercises the package-private entryIterator() through IteratorTester: each
// distinct element appears as a single (element, count) entry, and removals via
// the entry iterator must be reflected in the multiset.
public void testEntryIterator() {
  // multiset.entryIterator
  List<Entry<String>> expected = asList(
      Multisets.immutableEntry("a", 1),
      Multisets.immutableEntry("b", 2),
      Multisets.immutableEntry("c", 3),
      Multisets.immutableEntry("d", 4),
      Multisets.immutableEntry("e", 5));
  new IteratorTester<Entry<String>>(
      5, asList(SUPPORTS_REMOVE), expected, IteratorTester.KnownOrder.UNKNOWN_ORDER) {
    ConcurrentHashMultiset<String> multiset;
    @Override protected Iterator<Entry<String>> newTargetIterator() {
      // A fresh multiset per interleaving, so removals from one run don't leak into the next.
      multiset = ConcurrentHashMultiset.create();
      multiset.add("a", 1);
      multiset.add("b", 2);
      multiset.add("c", 3);
      multiset.add("d", 4);
      multiset.add("e", 5);
      return multiset.entryIterator();
    }
    @Override protected void verify(List<Entry<String>> elements) {
      super.verify(elements);
      assertEquals(ImmutableSet.copyOf(elements), ImmutableSet.copyOf(multiset.entryIterator()));
    }
  }.test();
}
// setCount() overwrites the live counter and reports the value it displaced.
public void testSetCount_basic() {
  final int oldCount = 20;
  final int newCount = 40;
  AtomicInteger sharedCounter = new AtomicInteger(oldCount);
  expect(backingMap.get(KEY)).andReturn(sharedCounter);
  replay();
  assertEquals(oldCount, multiset.setCount(KEY, newCount));
  assertEquals(newCount, sharedCounter.get());
  verify();
}
// setCount(key, 0) zeroes the counter and evicts the entry from the backing map.
public void testSetCount_asRemove() {
  final int oldCount = 40;
  AtomicInteger sharedCounter = new AtomicInteger(oldCount);
  expect(backingMap.get(KEY)).andReturn(sharedCounter);
  expect(backingMap.remove(KEY, sharedCounter)).andReturn(true);
  replay();
  assertEquals(oldCount, multiset.setCount(KEY, 0));
  assertEquals(0, sharedCounter.get());
  verify();
}
// setCount(key, 0) on an absent key is a no-op reporting a prior count of zero.
public void testSetCount_0_nonePresent() {
  expect(backingMap.get(KEY)).andReturn(null);
  replay();
  assertEquals(0, multiset.setCount(KEY, 0));
  verify();
}
// create() yields an empty multiset that survives a serialization round trip.
public void testCreate() {
  ConcurrentHashMultiset<Integer> fresh = ConcurrentHashMultiset.create();
  assertTrue(fresh.isEmpty());
  reserializeAndAssert(fresh);
}
// create(Iterable) preserves duplicate counts and serializes cleanly.
public void testCreateFromIterable() {
  Iterable<Integer> source = asList(1, 2, 2, 3, 4);
  ConcurrentHashMultiset<Integer> fromIterable = ConcurrentHashMultiset.create(source);
  assertEquals(2, fromIterable.count(2));
  reserializeAndAssert(fromIterable);
}
// Identity-based key equivalence must behave the same for every key strength.
public void testIdentityKeyEquality_strongKeys() {
  testIdentityKeyEquality(STRONG);
}
public void testIdentityKeyEquality_softKeys() {
  testIdentityKeyEquality(SOFT);
}
public void testIdentityKeyEquality_weakKeys() {
  testIdentityKeyEquality(WEAK);
}
// With Equivalences.identity(), two equal-but-distinct String instances must be
// tracked as separate elements: adds/removes on one never affect the other's count.
private void testIdentityKeyEquality(
    MapMakerInternalMap.Strength keyStrength) {
  MapMaker mapMaker = new MapMaker()
      .setKeyStrength(keyStrength)
      .keyEquivalence(Equivalences.identity());
  ConcurrentHashMultiset<String> multiset =
      ConcurrentHashMultiset.create(mapMaker);
  // new String(...) guarantees two distinct instances with equal content.
  String s1 = new String("a");
  String s2 = new String("a");
  assertEquals(s1, s2); // Stating the obvious.
  assertTrue(s1 != s2); // Stating the obvious.
  multiset.add(s1);
  assertTrue(multiset.contains(s1));
  assertFalse(multiset.contains(s2));
  assertEquals(1, multiset.count(s1));
  assertEquals(0, multiset.count(s2));
  multiset.add(s1);
  multiset.add(s2, 3);
  assertEquals(2, multiset.count(s1));
  assertEquals(3, multiset.count(s2));
  multiset.remove(s1);
  assertEquals(1, multiset.count(s1));
  assertEquals(3, multiset.count(s2));
}
// equals()-based key equivalence must behave the same for every key strength.
public void testLogicalKeyEquality_strongKeys() {
  testLogicalKeyEquality(STRONG);
}
public void testLogicalKeyEquality_softKeys() {
  testLogicalKeyEquality(SOFT);
}
public void testLogicalKeyEquality_weakKeys() {
  testLogicalKeyEquality(WEAK);
}
// With Equivalences.equals(), two equal-but-distinct String instances share a
// single count: adds/removes through either instance affect the same element.
private void testLogicalKeyEquality(
    MapMakerInternalMap.Strength keyStrength) {
  MapMaker mapMaker = new MapMaker()
      .setKeyStrength(keyStrength)
      .keyEquivalence(Equivalences.equals());
  ConcurrentHashMultiset<String> multiset =
      ConcurrentHashMultiset.create(mapMaker);
  // new String(...) guarantees two distinct instances with equal content.
  String s1 = new String("a");
  String s2 = new String("a");
  assertEquals(s1, s2); // Stating the obvious.
  multiset.add(s1);
  assertTrue(multiset.contains(s1));
  assertTrue(multiset.contains(s2));
  assertEquals(1, multiset.count(s1));
  assertEquals(1, multiset.count(s2));
  multiset.add(s2, 3);
  assertEquals(4, multiset.count(s1));
  assertEquals(4, multiset.count(s2));
  multiset.remove(s1);
  assertEquals(3, multiset.count(s1));
  assertEquals(3, multiset.count(s2));
}
// An empty MapMaker-backed multiset must survive a serialization round trip.
public void testSerializationWithMapMaker1() {
  MapMaker mapMaker = new MapMaker();
  multiset = ConcurrentHashMultiset.create(mapMaker);
  reserializeAndAssert(multiset);
}
// A populated MapMaker-backed multiset (with duplicates) must survive serialization.
public void testSerializationWithMapMaker2() {
  MapMaker mapMaker = new MapMaker();
  multiset = ConcurrentHashMultiset.create(mapMaker);
  multiset.addAll(ImmutableList.of("a", "a", "b", "c", "d", "b"));
  reserializeAndAssert(multiset);
}
/**
 * Round-trips a multiset whose backing map expires entries after write,
 * confirming that the expiration configuration does not break serialization.
 */
public void testSerializationWithMapMaker3() {
  multiset = ConcurrentHashMultiset.create(
      new MapMaker().expireAfterWrite(1, TimeUnit.SECONDS));
  multiset.addAll(ImmutableList.of("a", "a", "b", "c", "d", "b"));
  reserializeAndAssert(multiset);
}
/**
 * Confirms that identity key equivalence survives a serialization round trip:
 * two logically equal but distinct String instances must remain distinct keys
 * in the deserialized multiset.
 */
public void testSerializationWithMapMaker_preservesIdentityKeyEquivalence() {
  ConcurrentHashMultiset<String> multiset = ConcurrentHashMultiset.create(
      new MapMaker().keyEquivalence(Equivalences.identity()));
  multiset = reserializeAndAssert(multiset);

  String first = new String("a");
  String second = new String("a");
  assertEquals(first, second); // Stating the obvious.
  assertTrue(first != second); // Stating the obvious.

  multiset.add(first);
  assertTrue(multiset.contains(first));
  assertFalse(multiset.contains(second));
  assertEquals(1, multiset.count(first));
  assertEquals(0, multiset.count(second));
}
// @Suppress(owner = "bmanes", detail = "Does not call the eviction listener")
// public void testWithMapMakerEvictionListener_BROKEN1()
// throws InterruptedException {
// MapEvictionListener<String, Number> evictionListener =
// mockEvictionListener();
// evictionListener.onEviction("a", 5);
// EasyMock.replay(evictionListener);
//
// GenericMapMaker<String, Number> mapMaker = new MapMaker()
// .expireAfterWrite(100, TimeUnit.MILLISECONDS)
// .evictionListener(evictionListener);
//
// ConcurrentHashMultiset<String> multiset =
// ConcurrentHashMultiset.create(mapMaker);
//
// multiset.add("a", 5);
//
// assertTrue(multiset.contains("a"));
// assertEquals(5, multiset.count("a"));
//
// Thread.sleep(2000);
//
// EasyMock.verify(evictionListener);
// }
// @Suppress(owner = "bmanes", detail = "Does not call the eviction listener")
// public void testWithMapMakerEvictionListener_BROKEN2()
// throws InterruptedException {
// MapEvictionListener<String, Number> evictionListener =
// mockEvictionListener();
// evictionListener.onEviction("a", 5);
// EasyMock.replay(evictionListener);
//
// GenericMapMaker<String, Number> mapMaker = new MapMaker()
// .expireAfterWrite(100, TimeUnit.MILLISECONDS)
// .evictionListener(evictionListener);
//
// ConcurrentHashMultiset<String> multiset =
// ConcurrentHashMultiset.create(mapMaker);
//
// multiset.add("a", 5);
//
// assertTrue(multiset.contains("a"));
// assertEquals(5, multiset.count("a"));
//
// Thread.sleep(2000);
//
// // This call should have the side-effect of calling the
// // eviction listener, but it does not.
// assertFalse(multiset.contains("a"));
//
// EasyMock.verify(evictionListener);
// }
/**
 * Verifies that size-based eviction in the backing map fires the removal
 * listener and that the evicted entry still carries its pre-eviction count.
 */
public void testWithMapMakerEvictionListener() {
  // Collect every removal notification the backing map publishes.
  final List<RemovalNotification<String, Number>> notifications = Lists.newArrayList();
  RemovalListener<String, Number> listener =
      new RemovalListener<String, Number>() {
        @Override public void onRemoval(RemovalNotification<String, Number> notification) {
          notifications.add(notification);
        }
      };
  @SuppressWarnings("deprecation") // TODO(kevinb): what to do?
  GenericMapMaker<String, Number> mapMaker = new MapMaker()
      .concurrencyLevel(1)
      .maximumSize(1)
      .removalListener(listener);
  ConcurrentHashMultiset<String> multiset = ConcurrentHashMultiset.create(mapMaker);

  multiset.add("a", 5);
  assertTrue(multiset.contains("a"));
  assertEquals(5, multiset.count("a"));

  // Capacity is 1, so adding "b" must evict "a".
  multiset.add("b", 3);
  assertFalse(multiset.contains("a"));
  assertTrue(multiset.contains("b"));
  assertEquals(3, multiset.count("b"));

  RemovalNotification<String, Number> evicted = Iterables.getOnlyElement(notifications);
  assertEquals("a", evicted.getKey());
  // The map evicted this entry, so CHM didn't have a chance to zero it.
  assertEquals(5, evicted.getValue().intValue());
}
/** Switches the mocked backing map into replay mode. */
private void replay() {
  EasyMock.replay(backingMap);
}
/** Verifies all expectations recorded on the mocked backing map. */
private void verify() {
  EasyMock.verify(backingMap);
}
/** Resets the mocked backing map so new expectations can be recorded. */
private void reset() {
  EasyMock.reset(backingMap);
}
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2015, Red Hat, Inc., and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// SAXCatalogReader.java - Read XML Catalog files
package org.jboss.util.xml.catalog.readers;
import java.util.Hashtable;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
import org.xml.sax.AttributeList;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.DocumentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.Parser;
import org.xml.sax.SAXException;
import org.jboss.util.xml.catalog.Catalog;
import org.jboss.util.xml.catalog.CatalogException;
import org.jboss.util.xml.catalog.CatalogManager;
import org.jboss.util.xml.catalog.helpers.Debug;
import org.jboss.util.xml.catalog.readers.CatalogReader;
/**
 * A SAX-based CatalogReader.
 *
 * <p>This class is used to read XML Catalogs using the SAX. This reader
 * has an advantage over the DOM-based reader in that it functions on
 * the stream of SAX events. It has the disadvantage
 * that it cannot look around in the tree.</p>
 *
 * <p>Since the choice of CatalogReaders (in the InputStream case) can only
 * be made on the basis of MIME type, the following problem occurs: only
 * one CatalogReader can exist for all XML mime types. In order to get
 * around this problem, the SAXCatalogReader relies on a set of external
 * CatalogParsers to actually build the catalog.</p>
 *
 * <p>The selection of CatalogParsers is made on the basis of the QName
 * of the root element of the document.</p>
 *
 * @see Catalog
 * @see CatalogReader
 * @see SAXCatalogReader
 * @see TextCatalogReader
 * @see DOMCatalogParser
 *
 * @author Norman Walsh
 * <a href="mailto:Norman.Walsh@Sun.COM">Norman.Walsh@Sun.COM</a>
 *
 * @version 1.0
 */
@SuppressWarnings({"unchecked", "deprecation"})
public class SAXCatalogReader implements CatalogReader, ContentHandler, DocumentHandler {
    /** The SAX Parser Factory. */
    protected SAXParserFactory parserFactory = null;

    /** The SAX Parser Class. */
    protected String parserClass = null;

    /**
     * Mapping table from QNames to CatalogParser classes.
     *
     * <p>Each key in this hash table has the form "elementname"
     * or "{namespaceuri}elementname". The former is used if the
     * namespace URI is null.</p>
     */
    protected Hashtable namespaceMap = new Hashtable();

    /** The parser in use for the current catalog; null until the root element is seen. */
    private SAXCatalogParser saxParser = null;

    /** Set if something goes horribly wrong. It allows the class to
     * ignore the rest of the events that are received.
     */
    private boolean abandonHope = false;

    /** The Catalog that we're working for. */
    private Catalog catalog;

    /** Set the XML SAX Parser Factory.
     * @param parserFactory the factory used to create SAX2 parsers
     */
    public void setParserFactory(SAXParserFactory parserFactory) {
        this.parserFactory = parserFactory;
    }

    /** Set the XML SAX Parser Class.
     * @param parserClass fully qualified class name of a SAX1 {@link Parser}
     */
    public void setParserClass(String parserClass) {
        this.parserClass = parserClass;
    }

    /** @return the parser factory currently in use. */
    public SAXParserFactory getParserFactory() {
        return parserFactory;
    }

    /** @return the parser class currently in use. */
    public String getParserClass() {
        return parserClass;
    }

    /** The debug class to use for this reader.
     *
     * This is a bit of a hack. Anyway, whenever we read for a catalog,
     * we extract the debug object
     * from the catalog's manager so that we can use it to print messages.
     *
     * In production, we don't really expect any messages so it doesn't
     * really matter. But it's still a bit of a hack.
     */
    protected Debug debug = CatalogManager.getStaticManager().debug;

    /** The constructor. */
    public SAXCatalogReader() {
        parserFactory = null;
        parserClass = null;
    }

    /** The constructor.
     * @param parserFactory the factory used to create SAX2 parsers */
    public SAXCatalogReader(SAXParserFactory parserFactory) {
        this.parserFactory = parserFactory;
    }

    /** The constructor.
     * @param parserClass fully qualified class name of a SAX1 {@link Parser} */
    public SAXCatalogReader(String parserClass) {
        this.parserClass = parserClass;
    }

    /** Set the SAXCatalogParser class for the given namespace/root
     * element type.
     * @param namespaceURI namespace of the root element, or null
     * @param rootElement local name of the root element
     * @param parserClass fully qualified SAXCatalogParser class name
     */
    public void setCatalogParser(String namespaceURI,
                                 String rootElement,
                                 String parserClass) {
        if (namespaceURI == null) {
            namespaceMap.put(rootElement, parserClass);
        } else {
            namespaceMap.put("{"+namespaceURI+"}"+rootElement, parserClass);
        }
    }

    /** Get the SAXCatalogParser class for the given namespace/root
     * element type.
     * @param namespaceURI namespace of the root element, or null
     * @param rootElement local name of the root element
     * @return the catalog parser class name, or null if none is registered
     */
    public String getCatalogParser(String namespaceURI,
                                   String rootElement) {
        if (namespaceURI == null) {
            return (String) namespaceMap.get(rootElement);
        } else {
            return (String) namespaceMap.get("{"+namespaceURI+"}"+rootElement);
        }
    }

    /**
     * Parse an XML Catalog file.
     *
     * <p>A missing catalog file is logged and ignored (best effort), not
     * reported as an error.</p>
     *
     * @param catalog The catalog to which this catalog file belongs
     * @param fileUrl The URL or filename of the catalog file to process
     *
     * @throws MalformedURLException Improper fileUrl
     * @throws IOException Error reading catalog file
     */
    public void readCatalog(Catalog catalog, String fileUrl)
        throws MalformedURLException, IOException,
               CatalogException {

        URL url = null;

        try {
            url = new URL(fileUrl);
        } catch (MalformedURLException e) {
            // Not a URL; treat it as a local filename.
            url = new URL("file:///" + fileUrl);
        }

        debug = catalog.getCatalogManager().debug;

        try {
            URLConnection urlCon = url.openConnection();
            readCatalog(catalog, urlCon.getInputStream());
        } catch (FileNotFoundException e) {
            catalog.getCatalogManager().debug.message(1, "Failed to load catalog, file not found",
                                                      url.toString());
        }
    }

    /**
     * Parse an XML Catalog stream.
     *
     * <p>Uses the configured {@link SAXParserFactory} if one is set,
     * otherwise instantiates the configured SAX1 parser class. One of the
     * two must be configured before calling this method.</p>
     *
     * @param catalog The catalog to which this catalog file belongs
     * @param is The input stream from which the catalog will be read
     *
     * @throws IOException Error reading catalog file
     * @throws CatalogException A Catalog exception
     */
    public void readCatalog(Catalog catalog, InputStream is)
        throws IOException, CatalogException {

        // Create an instance of the parser
        if (parserFactory == null && parserClass == null) {
            debug.message(1, "Cannot read SAX catalog without a parser");
            throw new CatalogException(CatalogException.UNPARSEABLE);
        }

        debug = catalog.getCatalogManager().debug;
        EntityResolver bResolver = catalog.getCatalogManager().getBootstrapResolver();

        this.catalog = catalog;

        try {
            if (parserFactory != null) {
                SAXParser parser = parserFactory.newSAXParser();
                SAXParserHandler spHandler = new SAXParserHandler();
                spHandler.setContentHandler(this);
                if (bResolver != null) {
                    spHandler.setEntityResolver(bResolver);
                }
                parser.parse(new InputSource(is), spHandler);
            } else {
                Parser parser = (Parser) Class.forName(parserClass).newInstance();
                parser.setDocumentHandler(this);
                if (bResolver != null) {
                    parser.setEntityResolver(bResolver);
                }
                parser.parse(new InputSource(is));
            }
        } catch (ClassNotFoundException cnfe) {
            throw new CatalogException(CatalogException.UNPARSEABLE);
        } catch (IllegalAccessException iae) {
            throw new CatalogException(CatalogException.UNPARSEABLE);
        } catch (InstantiationException ie) {
            throw new CatalogException(CatalogException.UNPARSEABLE);
        } catch (ParserConfigurationException pce) {
            throw new CatalogException(CatalogException.UNKNOWN_FORMAT);
        } catch (SAXException se) {
            Exception e = se.getException();
            // Compare against the class literals directly instead of
            // instantiating throwaway exceptions just to obtain their
            // classes (this was the old FIXME). Exact-class comparison is
            // deliberately preserved: subclasses still fall through to the
            // generic CatalogException(se) below, as before.
            if (e != null
                && (e.getClass() == UnknownHostException.class
                    || e.getClass() == FileNotFoundException.class)) {
                throw new CatalogException(CatalogException.PARSE_FAILED,
                                           e.toString());
            }
            throw new CatalogException(se);
        }
    }

    // ----------------------------------------------------------------------
    // Implement the SAX ContentHandler interface

    /** The SAX <code>setDocumentLocator</code> method. Forwards to the delegate parser, if any. */
    public void setDocumentLocator (Locator locator) {
        if (saxParser != null) {
            saxParser.setDocumentLocator(locator);
        }
    }

    /** The SAX <code>startDocument</code> method. Resets per-document state. */
    public void startDocument () throws SAXException {
        saxParser = null;
        abandonHope = false;
        return;
    }

    /** The SAX <code>endDocument</code> method. Forwards to the delegate parser, if any. */
    public void endDocument ()throws SAXException {
        if (saxParser != null) {
            saxParser.endDocument();
        }
    }

    /**
     * The SAX <code>startElement</code> method.
     *
     * <p>The catalog parser is selected based on the namespace of the
     * first element encountered in the catalog.</p>
     */
    public void startElement (String name,
                              AttributeList atts)
        throws SAXException {

        if (abandonHope) {
            return;
        }

        if (saxParser == null) {
            // This is the root element: pick a parser by its QName.
            // SAX1 gives us only the raw name, so split off the prefix and
            // resolve the namespace from the xmlns attributes by hand.
            String prefix = "";
            if (name.indexOf(':') > 0) {
                prefix = name.substring(0, name.indexOf(':'));
            }

            String localName = name;
            if (localName.indexOf(':') > 0) {
                localName = localName.substring(localName.indexOf(':')+1);
            }

            String namespaceURI = null;
            if (prefix.equals("")) {
                namespaceURI = atts.getValue("xmlns");
            } else {
                namespaceURI = atts.getValue("xmlns:" + prefix);
            }

            String saxParserClass = getCatalogParser(namespaceURI,
                                                     localName);

            if (saxParserClass == null) {
                abandonHope = true;
                if (namespaceURI == null) {
                    debug.message(2, "No Catalog parser for " + name);
                } else {
                    debug.message(2, "No Catalog parser for "
                                  + "{" + namespaceURI + "}"
                                  + name);
                }
                return;
            }

            try {
                saxParser = (SAXCatalogParser)
                    Class.forName(saxParserClass).newInstance();

                saxParser.setCatalog(catalog);
                saxParser.startDocument();
                saxParser.startElement(name, atts);
            } catch (ClassNotFoundException cnfe) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, cnfe.toString());
            } catch (InstantiationException ie) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, ie.toString());
            } catch (IllegalAccessException iae) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, iae.toString());
            } catch (ClassCastException cce ) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, cce.toString());
            }
        } else {
            saxParser.startElement(name, atts);
        }
    }

    /**
     * The SAX2 <code>startElement</code> method.
     *
     * <p>The catalog parser is selected based on the namespace of the
     * first element encountered in the catalog.</p>
     */
    public void startElement (String namespaceURI,
                              String localName,
                              String qName,
                              Attributes atts)
        throws SAXException {

        if (abandonHope) {
            return;
        }

        if (saxParser == null) {
            // This is the root element: pick a parser by its QName.
            String saxParserClass = getCatalogParser(namespaceURI,
                                                     localName);

            if (saxParserClass == null) {
                abandonHope = true;
                if (namespaceURI == null) {
                    debug.message(2, "No Catalog parser for " + localName);
                } else {
                    debug.message(2, "No Catalog parser for "
                                  + "{" + namespaceURI + "}"
                                  + localName);
                }
                return;
            }

            try {
                saxParser = (SAXCatalogParser)
                    Class.forName(saxParserClass).newInstance();

                saxParser.setCatalog(catalog);
                saxParser.startDocument();
                saxParser.startElement(namespaceURI, localName, qName, atts);
            } catch (ClassNotFoundException cnfe) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, cnfe.toString());
            } catch (InstantiationException ie) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, ie.toString());
            } catch (IllegalAccessException iae) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, iae.toString());
            } catch (ClassCastException cce ) {
                saxParser = null;
                abandonHope = true;
                debug.message(2, cce.toString());
            }
        } else {
            saxParser.startElement(namespaceURI, localName, qName, atts);
        }
    }

    /** The SAX <code>endElement</code> method. Forwards to the delegate parser, if any. */
    public void endElement (String name) throws SAXException {
        if (saxParser != null) {
            saxParser.endElement(name);
        }
    }

    /** The SAX2 <code>endElement</code> method. Forwards to the delegate parser, if any. */
    public void endElement (String namespaceURI,
                            String localName,
                            String qName) throws SAXException {
        if (saxParser != null) {
            saxParser.endElement(namespaceURI, localName, qName);
        }
    }

    /** The SAX <code>characters</code> method. Forwards to the delegate parser, if any. */
    public void characters (char ch[], int start, int length)
        throws SAXException {
        if (saxParser != null) {
            saxParser.characters(ch, start, length);
        }
    }

    /** The SAX <code>ignorableWhitespace</code> method. Forwards to the delegate parser, if any. */
    public void ignorableWhitespace (char ch[], int start, int length)
        throws SAXException {
        if (saxParser != null) {
            saxParser.ignorableWhitespace(ch, start, length);
        }
    }

    /** The SAX <code>processingInstruction</code> method. Forwards to the delegate parser, if any. */
    public void processingInstruction (String target, String data)
        throws SAXException {
        if (saxParser != null) {
            saxParser.processingInstruction(target, data);
        }
    }

    /** The SAX <code>startPrefixMapping</code> method. Forwards to the delegate parser, if any. */
    public void startPrefixMapping (String prefix, String uri)
        throws SAXException {
        if (saxParser != null) {
            saxParser.startPrefixMapping (prefix, uri);
        }
    }

    /** The SAX <code>endPrefixMapping</code> method. Forwards to the delegate parser, if any. */
    public void endPrefixMapping (String prefix)
        throws SAXException {
        if (saxParser != null) {
            saxParser.endPrefixMapping (prefix);
        }
    }

    /** The SAX <code>skippedEntity</code> method. Forwards to the delegate parser, if any. */
    public void skippedEntity (String name)
        throws SAXException {
        if (saxParser != null) {
            saxParser.skippedEntity(name);
        }
    }
}
| |
package com.example.android.lifecycle;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import java.util.ArrayList;
public class MainActivity extends AppCompatActivity {

    /*
     * This tag will be used for logging. It is best practice to use the class's name using
     * getSimpleName as that will greatly help to identify the location from which your logs are
     * being posted.
     */
    private static final String TAG = MainActivity.class.getSimpleName();

    /*
     * This constant String will be used to store the content of the TextView used to display the
     * list of callbacks. The reason we are storing the contents of the TextView is so that you can
     * see the entire set of callbacks as they are called.
     */
    private static final String LIFECYCLE_CALLBACKS_TEXT_KEY = "callbacks";

    /* Constant values for the names of each respective lifecycle callback */
    private static final String ON_CREATE = "onCreate";
    private static final String ON_START = "onStart";
    private static final String ON_RESUME = "onResume";
    private static final String ON_PAUSE = "onPause";
    private static final String ON_STOP = "onStop";
    private static final String ON_RESTART = "onRestart";
    private static final String ON_DESTROY = "onDestroy";
    private static final String ON_SAVE_INSTANCE_STATE = "onSaveInstanceState";

    /*
     * This TextView will contain a running log of every lifecycle callback method called from this
     * Activity. This TextView can be reset to its default state by clicking the Button labeled
     * "Reset Log"
     */
    private TextView mLifecycleDisplay;

    /*
     * Static so the recorded callbacks survive Activity re-creation (e.g. rotation): callbacks
     * that fire after onSaveInstanceState (onStop, onDestroy) are recorded here and replayed into
     * the fresh instance's display in onCreate. Most-recent entries are inserted at index 0.
     */
    private static final ArrayList<String> mLifecycleCallbacks = new ArrayList<>();

    /**
     * Called when the activity is first created. This is where you should do all of your normal
     * static set up: create views, bind data to lists, etc.
     *
     * Always followed by onStart().
     *
     * @param savedInstanceState The Activity's previously frozen state, if there was one.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mLifecycleDisplay = (TextView) findViewById(R.id.tv_lifecycle_events_display);

        /*
         * If savedInstanceState is not null, that means our Activity is not being started for the
         * first time. Even if the savedInstanceState is not null, it is smart to check if the
         * bundle contains the key we are looking for. In our case, the key we are looking for maps
         * to the contents of the TextView that displays our list of callbacks. If the bundle
         * contains that key, we set the contents of the TextView accordingly.
         */
        if (savedInstanceState != null) {
            if (savedInstanceState.containsKey(LIFECYCLE_CALLBACKS_TEXT_KEY)) {
                String allPreviousLifecycleCallbacks = savedInstanceState
                        .getString(LIFECYCLE_CALLBACKS_TEXT_KEY);
                mLifecycleDisplay.setText(allPreviousLifecycleCallbacks);
            }
        }

        /*
         * Replay callbacks recorded after the previous instance was saved. They were inserted at
         * the front (newest first), so iterate backwards to append them in chronological order.
         * Build the text once and append in a single call, then clear the list so the entries are
         * not duplicated by the next re-creation.
         */
        if (!mLifecycleCallbacks.isEmpty()) {
            StringBuilder previousCallbacks = new StringBuilder();
            for (int i = mLifecycleCallbacks.size() - 1; i >= 0; i--) {
                previousCallbacks.append(mLifecycleCallbacks.get(i)).append("\n");
            }
            mLifecycleDisplay.append(previousCallbacks);
            mLifecycleCallbacks.clear();
        }

        logAndAppend(ON_CREATE);
    }

    /**
     * Called when the activity is becoming visible to the user.
     *
     * Followed by onResume() if the activity comes to the foreground, or onStop() if it becomes
     * hidden.
     */
    @Override
    protected void onStart() {
        super.onStart();
        logAndAppend(ON_START);
    }

    /**
     * Called when the activity will start interacting with the user. At this point your activity
     * is at the top of the activity stack, with user input going to it.
     *
     * Always followed by onPause().
     */
    @Override
    protected void onResume() {
        super.onResume();
        logAndAppend(ON_RESUME);
    }

    /**
     * Called when the system is about to start resuming a previous activity. This is typically
     * used to commit unsaved changes to persistent data, stop animations and other things that may
     * be consuming CPU, etc. Implementations of this method must be very quick because the next
     * activity will not be resumed until this method returns.
     *
     * Followed by either onResume() if the activity returns back to the front, or onStop() if it
     * becomes invisible to the user.
     */
    @Override
    protected void onPause() {
        super.onPause();
        logAndAppend(ON_PAUSE);
    }

    /**
     * Called when the activity is no longer visible to the user, because another activity has been
     * resumed and is covering this one. This may happen either because a new activity is being
     * started, an existing one is being brought in front of this one, or this one is being
     * destroyed.
     *
     * Followed by either onRestart() if this activity is coming back to interact with the user, or
     * onDestroy() if this activity is going away.
     */
    @Override
    protected void onStop() {
        super.onStop();

        // Record at the front: this fires after onSaveInstanceState, so it would otherwise be
        // lost when the Activity is re-created.
        mLifecycleCallbacks.add(0, ON_STOP);

        logAndAppend(ON_STOP);
    }

    /**
     * Called after your activity has been stopped, prior to it being started again.
     *
     * Always followed by onStart()
     */
    @Override
    protected void onRestart() {
        super.onRestart();
        logAndAppend(ON_RESTART);
    }

    /**
     * The final call you receive before your activity is destroyed. This can happen either because
     * the activity is finishing (someone called finish() on it, or because the system is
     * temporarily destroying this instance of the activity to save space. You can distinguish
     * between these two scenarios with the isFinishing() method.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();

        // Record at the front: this fires after onSaveInstanceState, so it would otherwise be
        // lost when the Activity is re-created.
        mLifecycleCallbacks.add(0, ON_DESTROY);

        logAndAppend(ON_DESTROY);
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        logAndAppend(ON_SAVE_INSTANCE_STATE);
        String lifecycleDisplayTextViewContents = mLifecycleDisplay.getText().toString();
        outState.putString(LIFECYCLE_CALLBACKS_TEXT_KEY, lifecycleDisplayTextViewContents);
    }

    /**
     * Logs to the console and appends the lifecycle method name to the TextView so that you can
     * view the series of method callbacks that are called both from the app and from within
     * Android Studio's Logcat.
     *
     * @param lifecycleEvent The name of the event to be logged.
     */
    private void logAndAppend(String lifecycleEvent) {
        Log.d(TAG, "Lifecycle Event: " + lifecycleEvent);

        mLifecycleDisplay.append(lifecycleEvent + "\n");
    }

    /**
     * This method resets the contents of the TextView to its default text of "Lifecycle callbacks"
     *
     * @param view The View that was clicked. In this case, it is the Button from our layout.
     */
    public void resetLifecycleDisplay(View view) {
        mLifecycleDisplay.setText("Lifecycle callbacks:\n");
    }
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.market.sensitivity;
import static com.opengamma.strata.basics.currency.Currency.EUR;
import static com.opengamma.strata.basics.currency.Currency.GBP;
import static com.opengamma.strata.basics.currency.Currency.JPY;
import static com.opengamma.strata.basics.currency.Currency.USD;
import static com.opengamma.strata.collect.TestHelper.assertSerialization;
import static com.opengamma.strata.collect.TestHelper.assertThrowsIllegalArg;
import static com.opengamma.strata.collect.TestHelper.coverBeanEquals;
import static com.opengamma.strata.collect.TestHelper.coverImmutableBean;
import static com.opengamma.strata.collect.TestHelper.date;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertSame;
import java.time.LocalDate;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.opengamma.strata.basics.currency.CurrencyPair;
import com.opengamma.strata.basics.currency.FxMatrix;
/**
* Test {@link FxForwardSensitivity}.
*/
@Test
public class FxForwardSensitivityTest {
private static final CurrencyPair CURRENCY_PAIR = CurrencyPair.of(EUR, GBP);
private static final LocalDate REFERENCE_DATE = LocalDate.of(2015, 11, 23);
private static final double SENSITIVITY = 1.34d;
public void test_of_withoutCurrency() {
FxForwardSensitivity test = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
assertEquals(test.getCurrency(), EUR);
assertEquals(test.getCurrencyPair(), CURRENCY_PAIR);
assertEquals(test.getReferenceCounterCurrency(), EUR);
assertEquals(test.getReferenceCurrency(), GBP);
assertEquals(test.getReferenceDate(), REFERENCE_DATE);
assertEquals(test.getSensitivity(), SENSITIVITY);
}
public void test_of_withCurrency() {
FxForwardSensitivity test = FxForwardSensitivity.of(CURRENCY_PAIR, EUR, REFERENCE_DATE, USD, SENSITIVITY);
assertEquals(test.getCurrency(), USD);
assertEquals(test.getCurrencyPair(), CURRENCY_PAIR);
assertEquals(test.getReferenceCounterCurrency(), GBP);
assertEquals(test.getReferenceCurrency(), EUR);
assertEquals(test.getReferenceDate(), REFERENCE_DATE);
assertEquals(test.getSensitivity(), SENSITIVITY);
}
public void test_of_wrongRefCurrency() {
assertThrowsIllegalArg(() -> FxForwardSensitivity.of(CURRENCY_PAIR, USD, REFERENCE_DATE, SENSITIVITY));
assertThrowsIllegalArg(() -> FxForwardSensitivity.of(CURRENCY_PAIR, USD, REFERENCE_DATE, USD, SENSITIVITY));
}
//-------------------------------------------------------------------------
public void test_withCurrency_same() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.withCurrency(EUR);
assertEquals(test, base);
}
public void test_withCurrency_other() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.withCurrency(USD);
assertEquals(test.getCurrency(), USD);
assertEquals(test.getCurrencyPair(), CURRENCY_PAIR);
assertEquals(test.getReferenceCounterCurrency(), EUR);
assertEquals(test.getReferenceCurrency(), GBP);
assertEquals(test.getReferenceDate(), REFERENCE_DATE);
assertEquals(test.getSensitivity(), SENSITIVITY);
}
//-------------------------------------------------------------------------
public void test_withSensitivity() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.withSensitivity(13.5d);
assertEquals(test.getCurrency(), EUR);
assertEquals(test.getCurrencyPair(), CURRENCY_PAIR);
assertEquals(test.getReferenceCounterCurrency(), EUR);
assertEquals(test.getReferenceCurrency(), GBP);
assertEquals(test.getReferenceDate(), REFERENCE_DATE);
assertEquals(test.getSensitivity(), 13.5d);
}
//-------------------------------------------------------------------------
public void test_compareKey() {
FxForwardSensitivity a1 = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, EUR, SENSITIVITY);
FxForwardSensitivity a2 = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, EUR, SENSITIVITY);
FxForwardSensitivity b = FxForwardSensitivity.of(CurrencyPair.of(GBP, USD), GBP, REFERENCE_DATE, EUR, SENSITIVITY);
FxForwardSensitivity c = FxForwardSensitivity.of(CURRENCY_PAIR, EUR, REFERENCE_DATE, GBP, SENSITIVITY);
FxForwardSensitivity d = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, JPY, SENSITIVITY);
FxForwardSensitivity e = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, date(2015, 9, 27), SENSITIVITY);
ZeroRateSensitivity other = ZeroRateSensitivity.of(GBP, date(2015, 9, 27), SENSITIVITY);
assertEquals(a1.compareKey(a2), 0);
assertEquals(a1.compareKey(b) < 0, true);
assertEquals(b.compareKey(a1) > 0, true);
assertEquals(a1.compareKey(c) < 0, true);
assertEquals(c.compareKey(a1) > 0, true);
assertEquals(a1.compareKey(d) < 0, true);
assertEquals(d.compareKey(a1) > 0, true);
assertEquals(a1.compareKey(e) > 0, true);
assertEquals(e.compareKey(a1) < 0, true);
assertEquals(a1.compareKey(other) < 0, true);
assertEquals(other.compareKey(a1) > 0, true);
}
//-------------------------------------------------------------------------
public void test_convertedTo() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
double rate = 1.4d;
FxMatrix matrix = FxMatrix.of(CurrencyPair.of(EUR, USD), rate);
FxForwardSensitivity test1 = (FxForwardSensitivity) base.convertedTo(USD, matrix);
FxForwardSensitivity expected = FxForwardSensitivity.of(
CURRENCY_PAIR, GBP, REFERENCE_DATE, USD, SENSITIVITY * rate);
assertEquals(test1, expected);
FxForwardSensitivity test2 = (FxForwardSensitivity) base.convertedTo(EUR, matrix);
assertEquals(test2, base);
}
//-------------------------------------------------------------------------
public void test_multipliedBy() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.multipliedBy(2.4d);
FxForwardSensitivity expected = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY * 2.4d);
assertEquals(test, expected);
}
//-------------------------------------------------------------------------
public void test_mapSensitivity() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.mapSensitivity(s -> 1d / s);
FxForwardSensitivity expected = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, 1d / SENSITIVITY);
assertEquals(test, expected);
}
//-------------------------------------------------------------------------
public void test_normalize() {
FxForwardSensitivity base = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity test = base.normalize();
assertEquals(test, base);
}
//-------------------------------------------------------------------------
public void test_combinedWith() {
FxForwardSensitivity base1 = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
FxForwardSensitivity base2 = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, 1.56d);
MutablePointSensitivities expected = new MutablePointSensitivities();
expected.add(base1).add(base2);
PointSensitivityBuilder test = base1.combinedWith(base2);
assertEquals(test, expected);
}
/**
 * Combining with an empty mutable collection should yield a collection containing just this sensitivity.
 */
public void test_combinedWith_mutable() {
  FxForwardSensitivity single = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
  PointSensitivityBuilder combined = single.combinedWith(new MutablePointSensitivities());
  MutablePointSensitivities onlyOne = new MutablePointSensitivities();
  onlyOne.add(single);
  assertEquals(combined, onlyOne);
}
//-------------------------------------------------------------------------
/**
 * buildInto should add this sensitivity to the supplied combination and return that same instance.
 */
public void test_buildInto() {
  FxForwardSensitivity sens = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
  MutablePointSensitivities target = new MutablePointSensitivities();
  MutablePointSensitivities result = sens.buildInto(target);
  assertSame(result, target);
  assertEquals(result.getSensitivities(), ImmutableList.of(sens));
}
//-------------------------------------------------------------------------
/**
 * build should produce a PointSensitivities holding exactly this one sensitivity.
 */
public void test_build() {
  FxForwardSensitivity sens = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
  PointSensitivities built = sens.build();
  assertEquals(built.getSensitivities(), ImmutableList.of(sens));
}
//-------------------------------------------------------------------------
/**
 * cloned should return the identical instance (covered as an immutable bean in coverage()).
 */
public void test_cloned() {
  FxForwardSensitivity sens = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
  assertSame(sens.cloned(), sens);
}
//-------------------------------------------------------------------------
/**
 * Exercises the generated bean code paths for test coverage.
 */
public void coverage() {
  FxForwardSensitivity first = FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY);
  coverImmutableBean(first);
  FxForwardSensitivity second = FxForwardSensitivity.of(CurrencyPair.of(USD, JPY), JPY, date(2015, 9, 27), 4.25d);
  coverBeanEquals(first, second);
}
/**
 * Serialization round-trip should preserve the sensitivity.
 */
public void test_serialization() {
  assertSerialization(FxForwardSensitivity.of(CURRENCY_PAIR, GBP, REFERENCE_DATE, SENSITIVITY));
}
}
| |
/*
* Copyright (C) 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.pustike.eventbus;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Test case for {@link EventBus}.
* @author Cliff Biffle
*/
public class EventBusTest extends TestCase {
private static final String EVENT = "Hello";
private static final String BUS_IDENTIFIER = "test-bus";
private EventBus bus;
@Override
protected void setUp() throws Exception {
super.setUp();
bus = new EventBus(BUS_IDENTIFIER);
}
public void testBasicCatcherDistribution() {
StringCatcher catcher = new StringCatcher();
bus.register(catcher);
bus.publish(EVENT);
List<String> events = catcher.getEvents();
assertEquals("Only one event should be delivered.", 1, events.size());
assertEquals("Correct string should be delivered.", EVENT, events.get(0));
}
/**
* Tests that events are distributed to any subscribers to their type or any supertype, including interfaces and
* superclasses.
*
* Also checks delivery ordering in such cases.
*/
public void testPolymorphicDistribution() {
// Three catchers for related types String, Object, and Comparable<?>.
// String isa Object
// String isa Comparable<?>
// Comparable<?> isa Object
StringCatcher stringCatcher = new StringCatcher();
final List<Object> objectEvents = new ArrayList<>();
Object objCatcher = new Object() {
@SuppressWarnings("unused")
@Subscribe
public void eat(Object food) {
objectEvents.add(food);
}
};
final List<Comparable<?>> compEvents = new ArrayList<>();
Object compCatcher = new Object() {
@SuppressWarnings("unused")
@Subscribe
public <E> void eat(Comparable<E> food) {
compEvents.add(food);
}
};
bus.register(stringCatcher);
bus.register(objCatcher);
bus.register(compCatcher);
// Two additional event types: Object and Comparable<?> (played by Integer)
Object objEvent = new Object();
Object compEvent = 6;
bus.publish(EVENT);
bus.publish(objEvent);
bus.publish(compEvent);
// Check the StringCatcher...
List<String> stringEvents = stringCatcher.getEvents();
assertEquals("Only one String should be delivered.",
1, stringEvents.size());
assertEquals("Correct string should be delivered.",
EVENT, stringEvents.get(0));
// Check the Catcher<Object>...
assertEquals("Three Objects should be delivered.",
3, objectEvents.size());
assertEquals("String fixture must be first object delivered.",
EVENT, objectEvents.get(0));
assertEquals("Object fixture must be second object delivered.",
objEvent, objectEvents.get(1));
assertEquals("Comparable fixture must be thirdobject delivered.",
compEvent, objectEvents.get(2));
// Check the Catcher<Comparable<?>>...
assertEquals("Two Comparable<?>s should be delivered.",
2, compEvents.size());
assertEquals("String fixture must be first comparable delivered.",
EVENT, compEvents.get(0));
assertEquals("Comparable fixture must be second comparable delivered.",
compEvent, compEvents.get(1));
}
public void testSubscriberThrowsException() throws Exception {
final RecordingSubscriberExceptionHandler exceptionHandler =
new RecordingSubscriberExceptionHandler();
final EventBus eventBus = new EventBus();
eventBus.register(exceptionHandler);
final RuntimeException exception =
new RuntimeException("but culottes have a tendancy to ride up!");
final Object subscriber = new Object() {
@Subscribe
public void throwExceptionOn(String message) {
throw exception;
}
};
eventBus.register(subscriber);
eventBus.publish(EVENT);
assertEquals("Cause should be available.",
exception, exceptionHandler.exception);
assertEquals("EventBus should be available.",
eventBus, exceptionHandler.exceptionEvent.getEventBus());
assertEquals("Event should be available.",
EVENT,
exceptionHandler.exceptionEvent.getEvent());
assertEquals("Subscriber should be available.",
subscriber, exceptionHandler.exceptionEvent.getSubscriber());
assertEquals("Method should be available.",
subscriber.getClass().getMethod("throwExceptionOn", String.class),
exceptionHandler.exceptionEvent.getSubscriberMethod());
}
public void testSubscriberThrowsExceptionHandlerThrowsException() {
final EventBus eventBus = new EventBus();
eventBus.register(new ThrowingSubscriberExceptionHandler());
final Object subscriber = new Object() {
@Subscribe
public void throwExceptionOn(String message) {
throw new RuntimeException();
}
};
eventBus.register(subscriber);
try {
eventBus.publish(EVENT);
} catch (RuntimeException e) {
fail("Exception should not be thrown.");
}
}
public void testDeadEventForwarding() {
GhostCatcher catcher = new GhostCatcher();
bus.register(catcher);
// A String -- an event for which noone has registered.
bus.publish(EVENT);
List<DeadEvent> events = catcher.getEvents();
assertEquals("One dead event should be delivered.", 1, events.size());
assertEquals("The dead event should wrap the original event.",
EVENT, events.get(0).getEvent());
}
public void testDeadEventPosting() {
GhostCatcher catcher = new GhostCatcher();
bus.register(catcher);
bus.publish(new DeadEvent(this, EVENT));
List<DeadEvent> events = catcher.getEvents();
assertEquals("The explicit DeadEvent should be delivered.",
1, events.size());
assertEquals("The dead event must not be re-wrapped.",
EVENT, events.get(0).getEvent());
}
public void testMissingSubscribe() {
bus.register(new Object());
}
public void testUnregister() {
StringCatcher catcher1 = new StringCatcher();
StringCatcher catcher2 = new StringCatcher();
try {
bus.unregister(catcher1);
} catch (IllegalArgumentException expected) {
fail("Attempting to unregister an unregistered object succeeded");
// Pustike: unlike guava, doesn't throw when unregistering a not-registered subscriber!
}
bus.register(catcher1);
bus.publish(EVENT);
bus.register(catcher2);
bus.publish(EVENT);
List<String> expectedEvents = new ArrayList<>();
expectedEvents.add(EVENT);
expectedEvents.add(EVENT);
assertEquals("Two correct events should be delivered.",
expectedEvents, catcher1.getEvents());
assertEquals("One correct event should be delivered.",
List.of(EVENT), catcher2.getEvents());
bus.unregister(catcher1);
bus.publish(EVENT);
assertEquals("Shouldn't catch any more events when unregistered.",
expectedEvents, catcher1.getEvents());
assertEquals("Two correct events should be delivered.",
expectedEvents, catcher2.getEvents());
try {
bus.unregister(catcher1);
} catch (IllegalArgumentException expected) {
fail("Attempting to unregister an unregistered object succeeded");
// Pustike: unlike guava, doesn't throw when unregistering a not-registered subscriber!
}
bus.unregister(catcher2);
bus.publish(EVENT);
assertEquals("Shouldn't catch any more events when unregistered.",
expectedEvents, catcher1.getEvents());
assertEquals("Shouldn't catch any more events when unregistered.",
expectedEvents, catcher2.getEvents());
}
// NOTE: This test will always pass if register() is thread-safe but may also
// pass if it isn't, though this is unlikely.
public void testRegisterThreadSafety() throws Exception {
List<StringCatcher> catchers = new CopyOnWriteArrayList<>();
List<Future<?>> futures = new ArrayList<>();
ExecutorService executor = Executors.newFixedThreadPool(10);
int numberOfCatchers = 10000;
for (int i = 0; i < numberOfCatchers; i++) {
futures.add(executor.submit(new Registrator(bus, catchers)));
}
for (int i = 0; i < numberOfCatchers; i++) {
futures.get(i).get();
}
assertEquals("Unexpected number of catchers in the list",
numberOfCatchers, catchers.size());
bus.publish(EVENT);
List<String> expectedEvents = List.of(EVENT);
for (StringCatcher catcher : catchers) {
assertEquals("One of the registered catchers did not receive an event.",
expectedEvents, catcher.getEvents());
}
}
/**
* Tests that bridge methods are not subscribed to events. In Java 8, annotations are included on the bridge method
* in addition to the original method, which causes both the original and bridge methods to be subscribed (since
* both are annotated @Subscribe) without specifically checking for bridge methods.
*/
public void testRegistrationWithBridgeMethod() {
final AtomicInteger calls = new AtomicInteger();
bus.register(new Callback<String>() {
@Subscribe
@Override
public void call(String s) {
calls.incrementAndGet();
}
});
bus.publish("hello");
assertEquals(1, calls.get());
}
/**
* Records thrown exception information.
*/
private static final class RecordingSubscriberExceptionHandler {
private ExceptionEvent exceptionEvent;
private Throwable exception;
@Subscribe
public void handleException(ExceptionEvent exceptionEvent) {
this.exceptionEvent = exceptionEvent;
this.exception = exceptionEvent.getCause();
}
}
private static final class ThrowingSubscriberExceptionHandler {
@Subscribe
public void handleException(ExceptionEvent exceptionEvent) {
throw new RuntimeException();
}
}
/**
* Runnable which registers a StringCatcher on an event bus and adds it to a list.
*/
private static class Registrator implements Runnable {
private final EventBus bus;
private final List<StringCatcher> catchers;
Registrator(EventBus bus, List<StringCatcher> catchers) {
this.bus = bus;
this.catchers = catchers;
}
@Override
public void run() {
StringCatcher catcher = new StringCatcher();
bus.register(catcher);
catchers.add(catcher);
}
}
/**
* A collector for DeadEvents.
* @author cbiffle
*/
public static class GhostCatcher {
private final List<DeadEvent> events = new ArrayList<>();
@Subscribe
public void ohNoesIHaveDied(DeadEvent event) {
events.add(event);
}
public List<DeadEvent> getEvents() {
return events;
}
}
public void testTypedEventSubscribers() {
final EventBus eventBus = new EventBus();
TypedEventSubscriber eventSubscriber = new TypedEventSubscriber();
eventBus.register(eventSubscriber);
eventBus.publish(new TypedEvent<>(""));
eventBus.publish(new TypedEvent<>(6));
assertEquals("One TypedEvent<String> should be delivered.", 1, eventSubscriber.typedStringEvents.size());
assertEquals("One TypedEvent<Integer> should be delivered.", 1, eventSubscriber.typedIntEvents.size());
}
public static class TypedEventSubscriber {
private final List<TypedEvent<String>> typedStringEvents = new ArrayList<>();
private final List<TypedEvent<Integer>> typedIntEvents = new ArrayList<>();
@Subscribe
public void onTypedStringEvent(TypedEvent<String> event) {
typedStringEvents.add(event);
}
@Subscribe
public void onTypedIntEvent(TypedEvent<Integer> event) {
typedIntEvents.add(event);
}
}
private interface Callback<T> {
void call(T t);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.arq.querybuilder.handlers;
import java.util.Map;
import org.apache.jena.graph.Node;
import org.apache.jena.query.Query;
import org.apache.jena.sparql.core.Var;
/**
* A class to handle all the handlers of a query builder and keep them in sync
* as needed.
*
*/
public class HandlerBlock {
    // Handlers that exist for every query type.
    private final AggregationHandler aggHandler;
    // Null unless the query is a CONSTRUCT query (see constructor).
    private final ConstructHandler constructHandler;
    private final DatasetHandler datasetHandler;
    private final PrologHandler prologHandler;
    // Null unless the query is a SELECT or DESCRIBE query (see constructor).
    private final SelectHandler selectHandler;
    private final SolutionModifierHandler modifierHandler;
    private final WhereHandler whereHandler;
    private final ValuesHandler valueHandler;

    /**
     * Constructor.
     *
     * @param query The query we are working with.
     */
    public HandlerBlock(Query query) {
        prologHandler = new PrologHandler(query);
        aggHandler = new AggregationHandler(query);
        whereHandler = new WhereHandler(query);
        datasetHandler = new DatasetHandler(query);
        modifierHandler = new SolutionModifierHandler(query);
        valueHandler = new ValuesHandler(query);
        /*
         * selectHandler and constructHandler may be null, so set them
         * according to the query type.
         */
        SelectHandler sTemp = null;
        ConstructHandler cTemp = null;
        if (query.isSelectType()) {
            sTemp = new SelectHandler(aggHandler);
        } else if (query.isAskType()) {
            // no change -- an ASK query uses neither a select nor a construct handler
        } else if (query.isDescribeType()) {
            sTemp = new SelectHandler(aggHandler);
        } else if (query.isConstructType()) {
            cTemp = new ConstructHandler(query);
        }
        selectHandler = sTemp;
        constructHandler = cTemp;
    }

    /**
     * Get the aggregation handler.
     *
     * @return the aggregation handler.
     */
    public AggregationHandler getAggregationHandler() {
        return aggHandler;
    }

    /**
     * Get the construct handler.
     *
     * @return the construct handler or null.
     */
    public ConstructHandler getConstructHandler() {
        return constructHandler;
    }

    /**
     * Get the dataset handler.
     *
     * @return the dataset handler.
     */
    public DatasetHandler getDatasetHandler() {
        return datasetHandler;
    }

    /**
     * Get the prolog handler.
     *
     * @return the prolog handler.
     */
    public PrologHandler getPrologHandler() {
        return prologHandler;
    }

    /**
     * Get the select handler.
     *
     * @return the select handler or null.
     */
    public SelectHandler getSelectHandler() {
        return selectHandler;
    }

    /**
     * Get the solution modifier handler.
     *
     * @return the solution modifier handler.
     */
    public SolutionModifierHandler getModifierHandler() {
        return modifierHandler;
    }

    /**
     * Get the where handler.
     *
     * @return the where handler.
     */
    public WhereHandler getWhereHandler() {
        return whereHandler;
    }

    /**
     * Get the value handler.
     *
     * @return the value handler.
     */
    public ValuesHandler getValueHandler() {
        return valueHandler;
    }

    /**
     * Add the prolog handler contents to this prolog handler.
     *
     * @param handler The prolog handler to add to this one.
     */
    public void addAll(PrologHandler handler) {
        prologHandler.addAll(handler);
    }

    /**
     * Add the aggregation handler contents to this aggregation handler.
     *
     * @param handler The aggregation handler to add to this one.
     */
    public void addAll(AggregationHandler handler) {
        aggHandler.addAll(handler);
    }

    /**
     * Add the construct handler contents to this construct handler. If this construct
     * handler is null or the handler argument is null this method does nothing.
     *
     * @param handler The construct handler to add to this one.
     */
    public void addAll(ConstructHandler handler) {
        if (constructHandler != null && handler != null) {
            constructHandler.addAll(handler);
        }
    }

    /**
     * Add the dataset handler contents to this dataset handler.
     *
     * @param handler The dataset handler to add to this one.
     */
    public void addAll(DatasetHandler handler) {
        datasetHandler.addAll(handler);
    }

    /**
     * Add the solution modifier handler contents to this solution modifier handler.
     *
     * @param handler The solution modifier handler to add to this one.
     */
    public void addAll(SolutionModifierHandler handler) {
        modifierHandler.addAll(handler);
    }

    /**
     * Add the select handler contents to this select handler. If this select
     * handler is null or the handler argument is null this method does nothing.
     *
     * @param handler The select handler to add to this one.
     */
    public void addAll(SelectHandler handler) {
        if (selectHandler != null && handler != null) {
            selectHandler.addAll(handler);
        }
    }

    /**
     * Add the where handler contents to this where handler.
     *
     * @param handler The where handler to add to this one.
     */
    public void addAll(WhereHandler handler) {
        whereHandler.addAll(handler);
    }

    /**
     * Add the values handler contents to this values handler.
     *
     * @param handler The values handler to add to this one.
     */
    public void addAll(ValuesHandler handler) {
        valueHandler.addAll(handler);
    }

    /**
     * Add all of the handlers in the handler block to this one. Any handler that is
     * null here or null in the handler argument is properly skipped.
     *
     * @param handler The handler block to add to this one.
     */
    public void addAll(HandlerBlock handler) {
        addAll(handler.aggHandler);
        addAll(handler.constructHandler);
        addAll(handler.selectHandler);
        addAll(handler.datasetHandler);
        addAll(handler.modifierHandler);
        addAll(handler.prologHandler);
        addAll(handler.whereHandler);
        addAll(handler.valueHandler);
    }

    /**
     * Set the variables in all the enclosed handlers in the proper order.
     *
     * NOTE(review): valueHandler is the only handler not updated here while build()
     * does include it -- confirm whether ValuesHandler variables are intentionally
     * excluded from substitution.
     *
     * @param values The map of values to set.
     */
    public void setVars(Map<Var, Node> values) {
        aggHandler.setVars(values);
        if (constructHandler != null) {
            constructHandler.setVars(values);
        }
        datasetHandler.setVars(values);
        prologHandler.setVars(values);
        if (selectHandler != null) {
            selectHandler.setVars(values);
        }
        modifierHandler.setVars(values);
        whereHandler.setVars(values);
    }

    /**
     * Build all the enclosed handlers in the proper order.
     */
    public void build() {
        prologHandler.build();
        if (selectHandler != null) {
            selectHandler.build();
        }
        if (constructHandler != null) {
            constructHandler.build();
        }
        datasetHandler.build();
        modifierHandler.build();
        whereHandler.build();
        aggHandler.build();
        valueHandler.build();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.UnaryOperator;
import java.util.stream.IntStream;
import javax.cache.configuration.Factory;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteState;
import org.apache.ignite.Ignition;
import org.apache.ignite.IgnitionListener;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.failure.FailureHandler;
import org.apache.ignite.failure.StopNodeFailureHandler;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.maintenance.MaintenanceFileStore;
import org.apache.ignite.internal.pagemem.store.PageStoreCollection;
import org.apache.ignite.internal.processors.cache.CacheGroupContext;
import org.apache.ignite.internal.processors.cache.persistence.defragmentation.DefragmentationFileUtils;
import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory;
import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStore;
import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager;
import org.apache.ignite.internal.util.lang.IgniteThrowableConsumer;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.maintenance.MaintenanceRegistry;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.internal.pagemem.PageIdAllocator.INDEX_PARTITION;
import static org.apache.ignite.internal.processors.cache.persistence.defragmentation.DefragmentationFileUtils.defragmentationCompletionMarkerFile;
import static org.apache.ignite.internal.processors.cache.persistence.defragmentation.DefragmentationFileUtils.defragmentedIndexFile;
import static org.apache.ignite.internal.processors.cache.persistence.defragmentation.DefragmentationFileUtils.defragmentedPartFile;
import static org.apache.ignite.internal.processors.cache.persistence.defragmentation.DefragmentationFileUtils.defragmentedPartMappingFile;
import static org.apache.ignite.internal.processors.cache.persistence.defragmentation.maintenance.DefragmentationParameters.toStore;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.DFLT_STORE_DIR;
/** */
public class IgnitePdsDefragmentationTest extends GridCommonAbstractTest {
/** Name of the second cache sharing the cache group. */
public static final String CACHE_2_NAME = "cache2";

/** Number of partitions configured in the group's affinity function. */
public static final int PARTS = 5;

/** Key count for test data; presumably consumed by fillCache() (defined outside this excerpt) -- confirm. */
public static final int ADDED_KEYS_COUNT = 1500;

/** Name of the cache group shared by both caches. */
protected static final String GRP_NAME = "group";
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
    super.beforeTest();
    // Start from a clean slate: no live nodes and no persistence files left by earlier tests.
    stopAllGrids(true);
    cleanPersistenceDir();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
    super.afterTest();
    // Tear down nodes and wipe persistence so subsequent tests are unaffected.
    stopAllGrids(true);
    cleanPersistenceDir();
}
/** {@inheritDoc} */
@Override protected FailureHandler getFailureHandler(String igniteInstanceName) {
    // Failover scenarios in this test expect the node to stop itself on a critical failure.
    return new StopNodeFailureHandler();
}
/** Expiry policy factory for the second cache: one fixed TTL for creation, access and update. */
protected static class PolicyFactory implements Factory<ExpiryPolicy> {
    /** Serial version uid. */
    private static final long serialVersionUID = 0L;

    /** Shared TTL (13 s) for all three expiry events; Duration is an immutable value, so one instance suffices. */
    private static final Duration TTL = new Duration(TimeUnit.MILLISECONDS, 13000);

    /** {@inheritDoc} */
    @Override public ExpiryPolicy create() {
        return new ExpiryPolicy() {
            /** {@inheritDoc} */
            @Override public Duration getExpiryForCreation() {
                return TTL;
            }

            /** {@inheritDoc} */
            @Override public Duration getExpiryForAccess() {
                return TTL;
            }

            /** {@inheritDoc} */
            @Override public Duration getExpiryForUpdate() {
                return TTL;
            }
        };
    }
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
    IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
    // Stable consistent id so a restarted node reuses the same persistence directory.
    cfg.setConsistentId(igniteInstanceName);
    DataStorageConfiguration dsCfg = new DataStorageConfiguration();
    dsCfg.setWalSegmentSize(4 * 1024 * 1024);
    dsCfg.setDefaultDataRegionConfiguration(
        new DataRegionConfiguration()
            .setInitialSize(100L * 1024 * 1024)
            .setMaxSize(1024L * 1024 * 1024)
            .setPersistenceEnabled(true) // Defragmentation requires a persistent data region.
    );
    cfg.setDataStorageConfiguration(dsCfg);
    // Two caches in the same group: a plain transactional one and one with an expiry policy.
    CacheConfiguration<?, ?> cache1Cfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME)
        .setAtomicityMode(TRANSACTIONAL)
        .setGroupName(GRP_NAME)
        .setAffinity(new RendezvousAffinityFunction(false, PARTS));
    CacheConfiguration<?, ?> cache2Cfg = new CacheConfiguration<>(CACHE_2_NAME)
        .setAtomicityMode(TRANSACTIONAL)
        .setGroupName(GRP_NAME)
        .setExpiryPolicyFactory(new PolicyFactory())
        .setAffinity(new RendezvousAffinityFunction(false, PARTS));
    cfg.setCacheConfiguration(cache1Cfg, cache2Cfg);
    return cfg;
}
/**
 * Basic test scenario. Does following steps:
 * - Start node;
 * - Fill cache;
 * - Remove part of data;
 * - Stop node;
 * - Start node in defragmentation mode;
 * - Stop node;
 * - Start node;
 * - Check that partitions became smaller;
 * - Check that cache is accessible and works just fine.
 *
 * @throws Exception If failed.
 */
@Test
public void testSuccessfulDefragmentation() throws Exception {
    IgniteEx ig = startGrid(0);
    ig.cluster().state(ClusterState.ACTIVE);
    fillCache(ig.cache(DEFAULT_CACHE_NAME));
    forceCheckpoint(ig);
    createMaintenanceRecord();
    stopGrid(0);
    File workDir = resolveCacheWorkDir(ig);
    // Record partition and index file sizes before defragmentation for later comparison.
    long[] oldPartLen = partitionSizes(workDir);
    long oldIdxFileLen = new File(workDir, FilePageStoreManager.INDEX_FILE_NAME).length();
    // Restart enters maintenance mode (record was created above) and runs defragmentation.
    startGrid(0);
    waitForDefragmentation(0);
    assertEquals(ClusterState.INACTIVE, grid(0).context().state().clusterState().state());
    // Activation must be rejected while the node is still in maintenance mode.
    GridTestUtils.assertThrowsAnyCause(
        log,
        () -> {
            grid(0).cluster().state(ClusterState.ACTIVE);
            return null;
        },
        IgniteCheckedException.class,
        "Failed to activate cluster (node is in maintenance mode)"
    );
    long[] newPartLen = partitionSizes(workDir);
    // Every partition file must shrink; the index file must at least not grow.
    for (int p = 0; p < PARTS; p++)
        assertTrue(newPartLen[p] < oldPartLen[p]);
    long newIdxFileLen = new File(workDir, FilePageStoreManager.INDEX_FILE_NAME).length();
    assertTrue(newIdxFileLen <= oldIdxFileLen);
    File completionMarkerFile = defragmentationCompletionMarkerFile(workDir);
    assertTrue(completionMarkerFile.exists());
    stopGrid(0);
    // A regular restart finishes maintenance: marker is removed and the cache is usable again.
    IgniteEx ig0 = startGrid(0);
    ig0.cluster().state(ClusterState.ACTIVE);
    assertFalse(completionMarkerFile.exists());
    validateCache(grid(0).cache(DEFAULT_CACHE_NAME));
    validateLeftovers(workDir);
}
/**
 * Returns file sizes for the index partition followed by every user partition of the given cache group.
 *
 * @param grp Cache group context.
 * @return Sizes array: index partition first, then partitions {@code 0..parts-1}.
 */
protected long[] partitionSizes(CacheGroupContext grp) {
    final int grpId = grp.groupId();
    final int parts = grp.shared().affinity().affinity(grpId).partitions();
    final PageStoreCollection stores = (PageStoreCollection)grp.shared().pageStore();
    long[] sizes = new long[parts + 1];
    for (int i = 0; i <= parts; i++) {
        // Slot 0 holds the index partition; slots 1..parts hold user partitions 0..parts-1.
        int partId = i == 0 ? INDEX_PARTITION : i - 1;
        try {
            FilePageStore store = (FilePageStore)stores.getStore(grpId, partId);
            sizes[i] = new File(store.getFileAbsolutePath()).length();
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }
    }
    return sizes;
}
/**
 * @param ig Ignite node whose cache group directory is resolved (by masked node name).
 * @return Working directory for cache group {@link IgnitePdsDefragmentationTest#GRP_NAME}.
 * @throws IgniteCheckedException If failed for some reason, like if it's a file instead of directory.
 */
private File resolveCacheWorkDir(IgniteEx ig) throws IgniteCheckedException {
    File dbWorkDir = U.resolveWorkDirectory(U.defaultWorkDirectory(), DFLT_STORE_DIR, false);
    File nodeWorkDir = new File(dbWorkDir, U.maskForFileName(ig.name()));
    return new File(nodeWorkDir, FilePageStoreManager.CACHE_GRP_DIR_PREFIX + GRP_NAME);
}
/**
 * Force checkpoint and wait for it so all partitions will be in their final state after restart if no more data is
 * uploaded.
 *
 * @param ig Ignite node.
 * @throws IgniteCheckedException If checkpoint failed for some reason.
 */
private void forceCheckpoint(IgniteEx ig) throws IgniteCheckedException {
    // Block until the checkpoint reaches the FINISHED state.
    ig.context().cache().context().database()
        .forceCheckpoint("testDefrag")
        .futureFor(CheckpointState.FINISHED)
        .get();
}
/**
 * Blocks until defragmentation on node {@code idx} reports completion.
 *
 * @param idx Grid index.
 * @throws IgniteCheckedException If waiting on the completion future fails.
 */
protected void waitForDefragmentation(int idx) throws IgniteCheckedException {
    IgniteEx ig = grid(idx);
    ((GridCacheDatabaseSharedManager)ig.context().cache().context().database())
        .defragmentationManager()
        .completionFuture()
        .get();
}
/**
 * Registers a defragmentation maintenance task on node 0 covering the default cache
 * plus any additional caches supplied.
 *
 * @param cacheNames Optional extra cache names to include in the task.
 * @throws IgniteCheckedException If the maintenance task could not be registered.
 */
protected void createMaintenanceRecord(String... cacheNames) throws IgniteCheckedException {
    MaintenanceRegistry mntcReg = grid(0).context().maintenanceRegistry();
    List<String> cacheList = new ArrayList<>();
    cacheList.add(DEFAULT_CACHE_NAME);
    if (cacheNames != null && cacheNames.length > 0)
        cacheList.addAll(Arrays.asList(cacheNames));
    mntcReg.registerMaintenanceTask(toStore(cacheList));
}
/**
 * Returns array that contains sizes of partition files in the given working directory. Assumes that partitions
 * {@code 0} to {@code PARTS - 1} exist in that dir.
 *
 * @param workDir Working directory.
 * @return The array.
 */
protected long[] partitionSizes(File workDir) {
    long[] sizes = new long[PARTS];
    for (int p = 0; p < PARTS; p++) {
        File partFile = new File(workDir, String.format(FilePageStoreManager.PART_FILE_TEMPLATE, p));
        sizes[p] = partFile.length();
    }
    return sizes;
}
/**
 * Checks that plain node start after failed defragmentation will finish batch renaming.
 *
 * @throws Exception If failed.
 */
@Test
public void testFailoverRestartWithoutDefragmentation() throws Exception {
    testFailover(workDir -> {
        try {
            // Remove the maintenance record so the node restarts in normal (non-maintenance) mode.
            File mntcRecFile = new File(workDir.getParent(), MaintenanceFileStore.MAINTENANCE_FILE_NAME);
            assertTrue(mntcRecFile.exists());
            Files.delete(mntcRecFile.toPath());
            startGrid(0);
            // A plain start must still complete the pending batch rename of defragmented files.
            validateLeftovers(workDir);
        }
        catch (Exception e) {
            throw new IgniteCheckedException(e);
        }
        finally {
            // Restore the maintenance record and stop so the common testFailover flow can continue.
            createMaintenanceRecord();
            stopGrid(0);
        }
    });
}
/**
 * Checks that second start in defragmentation mode will finish defragmentation if no completion marker was found.
 *
 * @throws Exception If failed.
 */
@Test
public void testFailoverOnLastStage() throws Exception {
    // No file manipulation: the node failed right before writing the completion marker.
    testFailover(workDir -> {});
}
/**
 * Checks that second start in defragmentation mode will finish defragmentation if index was not defragmented.
 *
 * @throws Exception If failed.
 */
@Test
public void testFailoverIncompletedIndex() throws Exception {
    // Rename the defragmented index back to its tmp name to simulate an interrupted index defragmentation.
    testFailover(workDir -> move(
        DefragmentationFileUtils.defragmentedIndexFile(workDir),
        DefragmentationFileUtils.defragmentedIndexTmpFile(workDir)
    ));
}
/**
 * Checks that second start in defragmentation mode will finish defragmentation if partition was not defragmented.
 *
 * @throws Exception If failed.
 */
@Test
public void testFailoverIncompletedPartition1() throws Exception {
    testFailover(workDir -> {
        // Drop the defragmented index and make the last partition look half-written (tmp name).
        DefragmentationFileUtils.defragmentedIndexFile(workDir).delete();
        move(
            DefragmentationFileUtils.defragmentedPartFile(workDir, PARTS - 1),
            DefragmentationFileUtils.defragmentedPartTmpFile(workDir, PARTS - 1)
        );
    });
}
/**
 * Checks that second start in defragmentation mode will finish defragmentation if no mapping was found for partition.
 *
 * @throws Exception If failed.
 */
@Test
public void testFailoverIncompletedPartition2() throws Exception {
    testFailover(workDir -> {
        // Remove the defragmented index and the last partition's link mapping file
        // to emulate a crash before that partition's mapping was persisted.
        DefragmentationFileUtils.defragmentedIndexFile(workDir).delete();
        DefragmentationFileUtils.defragmentedPartMappingFile(workDir, PARTS - 1).delete();
    });
}
/**
 * Moves {@code from} to {@code to}, replacing the destination if it already exists.
 *
 * @param from Source file.
 * @param to Destination file.
 * @throws IgniteCheckedException If the underlying filesystem move fails.
 */
private void move(File from, File to) throws IgniteCheckedException {
    Path src = from.toPath();
    Path dst = to.toPath();
    try {
        Files.move(src, dst, StandardCopyOption.REPLACE_EXISTING);
    }
    catch (IOException ioEx) {
        throw new IgniteCheckedException(ioEx);
    }
}
/**
 * Common failover scenario: fills the cache, lets defragmentation fail on completion-marker
 * creation, applies the given corruption step to the cache work directory, then verifies that
 * a subsequent restart completes defragmentation and leaves a valid cache with no leftovers.
 *
 * @param c Step executed on the cache work directory between the failed and the recovering start.
 * @throws Exception If failed.
 */
private void testFailover(IgniteThrowableConsumer<File> c) throws Exception {
    IgniteEx ig = startGrid(0);
    ig.cluster().state(ClusterState.ACTIVE);
    fillCache(ig.cache(DEFAULT_CACHE_NAME));
    forceCheckpoint(ig);
    createMaintenanceRecord();
    stopGrid(0);
    File workDir = resolveCacheWorkDir(ig);
    //Defragmentation should fail when node starts.
    startAndAwaitNodeFail(workDir);
    c.accept(workDir);
    startGrid(0); // Fails here VERY rarely. WTF?
    waitForDefragmentation(0);
    stopGrid(0);
    // Everything must be completed.
    startGrid(0).cluster().state(ClusterState.ACTIVE);
    validateCache(grid(0).cache(DEFAULT_CACHE_NAME));
    validateLeftovers(workDir);
}
/**
 * Starts grid 0 with a file IO factory that refuses to create the defragmentation completion
 * marker for {@code workDir}, then waits until the node stops on that failure.
 *
 * @param workDir Cache group working directory whose completion-marker creation must fail.
 * @throws IgniteInterruptedCheckedException If fail.
 */
private void startAndAwaitNodeFail(File workDir) throws IgniteInterruptedCheckedException {
    String errMsg = "Failed to create defragmentation completion marker.";
    AtomicBoolean errOccurred = new AtomicBoolean();
    // Wrap the configured file IO factory: throw for the completion marker file, delegate otherwise.
    UnaryOperator<IgniteConfiguration> cfgOp = cfg -> {
        DataStorageConfiguration dsCfg = cfg.getDataStorageConfiguration();
        FileIOFactory delegate = dsCfg.getFileIOFactory();
        dsCfg.setFileIOFactory((file, modes) -> {
            if (file.equals(defragmentationCompletionMarkerFile(workDir))) {
                errOccurred.set(true);
                throw new IOException(errMsg);
            }
            return delegate.create(file, modes);
        });
        return cfg;
    };
    AtomicBoolean nodeStopped = new AtomicBoolean();
    // Lifecycle listener: flag set once grid 0 stops because of the injected failure.
    IgnitionListener nodeStopListener = (name, state) -> {
        if (name.equals(getTestIgniteInstanceName(0)) && state == IgniteState.STOPPED_ON_FAILURE)
            nodeStopped.set(true);
    };
    Ignition.addListener(nodeStopListener);
    try {
        try {
            // Expected to fail: the injected IO factory throws on marker creation.
            startGrid(0, cfgOp);
        }
        catch (Exception ignore) {
            // No-op.
        }
        // Failed node can leave interrupted status of the thread that needs to be cleared,
        // otherwise following "wait" wouldn't work.
        // This call can't be moved inside of "catch" block because interruption can actually be silent.
        Thread.interrupted();
        assertTrue(GridTestUtils.waitForCondition(errOccurred::get, 3_000L));
        assertTrue(GridTestUtils.waitForCondition(nodeStopped::get, 3_000L));
    }
    finally {
        Ignition.removeListener(nodeStopListener);
    }
}
/**
 * Asserts that no intermediate defragmentation artifacts (defragmented index, partition
 * files or partition mappings) remain in the given working directory.
 *
 * @param workDir Cache group working directory to inspect.
 */
public void validateLeftovers(File workDir) {
    assertFalse(defragmentedIndexFile(workDir).exists());
    int part = 0;
    while (part < PARTS) {
        assertFalse(defragmentedPartMappingFile(workDir, part).exists());
        assertFalse(defragmentedPartFile(workDir, part).exists());
        part++;
    }
}
/**
 * Checks that after defragmentation the cache group directory contains regular partition
 * files and no leftover intermediate "part-dfrg-" files.
 *
 * @throws Exception If failed.
 */
@Test
public void testDefragmentedPartitionCreated() throws Exception {
    IgniteEx ig = startGrid(0);
    ig.cluster().state(ClusterState.ACTIVE);
    fillCache(ig.cache(DEFAULT_CACHE_NAME));
    fillCache(ig.getOrCreateCache(CACHE_2_NAME));
    createMaintenanceRecord();
    stopGrid(0);
    startGrid(0);
    waitForDefragmentation(0);
    File workDir = U.resolveWorkDirectory(U.defaultWorkDirectory(), DFLT_STORE_DIR, false);
    // Fix: original locals were named backwards — "cachePartFile" actually held the
    // intermediate "part-dfrg-" file and "defragCachePartFile" the regular "part-" file.
    // Last seen intermediate defragmentation file; must be absent once defragmentation completed.
    AtomicReference<File> leftoverDefragPartFile = new AtomicReference<>();
    // Last seen regular partition file; must be present once defragmentation completed.
    AtomicReference<File> regularPartFile = new AtomicReference<>();
    Files.walkFileTree(workDir.toPath(), new FileVisitor<Path>() {
        @Override public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes basicFileAttributes) throws IOException {
            return FileVisitResult.CONTINUE;
        }
        @Override public FileVisitResult visitFile(Path path, BasicFileAttributes basicFileAttributes) throws IOException {
            if (path.toString().contains("cacheGroup-group")) {
                File file = path.toFile();
                // Check "part-dfrg-" first: such names also match the broader "part-" test below.
                if (file.getName().contains("part-dfrg-"))
                    leftoverDefragPartFile.set(file);
                else if (file.getName().contains("part-"))
                    regularPartFile.set(file);
            }
            return FileVisitResult.CONTINUE;
        }
        @Override public FileVisitResult visitFileFailed(Path path, IOException e) throws IOException {
            return FileVisitResult.CONTINUE;
        }
        @Override public FileVisitResult postVisitDirectory(Path path, IOException e) throws IOException {
            return FileVisitResult.CONTINUE;
        }
    });
    assertNull(leftoverDefragPartFile.get()); //TODO Fails.
    assertNotNull(regularPartFile.get());
}
/**
 * Fill cache using integer keys.
 *
 * @param cache Cache to fill.
 */
protected void fillCache(IgniteCache<Integer, Object> cache) {
    fillCache(Function.identity(), cache);
}
/**
 * Fills the cache with {@code ADDED_KEYS_COUNT} entries carrying 8 KB random payloads and
 * then removes every even-indexed key, leaving the persistence files fragmented.
 *
 * @param keyMapper Maps the integer key index to the actual cache key.
 * @param cache Cache to fill.
 * @param <T> Cache key type.
 */
protected <T> void fillCache(Function<Integer, T> keyMapper, IgniteCache<T, Object> cache) {
    // Fix: the original allocated a fresh Random on every loop iteration; one instance suffices.
    Random rnd = new Random();
    try (IgniteDataStreamer<T, Object> ds = grid(0).dataStreamer(cache.getName())) {
        for (int i = 0; i < ADDED_KEYS_COUNT; i++) {
            byte[] val = new byte[8192];
            rnd.nextBytes(val);
            ds.addData(keyMapper.apply(i), val);
        }
    }
    try (IgniteDataStreamer<T, Object> ds = grid(0).dataStreamer(cache.getName())) {
        ds.allowOverwrite(true);
        // Remove even keys to create free space inside pages, which defragmentation should reclaim.
        for (int i = 0; i <= ADDED_KEYS_COUNT / 2; i++)
            ds.removeData(keyMapper.apply(i * 2));
    }
}
/**
 * Verifies cache contents after defragmentation: even keys were removed by
 * {@code fillCache}, so they must be absent; odd keys must still be present.
 *
 * @param cache Cache to check.
 */
public void validateCache(IgniteCache<Object, Object> cache) {
    for (int key = 0; key < ADDED_KEYS_COUNT; key++) {
        boolean expectRemoved = key % 2 == 0;
        Object val = cache.get(key);
        if (expectRemoved)
            assertNull(val);
        else
            assertNotNull(val);
    }
}
/**
 * Start node, wait for defragmentation and validate that sizes of caches are less than those before the defragmentation.
 *
 * @param gridId Idx of ignite grid.
 * @param groups Cache groups to check.
 * @throws Exception If failed.
 */
protected void defragmentAndValidateSizesDecreasedAfterDefragmentation(int gridId, CacheGroupContext... groups) throws Exception {
    for (CacheGroupContext grp : groups) {
        final long[] oldPartLen = partitionSizes(grp);
        // Fix: honor the gridId parameter — the original hard-coded grid 0 and ignored it.
        startGrid(gridId);
        waitForDefragmentation(gridId);
        stopGrid(gridId);
        final long[] newPartLen = partitionSizes(grp);
        // No partition may grow, and at least one must actually shrink.
        boolean atLeastOneSmaller = false;
        for (int p = 0; p < oldPartLen.length; p++) {
            assertTrue(newPartLen[p] <= oldPartLen[p]);
            if (newPartLen[p] < oldPartLen[p])
                atLeastOneSmaller = true;
        }
        assertTrue(atLeastOneSmaller);
    }
}
}
| |
package edu.asu.plp.tool.prototype.model;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import javafx.beans.InvalidationListener;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
/**
 * An {@link ObservableList} implementation whose state lives in an internal observable
 * array list; every {@code List} operation and listener registration is forwarded to
 * that backing list, which also fires all change notifications.
 *
 * @param <E> Type of the elements held by this list.
 */
public class ArrayListProperty<E> implements ObservableList<E>
{
	/** Observable list that actually stores the elements and fires change events. */
	private final ObservableList<E> backingList;
	
	/** Creates an empty list backed by {@link FXCollections#observableArrayList()}. */
	public ArrayListProperty()
	{
		this.backingList = FXCollections.observableArrayList();
	}
	
	@Override
	public boolean add(E element)
	{
		return backingList.add(element);
	}
	
	@Override
	public void add(int index, E element)
	{
		backingList.add(index, element);
	}
	
	@Override
	public boolean addAll(Collection<? extends E> elements)
	{
		return backingList.addAll(elements);
	}
	
	@Override
	public boolean addAll(int index, Collection<? extends E> elements)
	{
		return backingList.addAll(index, elements);
	}
	
	@Override
	public void clear()
	{
		backingList.clear();
	}
	
	@Override
	public boolean contains(Object target)
	{
		return backingList.contains(target);
	}
	
	@Override
	public boolean containsAll(Collection<?> targets)
	{
		return backingList.containsAll(targets);
	}
	
	@Override
	public E get(int index)
	{
		return backingList.get(index);
	}
	
	@Override
	public int indexOf(Object target)
	{
		return backingList.indexOf(target);
	}
	
	@Override
	public boolean isEmpty()
	{
		return backingList.isEmpty();
	}
	
	@Override
	public Iterator<E> iterator()
	{
		return backingList.iterator();
	}
	
	@Override
	public int lastIndexOf(Object target)
	{
		return backingList.lastIndexOf(target);
	}
	
	@Override
	public ListIterator<E> listIterator()
	{
		return backingList.listIterator();
	}
	
	@Override
	public ListIterator<E> listIterator(int startIndex)
	{
		return backingList.listIterator(startIndex);
	}
	
	@Override
	public boolean remove(Object target)
	{
		return backingList.remove(target);
	}
	
	@Override
	public E remove(int index)
	{
		return backingList.remove(index);
	}
	
	@Override
	public boolean removeAll(Collection<?> targets)
	{
		return backingList.removeAll(targets);
	}
	
	@Override
	public boolean retainAll(Collection<?> targets)
	{
		return backingList.retainAll(targets);
	}
	
	@Override
	public E set(int index, E element)
	{
		return backingList.set(index, element);
	}
	
	@Override
	public int size()
	{
		return backingList.size();
	}
	
	@Override
	public List<E> subList(int fromIndex, int toIndex)
	{
		return backingList.subList(fromIndex, toIndex);
	}
	
	@Override
	public Object[] toArray()
	{
		return backingList.toArray();
	}
	
	@Override
	public <T> T[] toArray(T[] destination)
	{
		return backingList.toArray(destination);
	}
	
	@Override
	public void addListener(InvalidationListener listener)
	{
		backingList.addListener(listener);
	}
	
	@Override
	public void removeListener(InvalidationListener listener)
	{
		backingList.removeListener(listener);
	}
	
	@Override
	@SuppressWarnings("unchecked")
	public boolean addAll(E... elements)
	{
		return backingList.addAll(elements);
	}
	
	@Override
	public void addListener(ListChangeListener<? super E> listener)
	{
		backingList.addListener(listener);
	}
	
	@Override
	public void remove(int fromIndex, int toIndex)
	{
		backingList.remove(fromIndex, toIndex);
	}
	
	@Override
	@SuppressWarnings("unchecked")
	public boolean removeAll(E... elements)
	{
		return backingList.removeAll(elements);
	}
	
	@Override
	public void removeListener(ListChangeListener<? super E> listener)
	{
		backingList.removeListener(listener);
	}
	
	@Override
	@SuppressWarnings("unchecked")
	public boolean retainAll(E... elements)
	{
		return backingList.retainAll(elements);
	}
	
	@Override
	@SuppressWarnings("unchecked")
	public boolean setAll(E... elements)
	{
		return backingList.setAll(elements);
	}
	
	@Override
	public boolean setAll(Collection<? extends E> elements)
	{
		return backingList.setAll(elements);
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.