gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package net.bytebuddy.description.type;
import net.bytebuddy.description.field.FieldList;
import net.bytebuddy.test.utility.ObjectPropertyAssertion;
import org.junit.Before;
import org.junit.Test;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link TypeDescription.Generic.Visitor.Assigner}, the visitor that decides
 * whether one generic type is assignable from another.
 *
 * <p>Each test resolves its generic type fixtures reflectively from the fields of the nested
 * {@link GenericTypes} sample class (set up in {@link #setUp()}) and then checks a single
 * source/target assignability pairing. Wildcard types are never valid assignment endpoints,
 * so those cases are expected to throw {@link IllegalArgumentException}.
 */
public class TypeDescriptionGenericVisitorAssignerTest {

    private TypeDescription.Generic collectionWildcard, collectionRaw;

    private TypeDescription.Generic collectionTypeVariableT, collectionTypeVariableS, collectionTypeVariableU;

    private TypeDescription.Generic collectionUpperBoundTypeVariableT, collectionUpperBoundTypeVariableS, collectionUpperBoundTypeVariableU;

    private TypeDescription.Generic collectionLowerBoundTypeVariableT, collectionLowerBoundTypeVariableS, collectionLowerBoundTypeVariableU;

    private TypeDescription.Generic listRaw, listWildcard;

    private TypeDescription.Generic abstractListRaw, arrayListRaw, arrayListWildcard;

    private TypeDescription.Generic callableWildcard;

    private TypeDescription.Generic arrayListTypeVariableT, arrayListTypeVariableS;

    private TypeDescription.Generic collectionRawArray, listRawArray, listWildcardArray, arrayListRawArray;

    private TypeDescription.Generic stringArray, objectArray, objectNestedArray;

    private TypeDescription.Generic unboundWildcard;

    private TypeDescription.Generic typeVariableT, typeVariableS, typeVariableU, typeVariableV;

    private TypeDescription.Generic arrayTypeVariableT, arrayTypeVariableS, arrayTypeVariableU;

    private TypeDescription.Generic arrayNestedTypeVariableT;

    /**
     * Resolves every fixture from the declared fields of {@link GenericTypes}. Type variables
     * (e.g. {@code typeVariableT}) are extracted as the sole type argument of the corresponding
     * {@code ArrayList<...>} field; the unbound wildcard is extracted from {@code List<?>}.
     */
    @Before
    public void setUp() throws Exception {
        FieldList<?> fields = new TypeDescription.ForLoadedType(GenericTypes.class).getDeclaredFields();
        collectionRaw = fields.filter(named("collectionRaw")).getOnly().getType();
        collectionWildcard = fields.filter(named("collectionWildcard")).getOnly().getType();
        collectionTypeVariableT = fields.filter(named("collectionTypeVariableT")).getOnly().getType();
        collectionTypeVariableS = fields.filter(named("collectionTypeVariableS")).getOnly().getType();
        collectionTypeVariableU = fields.filter(named("collectionTypeVariableU")).getOnly().getType();
        collectionUpperBoundTypeVariableT = fields.filter(named("collectionUpperBoundTypeVariableT")).getOnly().getType();
        collectionUpperBoundTypeVariableS = fields.filter(named("collectionUpperBoundTypeVariableS")).getOnly().getType();
        collectionUpperBoundTypeVariableU = fields.filter(named("collectionUpperBoundTypeVariableU")).getOnly().getType();
        collectionLowerBoundTypeVariableT = fields.filter(named("collectionLowerBoundTypeVariableT")).getOnly().getType();
        collectionLowerBoundTypeVariableS = fields.filter(named("collectionLowerBoundTypeVariableS")).getOnly().getType();
        collectionLowerBoundTypeVariableU = fields.filter(named("collectionLowerBoundTypeVariableU")).getOnly().getType();
        listRaw = fields.filter(named("listRaw")).getOnly().getType();
        listWildcard = fields.filter(named("listWildcard")).getOnly().getType();
        arrayListTypeVariableT = fields.filter(named("arrayListTypeVariableT")).getOnly().getType();
        arrayListTypeVariableS = fields.filter(named("arrayListTypeVariableS")).getOnly().getType();
        // Only needed locally as carriers for the U and V type variables below.
        TypeDescription.Generic arrayListTypeVariableU = fields.filter(named("arrayListTypeVariableU")).getOnly().getType();
        TypeDescription.Generic arrayListTypeVariableV = fields.filter(named("arrayListTypeVariableV")).getOnly().getType();
        abstractListRaw = fields.filter(named("abstractListRaw")).getOnly().getType();
        callableWildcard = fields.filter(named("callableWildcard")).getOnly().getType();
        arrayListRaw = fields.filter(named("arrayListRaw")).getOnly().getType();
        arrayListWildcard = fields.filter(named("arrayListWildcard")).getOnly().getType();
        collectionRawArray = fields.filter(named("collectionRawArray")).getOnly().getType();
        listRawArray = fields.filter(named("listRawArray")).getOnly().getType();
        listWildcardArray = fields.filter(named("listWildcardArray")).getOnly().getType();
        arrayListRawArray = fields.filter(named("arrayListRawArray")).getOnly().getType();
        stringArray = new TypeDescription.Generic.OfNonGenericType.ForLoadedType(String[].class);
        objectArray = new TypeDescription.Generic.OfNonGenericType.ForLoadedType(Object[].class);
        objectNestedArray = new TypeDescription.Generic.OfNonGenericType.ForLoadedType(Object[][].class);
        unboundWildcard = listWildcard.getTypeArguments().getOnly();
        typeVariableT = arrayListTypeVariableT.getTypeArguments().getOnly();
        typeVariableS = arrayListTypeVariableS.getTypeArguments().getOnly();
        typeVariableU = arrayListTypeVariableU.getTypeArguments().getOnly();
        typeVariableV = arrayListTypeVariableV.getTypeArguments().getOnly();
        arrayTypeVariableT = fields.filter(named("arrayTypeVariableT")).getOnly().getType();
        arrayTypeVariableS = fields.filter(named("arrayTypeVariableS")).getOnly().getType();
        arrayTypeVariableU = fields.filter(named("arrayTypeVariableU")).getOnly().getType();
        arrayNestedTypeVariableT = fields.filter(named("arrayNestedTypeVariableT")).getOnly().getType();
    }

    // --- Wildcards are not valid assignment targets. ---

    @Test(expected = IllegalArgumentException.class)
    public void testAssignFromWildcardThrowsException() throws Exception {
        unboundWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE);
    }

    // --- Non-generic (raw) targets. ---

    @Test
    public void testAssignNonGenericTypeFromAssignableNonGenericType() throws Exception {
        assertThat(TypeDescription.Generic.OBJECT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(TypeDescription.STRING.asGenericType()), is(true));
    }

    @Test
    public void testAssignNonGenericTypeFromNonAssignableNonGenericType() throws Exception {
        assertThat(TypeDescription.STRING.asGenericType().accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(TypeDescription.Generic.OBJECT), is(false));
    }

    @Test
    public void testAssignObjectTypeFromAssignableGenericType() throws Exception {
        assertThat(TypeDescription.Generic.OBJECT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcard), is(true));
    }

    @Test
    public void testAssignNonGenericTypeFromNonAssignableGenericType() throws Exception {
        assertThat(TypeDescription.STRING.asGenericType().accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcard), is(false));
    }

    @Test
    public void testAssignNonGenericSuperInterfaceTypeFromAssignableGenericInterfaceType() throws Exception {
        assertThat(collectionRaw.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcard), is(true));
    }

    @Test
    public void testAssignNonGenericSuperInterfaceTypeFromAssignableGenericType() throws Exception {
        assertThat(collectionRaw.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListWildcard), is(true));
    }

    @Test
    public void testAssignRawInterfaceTypeFromEqualGenericInterfaceType() throws Exception {
        assertThat(listRaw.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcard), is(true));
    }

    @Test
    public void testAssignRawTypeFromEqualGenericType() throws Exception {
        assertThat(arrayListRaw.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListWildcard), is(true));
    }

    @Test
    public void testAssignNonGenericSuperTypeFromAssignableGenericType() throws Exception {
        assertThat(abstractListRaw.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListWildcard), is(true));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAssignNonGenericTypeFromWildcardThrowsException() throws Exception {
        TypeDescription.Generic.OBJECT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(unboundWildcard);
    }

    @Test
    public void testAssignNonGenericTypeFromAssignableTypeVariable() throws Exception {
        assertThat(TypeDescription.Generic.OBJECT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableT), is(true));
    }

    @Test
    public void testAssignNonGenericTypeFromNonAssignableTypeVariable() throws Exception {
        assertThat(TypeDescription.STRING.asGenericType().accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableT), is(false));
    }

    // --- Array targets (generic and non-generic). ---

    @Test
    public void testAssignNonGenericSuperArrayTypeFromAssignableGenericArrayType() throws Exception {
        assertThat(collectionRawArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    @Test
    public void testAssignRawArrayTypeFromEqualGenericArrayType() throws Exception {
        assertThat(listRawArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    @Test
    public void testAssignNonGenericArrayFromNonAssignableGenericArrayType() throws Exception {
        assertThat(stringArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(false));
    }

    @Test
    public void testAssignNonGenericArrayFromAssignableGenericArrayType() throws Exception {
        assertThat(objectArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    @Test
    public void testAssignNonGenericArrayFromGenericArrayTypeOfIncompatibleArity() throws Exception {
        assertThat(objectNestedArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(false));
    }

    @Test
    public void testAssignObjectTypeFromGenericArrayType() throws Exception {
        assertThat(TypeDescription.Generic.OBJECT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    @Test
    public void testAssignCloneableTypeFromGenericArrayType() throws Exception {
        assertThat(new TypeDescription.Generic.OfNonGenericType.ForLoadedType(Cloneable.class).accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    @Test
    public void testAssignSerializableTypeFromGenericArrayType() throws Exception {
        assertThat(new TypeDescription.Generic.OfNonGenericType.ForLoadedType(Serializable.class).accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(true));
    }

    // --- Type-variable targets: only an equal variable or a variable with an assignable bound matches. ---

    @Test
    public void testAssignTypeVariableFromNonGenericType() throws Exception {
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(TypeDescription.Generic.OBJECT), is(false));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAssignTypeVariableFromWildcardTypeThrowsException() throws Exception {
        typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(unboundWildcard);
    }

    @Test
    public void testAssignTypeVariableFromGenericArrayType() throws Exception {
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcardArray), is(false));
    }

    @Test
    public void testAssignTypeVariableFromParameterizedType() throws Exception {
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(listWildcard), is(false));
    }

    @Test
    public void testAssignTypeVariableFromEqualTypeVariable() throws Exception {
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableT), is(true));
    }

    @Test
    public void testAssignTypeVariableFromNonAssignableWildcard() throws Exception {
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableS), is(false));
    }

    @Test
    public void testAssignTypeVariableFromAssignableWildcard() throws Exception {
        // U extends T in GenericTypes, so U is assignable to T.
        assertThat(typeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableU), is(true));
    }

    // --- Generic-array targets. ---

    @Test
    public void testAssignGenericArrayFromAssignableGenericArray() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayTypeVariableU), is(true));
    }

    @Test
    public void testAssignGenericNestedArrayFromNonAssignableGenericArray() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayNestedTypeVariableT), is(false));
    }

    @Test
    public void testAssignGenericNestedArrayFromAssignableObjectArray() throws Exception {
        assertThat(new TypeDescription.Generic.OfNonGenericType.ForLoadedType(Object[][].class).accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayNestedTypeVariableT), is(true));
    }

    @Test
    public void testAssignGenericArrayFromNonAssignableGenericArray() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayTypeVariableS), is(false));
    }

    @Test
    public void testAssignGenericArrayFromNonAssignableNonGenericNonArrayType() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(TypeDescription.Generic.OBJECT), is(false));
    }

    @Test
    public void testAssignGenericArrayFromNonAssignableNonGenericArrayType() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(objectArray), is(false));
    }

    @Test
    public void testAssignGenericArrayFromAssignableNonGenericArrayType() throws Exception {
        assertThat(listWildcardArray.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListRawArray), is(true));
    }

    @Test
    public void testAssignGenericArrayFromNonAssignableTypeVariable() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableT), is(false));
    }

    @Test
    public void testAssignGenericArrayFromNonAssignableParameterizedType() throws Exception {
        assertThat(arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListWildcard), is(false));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAssignGenericArrayFromWildcardThrowsException() throws Exception {
        arrayTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(unboundWildcard);
    }

    // --- Parameterized targets with wildcard arguments. ---

    @Test
    public void testAssignParameterizedWildcardTypeFromEqualType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionWildcard), is(true));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromEqualRawType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionRaw), is(true));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromAssignableParameterizedWildcardType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListWildcard), is(true));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromAssignableParameterizedNonWildcardType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListTypeVariableT), is(true));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromAssignableTypeVariableType() throws Exception {
        // V extends List<?> in GenericTypes, so its bound is assignable to Collection<?>.
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableV), is(true));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromNonAssignableRawType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(TypeDescription.STRING.asGenericType()), is(false));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromNonAssignableParameterizedType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(callableWildcard), is(false));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromNonAssignableGenericArrayType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayTypeVariableT), is(false));
    }

    @Test
    public void testAssignParameterizedWildcardTypeFromNonAssignableTypeVariableType() throws Exception {
        assertThat(collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(typeVariableT), is(false));
    }

    // --- Parameterized targets with type-variable arguments (invariant binding). ---

    @Test
    public void testAssignParameterizedTypeVariableTypeFromEqualParameterizedTypeVariableType() throws Exception {
        assertThat(collectionTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test
    public void testAssignParameterizedTypeVariableTypeFromAssignableParameterizedTypeVariableType() throws Exception {
        assertThat(collectionTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListTypeVariableT), is(true));
    }

    @Test
    public void testAssignParameterizedTypeVariableTypeFromNonAssignableParameterizedTypeVariableType() throws Exception {
        assertThat(collectionTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(arrayListTypeVariableS), is(false));
    }

    // --- Upper-bounded wildcard arguments (covariant binding). ---

    @Test
    public void testAssignUpperBoundFromAssignableBound() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test
    public void testAssignUpperBoundFromAssignableBoundSuperType() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableU), is(true));
    }

    @Test
    public void testAssignUpperBoundFromAssignableUpperBoundSuperType() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionUpperBoundTypeVariableU), is(true));
    }

    @Test
    public void testAssignUpperBoundFromAssignableUpperBoundEqualType() throws Exception {
        // NOTE(review): body is identical to testAssignUpperBoundFromAssignableBoundSuperType;
        // possibly intended to use collectionUpperBoundTypeVariableT as the source — confirm.
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableU), is(true));
    }

    @Test
    public void testAssignUpperBoundFromNonAssignableBoundType() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableS), is(false));
    }

    @Test
    public void testAssignUpperBoundFromNonAssignableUpperBoundType() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionUpperBoundTypeVariableS), is(false));
    }

    @Test
    public void testAssignUpperBoundFromLowerBoundType() throws Exception {
        assertThat(collectionUpperBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionLowerBoundTypeVariableT), is(false));
    }

    // --- Lower-bounded wildcard arguments (contravariant binding). ---

    @Test
    public void testAssignLowerBoundFromAssignableBound() throws Exception {
        assertThat(collectionLowerBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test
    public void testAssignLowerBoundFromAssignableBoundSuperType() throws Exception {
        assertThat(collectionLowerBoundTypeVariableU.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test
    public void testAssignLowerBoundFromAssignableUpperBoundSuperType() throws Exception {
        assertThat(collectionLowerBoundTypeVariableU.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionLowerBoundTypeVariableT), is(true));
    }

    @Test
    public void testAssignLowerBoundFromAssignableUpperBoundEqualType() throws Exception {
        // NOTE(review): body is identical to testAssignLowerBoundFromAssignableBoundSuperType — confirm fixture choice.
        assertThat(collectionLowerBoundTypeVariableU.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test
    public void testAssignLowerBoundFromNonAssignableBoundType() throws Exception {
        assertThat(collectionLowerBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableS), is(false));
    }

    @Test
    public void testAssignLowerBoundFromNonAssignableUpperBoundType() throws Exception {
        assertThat(collectionLowerBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionLowerBoundTypeVariableS), is(false));
    }

    @Test
    public void testAssignLowerBoundFromUpperBoundType() throws Exception {
        assertThat(collectionLowerBoundTypeVariableT.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionUpperBoundTypeVariableT), is(false));
    }

    @Test
    public void testAssignLowerBoundFromAssignableBoundSubType() throws Exception {
        // NOTE(review): body is identical to testAssignLowerBoundFromAssignableBoundSuperType — confirm fixture choice.
        assertThat(collectionLowerBoundTypeVariableU.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(collectionTypeVariableT), is(true));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAssignParameterizedTypeFromWildcardTypeThrowsException() throws Exception {
        collectionWildcard.accept(TypeDescription.Generic.Visitor.Assigner.INSTANCE)
                .isAssignableFrom(unboundWildcard);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAssignIncompatibleParameterizedTypesThrowsException() throws Exception {
        // Same erasure but differing type-argument counts must be rejected by the dispatcher.
        TypeDescription.Generic source = mock(TypeDescription.Generic.class), target = mock(TypeDescription.Generic.class);
        TypeDescription erasure = mock(TypeDescription.class);
        when(source.asErasure()).thenReturn(erasure);
        when(target.asErasure()).thenReturn(erasure);
        when(source.getTypeArguments()).thenReturn(new TypeList.Generic.Empty());
        when(target.getTypeArguments()).thenReturn(new TypeList.Generic.Explicit(mock(TypeDescription.Generic.class)));
        new TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType(target).onParameterizedType(source);
    }

    @Test
    public void testObjectProperties() throws Exception {
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForGenericArray.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForNonGenericType.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForTypeVariable.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType.ParameterAssigner.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType.ParameterAssigner.InvariantBinding.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType.ParameterAssigner.CovariantBinding.class).apply();
        ObjectPropertyAssertion.of(TypeDescription.Generic.Visitor.Assigner.Dispatcher.ForParameterizedType.ParameterAssigner.ContravariantBinding.class).apply();
    }

    /**
     * Sample type declarations whose fields supply every generic-type shape used by the tests:
     * raw types, wildcard/bounded-wildcard parameterizations, type variables ({@code U extends T},
     * {@code V extends List<?>}), and (nested) arrays of each.
     */
    @SuppressWarnings({"unused", "unchecked"})
    private static class GenericTypes<T, S, U extends T, V extends List<?>> {

        private Collection collectionRaw;

        private Collection<?> collectionWildcard;

        private Collection<T> collectionTypeVariableT;

        private Collection<S> collectionTypeVariableS;

        private Collection<U> collectionTypeVariableU;

        private Collection<? extends T> collectionUpperBoundTypeVariableT;

        private Collection<? extends S> collectionUpperBoundTypeVariableS;

        private Collection<? extends U> collectionUpperBoundTypeVariableU;

        private Collection<? super T> collectionLowerBoundTypeVariableT;

        private Collection<? super S> collectionLowerBoundTypeVariableS;

        private Collection<? super U> collectionLowerBoundTypeVariableU;

        private Collection[] collectionRawArray;

        private List listRaw;

        private List<?> listWildcard;

        private List[] listRawArray;

        private List<?>[] listWildcardArray;

        private AbstractList abstractListRaw;

        private ArrayList arrayListRaw;

        private ArrayList<?> arrayListWildcard;

        private ArrayList[] arrayListRawArray;

        private ArrayList<T> arrayListTypeVariableT;

        private ArrayList<S> arrayListTypeVariableS;

        private ArrayList<U> arrayListTypeVariableU;

        private ArrayList<V> arrayListTypeVariableV;

        private Callable<?> callableWildcard;

        private T[] arrayTypeVariableT;

        private T[][] arrayNestedTypeVariableT;

        private S[] arrayTypeVariableS;

        private U[] arrayTypeVariableU;
    }
}
| |
package openblocks.common;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import java.util.Map;
import java.util.Random;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.passive.EntityOcelot;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import net.minecraftforge.event.entity.living.LivingDropsEvent;
import openblocks.Config;
import openblocks.OpenBlocks;
import openblocks.common.item.ItemTrophyBlock;
import openblocks.common.tileentity.TileEntityTrophy;
import openblocks.trophy.BlazeBehavior;
import openblocks.trophy.CaveSpiderBehavior;
import openblocks.trophy.CreeperBehavior;
import openblocks.trophy.EndermanBehavior;
import openblocks.trophy.ITrophyBehavior;
import openblocks.trophy.ItemDropBehavior;
import openblocks.trophy.MooshroomBehavior;
import openblocks.trophy.SkeletonBehavior;
import openblocks.trophy.SnowmanBehavior;
import openblocks.trophy.SquidBehavior;
import openblocks.trophy.WitchBehavior;
import openmods.Log;
import openmods.calc.Environment;
import openmods.calc.ExprType;
import openmods.calc.SingleExprEvaluator;
import openmods.calc.SingleExprEvaluator.EnvironmentConfigurator;
import openmods.calc.types.fp.DoubleCalculatorFactory;
import openmods.config.properties.ConfigurationChange;
import openmods.reflection.ReflectionHelper;
/**
 * Handles mob trophies: keeps a cache of dummy display entities per trophy type,
 * evaluates the configurable drop-chance formula, and listens for living-drop and
 * configuration-change events to add trophy item drops.
 */
public class TrophyHandler {

    // Cache of one dummy entity per trophy. A trophy whose entity creation failed is
    // cached with a null value so creation is not retried (see getEntityFromCache).
    private static final Map<Trophy, Entity> ENTITY_CACHE = Maps.newHashMap();

    // Used only when the configured drop-chance formula cannot be evaluated.
    private final Random fallbackDropChance = new Random();

    // Evaluates Config.trophyDropChanceFormula as an infix double-valued expression.
    private final SingleExprEvaluator<Double, ExprType> dropChangeCalculator = SingleExprEvaluator.create(DoubleCalculatorFactory.createDefault());

    // Instance initializer: compile the formula once at construction time.
    {
        updateDropChanceFormula();
    }

    /**
     * Recompiles the drop-chance formula when the relevant config property changes.
     */
    @SubscribeEvent
    public void onConfigChange(ConfigurationChange.Post evt) {
        if (evt.check("trophy", "trophyDropChanceFormula"))
            updateDropChanceFormula();
    }

    // Re-parses the currently configured formula into the evaluator.
    private void updateDropChanceFormula() {
        dropChangeCalculator.setExpr(ExprType.INFIX, Config.trophyDropChanceFormula);
    }

    /**
     * Returns the cached dummy entity for the given trophy, creating it on first access.
     * A failed creation is logged and cached as null, so the result may be null and the
     * failure is never retried.
     */
    public static Entity getEntityFromCache(Trophy trophy) {
        Entity entity = ENTITY_CACHE.get(trophy);
        if (entity == null) {
            // containsKey distinguishes "never attempted" from "attempted and failed (null cached)".
            if (!ENTITY_CACHE.containsKey(trophy)) {
                try {
                    entity = trophy.createEntity();
                } catch (Throwable t) {
                    Log.severe(t, "Failed to create dummy entity for trophy %s", trophy);
                }
            }
            ENTITY_CACHE.put(trophy, entity);
        }
        return entity;
    }

    /**
     * Sets a slime entity's size via reflection, trying the obfuscated name first and
     * the deobfuscated one second. Failures are logged and ignored; the entity is
     * returned either way to allow call chaining.
     */
    private static Entity setSlimeSize(Entity entity, int size) {
        try {
            ReflectionHelper.call(entity, new String[] { "func_70799_a", "setSlimeSize" }, ReflectionHelper.primitive(size));
        } catch (Exception e) {
            Log.warn(e, "Can't update slime size");
        }
        return entity;
    }

    /**
     * All supported trophy types. Each constant may carry a display scale, a vertical
     * render offset and an {@link ITrophyBehavior}; the enum name doubles as the vanilla
     * entity name used by {@link #createEntity()} and as the key in {@link #TYPES}.
     */
    public enum Trophy {
        Wolf(),
        Chicken(new ItemDropBehavior(10000, new ItemStack(Items.egg), "mob.chicken.plop")),
        Cow(new ItemDropBehavior(20000, new ItemStack(Items.leather))),
        Creeper(new CreeperBehavior()),
        Skeleton(new SkeletonBehavior()),
        PigZombie(new ItemDropBehavior(20000, new ItemStack(Items.gold_nugget))),
        Bat(1.0, -0.3),
        Zombie(),
        Witch(0.35, new WitchBehavior()),
        Villager(),
        // "Ozelot" is the vanilla entity name for the ocelot/cat.
        Ozelot() {
            @Override
            protected Entity createEntity() {
                Entity entity = super.createEntity();
                try {
                    // Tamed ocelots render as cats.
                    ((EntityOcelot)entity).setTamed(true);
                } catch (ClassCastException e) {
                    Log.warn("Invalid cat entity class: %s", entity.getClass());
                }
                return entity;
            }
        },
        Sheep(),
        Blaze(new BlazeBehavior()),
        Silverfish(),
        Spider(),
        CaveSpider(new CaveSpiderBehavior()),
        Slime(0.6) {
            @Override
            protected Entity createEntity() {
                // Force the smallest slime size for display.
                return setSlimeSize(super.createEntity(), 1);
            }
        },
        Ghast(0.1, 0.3),
        Enderman(0.3, new EndermanBehavior()),
        // "LavaSlime" is the vanilla entity name for the magma cube.
        LavaSlime(0.6) {
            @Override
            protected Entity createEntity() {
                return setSlimeSize(super.createEntity(), 1);
            }
        },
        Squid(0.3, 0.5, new SquidBehavior()),
        MushroomCow(new MooshroomBehavior()),
        VillagerGolem(0.3),
        SnowMan(new SnowmanBehavior()),
        Pig(new ItemDropBehavior(20000, new ItemStack(Items.porkchop)));

        // Render scale of the trophy model; 0.4 unless overridden by a constructor.
        private double scale = 0.4;

        // Vertical render offset; 0.0 unless overridden by a constructor.
        private double verticalOffset = 0.0;

        // Optional per-trophy behavior for activation and ticking; may be null.
        private ITrophyBehavior behavior;

        Trophy() {}

        Trophy(ITrophyBehavior behavior) {
            this.behavior = behavior;
        }

        Trophy(double scale) {
            this.scale = scale;
        }

        Trophy(double scale, ITrophyBehavior behavior) {
            this.scale = scale;
            this.behavior = behavior;
        }

        Trophy(double scale, double verticalOffset) {
            this(scale);
            this.verticalOffset = verticalOffset;
        }

        Trophy(double scale, double verticalOffset, ITrophyBehavior behavior) {
            this(scale, verticalOffset);
            this.behavior = behavior;
        }

        public double getVerticalOffset() {
            return verticalOffset;
        }

        public double getScale() {
            return scale;
        }

        /** Returns the shared cached display entity for this trophy (may be null on creation failure). */
        public Entity getEntity() {
            return getEntityFromCache(this);
        }

        /** Builds an ItemStack of the trophy block tagged with this trophy's metadata. */
        public ItemStack getItemStack() {
            return ItemTrophyBlock.putMetadata(new ItemStack(OpenBlocks.Blocks.trophy), this);
        }

        /**
         * Plays this mob's living sound at the given position by temporarily attaching the
         * cached entity to the world. The entity is shared, so the worldObj swap is done
         * under synchronization and reset to null afterwards.
         */
        public void playSound(World world, double x, double y, double z) {
            if (world == null) return;
            Entity e = getEntity();
            if (e instanceof EntityLiving) {
                e.posX = x;
                e.posY = y;
                e.posZ = z;
                synchronized (e) {
                    e.worldObj = world;
                    ((EntityLiving)e).playLivingSound();
                    e.worldObj = null;
                }
            }
        }

        /**
         * Delegates activation to this trophy's behavior, if any.
         * Returns the behavior's result, or 0 when no behavior is set.
         */
        public int executeActivateBehavior(TileEntityTrophy tile, EntityPlayer player) {
            if (behavior != null) return behavior.executeActivateBehavior(tile, player);
            return 0;
        }

        /** Delegates a tick to this trophy's behavior, if any. */
        public void executeTickBehavior(TileEntityTrophy tile) {
            if (behavior != null) behavior.executeTickBehavior(tile);
        }

        /** Creates the dummy entity by vanilla entity name (the enum constant's name). */
        protected Entity createEntity() {
            return EntityList.createEntityByName(toString(), null);
        }

        // Builds the immutable name->trophy lookup once all constants exist.
        static {
            ImmutableMap.Builder<String, Trophy> builder = ImmutableMap.builder();
            for (Trophy t : values())
                builder.put(t.name(), t);
            TYPES = builder.build();
        }

        // Immutable lookup from entity name to trophy, assigned in the static initializer above.
        public final static Map<String, Trophy> TYPES;

        // Cached values() array to avoid re-cloning on each call.
        public final static Trophy[] VALUES = values();
    }

    /**
     * On a player-credited mob death, evaluates the drop-chance formula (exposing
     * "looting", "specialDrop" and "chance" symbols) — falling back to a random
     * looting-weighted roll if the formula fails — and, on a positive result, adds
     * the matching trophy item to the event's drops.
     */
    @SubscribeEvent
    public void onLivingDrops(final LivingDropsEvent event) {
        final Entity entity = event.entity;
        if (event.recentlyHit && canDrop(entity)) {
            final Double result = dropChangeCalculator.evaluate(
                    new EnvironmentConfigurator<Double>() {
                        @Override
                        public void accept(Environment<Double> env) {
                            env.setGlobalSymbol("looting", Double.valueOf(event.lootingLevel));
                            env.setGlobalSymbol("specialDrop", Double.valueOf(event.specialDropValue));
                            env.setGlobalSymbol("chance", Config.trophyDropChance);
                        }
                    }, new Supplier<Double>() {
                        // Fallback used when the configured formula cannot be evaluated.
                        @Override
                        public Double get() {
                            final double bias = fallbackDropChance.nextDouble() / 4;
                            final double selection = fallbackDropChance.nextDouble();
                            return (event.lootingLevel + bias) * Config.trophyDropChance - selection;
                        }
                    });
            if (result > 0) {
                final String entityName = EntityList.getEntityString(entity);
                if (!Strings.isNullOrEmpty(entityName)) {
                    Trophy mobTrophy = Trophy.TYPES.get(entityName);
                    if (mobTrophy != null) {
                        EntityItem drop = new EntityItem(entity.worldObj, entity.posX, entity.posY, entity.posZ, mobTrophy.getItemStack());
                        drop.delayBeforeCanPickup = 10;
                        event.drops.add(drop);
                    }
                }
            }
        }
    }

    // Trophies only drop in a world where the doMobLoot gamerule is enabled.
    private static boolean canDrop(Entity entity) {
        final World world = entity.worldObj;
        return world != null && world.getGameRules().getGameRuleBooleanValue("doMobLoot");
    }
}
| |
package com.sequenceiq.freeipa.service.stack;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import java.util.List;
import java.util.Set;
import org.junit.Assert;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import com.google.common.collect.Sets;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.Status;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.instance.InstanceGroupType;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.instance.InstanceStatus;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.health.HealthDetailsFreeIpaResponse;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.health.NodeHealthDetails;
import com.sequenceiq.freeipa.client.FreeIpaClientException;
import com.sequenceiq.freeipa.entity.InstanceGroup;
import com.sequenceiq.freeipa.entity.InstanceMetaData;
import com.sequenceiq.freeipa.entity.Stack;
import io.opentracing.Tracer;
/**
 * Unit tests for {@code FreeIpaStackHealthDetailsService.getHealthDetails},
 * covering the aggregation of per-instance health into an overall stack status
 * (AVAILABLE / UNHEALTHY / UNREACHABLE / STOPPED) for one- and two-node stacks.
 *
 * Uses equality-style assertions (assertEquals/assertFalse) rather than
 * assertTrue(x == y), so failures report expected vs. actual values.
 */
@ExtendWith(MockitoExtension.class)
public class FreeIpaStackHealthDetailsServiceTest {
    private static final String ENVIRONMENT_ID = "crn:cdp:environments:us-west-1:f39af961-e0ce-4f79-826c-45502efb9ca3:environment:12345-6789";

    private static final String ACCOUNT_ID = "accountId";

    // Shared client failure used by the "unresponsive" tests; created once in init().
    private static FreeIpaClientException ipaClientException;

    private static final String HOST1 = "host1.domain";

    private static final String HOST2 = "host2.domain";

    private static final String INSTANCE_ID1 = "i-0123456789";

    private static final String INSTANCE_ID2 = "i-9876543210";

    @Mock
    private Tracer tracer;

    @Mock
    private StackService stackService;

    @Mock
    private FreeIpaInstanceHealthDetailsService freeIpaInstanceHealthDetailsService;

    @InjectMocks
    private FreeIpaStackHealthDetailsService underTest;

    /** Healthy node-health payload for instance 1. */
    private NodeHealthDetails getGoodDetails1() {
        NodeHealthDetails nodeHealthDetails = new NodeHealthDetails();
        nodeHealthDetails.setInstanceId(INSTANCE_ID1);
        nodeHealthDetails.setName(HOST1);
        nodeHealthDetails.setStatus(InstanceStatus.CREATED);
        return nodeHealthDetails;
    }

    /** Healthy node-health payload for instance 2. */
    private NodeHealthDetails getGoodDetails2() {
        NodeHealthDetails nodeHealthDetails = new NodeHealthDetails();
        nodeHealthDetails.setInstanceId(INSTANCE_ID2);
        nodeHealthDetails.setName(HOST2);
        nodeHealthDetails.setStatus(InstanceStatus.CREATED);
        return nodeHealthDetails;
    }

    /** Unhealthy node-health payload (with one issue) for instance 1. */
    private NodeHealthDetails getUnhealthyDetails1() {
        NodeHealthDetails nodeHealthDetails = new NodeHealthDetails();
        nodeHealthDetails.setInstanceId(INSTANCE_ID1);
        nodeHealthDetails.setName(HOST1);
        nodeHealthDetails.setStatus(InstanceStatus.UNHEALTHY);
        nodeHealthDetails.setIssues(List.of("failed"));
        return nodeHealthDetails;
    }

    /** Unhealthy node-health payload (with one issue) for instance 2. */
    private NodeHealthDetails getUnhealthyDetails2() {
        NodeHealthDetails nodeHealthDetails = new NodeHealthDetails();
        nodeHealthDetails.setInstanceId(INSTANCE_ID2);
        nodeHealthDetails.setName(HOST2);
        nodeHealthDetails.setStatus(InstanceStatus.UNHEALTHY);
        nodeHealthDetails.setIssues(List.of("failed"));
        return nodeHealthDetails;
    }

    /** Single-node stack whose only instance is CREATED. */
    private Stack getStack() {
        Stack stack = new Stack();
        stack.setResourceCrn(ENVIRONMENT_ID);
        InstanceGroup instanceGroup = new InstanceGroup();
        stack.getInstanceGroups().add(instanceGroup);
        instanceGroup.setInstanceGroupType(InstanceGroupType.MASTER);
        InstanceMetaData instanceMetaData = new InstanceMetaData();
        instanceMetaData.setInstanceId(INSTANCE_ID1);
        instanceMetaData.setDiscoveryFQDN(HOST1);
        instanceMetaData.setInstanceStatus(InstanceStatus.CREATED);
        instanceGroup.setInstanceMetaData(Sets.newHashSet(instanceMetaData));
        return stack;
    }

    /** Stack containing exactly the two given instances in one MASTER group. */
    private Stack getStackTwoInstances(InstanceMetaData im1, InstanceMetaData im2) {
        Stack stack = new Stack();
        stack.setResourceCrn(ENVIRONMENT_ID);
        InstanceGroup instanceGroup = new InstanceGroup();
        stack.getInstanceGroups().add(instanceGroup);
        instanceGroup.setInstanceGroupType(InstanceGroupType.MASTER);
        instanceGroup.setInstanceMetaData(Set.of(im1, im2));
        return stack;
    }

    /** Single-node stack whose only instance has been TERMINATED on the provider. */
    private Stack getDeletedStack() {
        Stack stack = new Stack();
        stack.setResourceCrn(ENVIRONMENT_ID);
        InstanceGroup instanceGroup = new InstanceGroup();
        stack.getInstanceGroups().add(instanceGroup);
        instanceGroup.setInstanceGroupType(InstanceGroupType.MASTER);
        InstanceMetaData instanceMetaData = new InstanceMetaData();
        instanceMetaData.setInstanceStatus(InstanceStatus.TERMINATED);
        instanceMetaData.setInstanceId(INSTANCE_ID1);
        instanceGroup.setInstanceMetaData(Sets.newHashSet(instanceMetaData));
        instanceMetaData.setDiscoveryFQDN(HOST1);
        return stack;
    }

    /** CREATED instance metadata for instance 1; tests mutate the status as needed. */
    private InstanceMetaData getInstance1() {
        InstanceMetaData instanceMetaData = new InstanceMetaData();
        instanceMetaData.setInstanceId(INSTANCE_ID1);
        instanceMetaData.setDiscoveryFQDN(HOST1);
        instanceMetaData.setInstanceStatus(InstanceStatus.CREATED);
        return instanceMetaData;
    }

    /** CREATED instance metadata for instance 2; tests mutate the status as needed. */
    private InstanceMetaData getInstance2() {
        InstanceMetaData instanceMetaData = new InstanceMetaData();
        instanceMetaData.setInstanceId(INSTANCE_ID2);
        instanceMetaData.setDiscoveryFQDN(HOST2);
        instanceMetaData.setInstanceStatus(InstanceStatus.CREATED);
        return instanceMetaData;
    }

    @BeforeAll
    public static void init() {
        ipaClientException = new FreeIpaClientException("Error during healthcheck");
    }

    @Test
    public void testNodeDeletedOnProvider() throws Exception {
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getDeletedStack());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertFalse(response.getNodeHealthDetails().isEmpty());
        Assert.assertEquals(InstanceStatus.TERMINATED, response.getNodeHealthDetails().stream().findFirst().get().getStatus());
    }

    @Test
    public void testHealthySingleNode() throws Exception {
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStack());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), any())).thenReturn(getGoodDetails1());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.AVAILABLE, response.getStatus());
        Assert.assertFalse(response.getNodeHealthDetails().isEmpty());
        for (NodeHealthDetails nodeHealth : response.getNodeHealthDetails()) {
            Assert.assertTrue(nodeHealth.getIssues().isEmpty());
            Assert.assertEquals(InstanceStatus.CREATED, nodeHealth.getStatus());
        }
    }

    @Test
    public void testUnhealthySingleNode() throws Exception {
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStack());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), any())).thenReturn(getUnhealthyDetails1());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertFalse(response.getNodeHealthDetails().isEmpty());
        for (NodeHealthDetails nodeHealth : response.getNodeHealthDetails()) {
            Assert.assertFalse(nodeHealth.getIssues().isEmpty());
            Assert.assertEquals(InstanceStatus.UNHEALTHY, nodeHealth.getStatus());
        }
    }

    @Test
    public void testUnresponsiveSingleNode() throws Exception {
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStack());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), any())).thenThrow(ipaClientException);
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNREACHABLE, response.getStatus());
        Assert.assertEquals(1, response.getNodeHealthDetails().size());
        for (NodeHealthDetails nodeHealth : response.getNodeHealthDetails()) {
            Assert.assertFalse(nodeHealth.getIssues().isEmpty());
            Assert.assertEquals(InstanceStatus.UNREACHABLE, nodeHealth.getStatus());
            Assert.assertEquals(1, nodeHealth.getIssues().size());
            // The client exception's message is surfaced as the node's issue.
            Assert.assertEquals("Error during healthcheck", nodeHealth.getIssues().get(0));
        }
    }

    @Test
    public void testUnresponsiveSingleNodeThatThrowsRuntimeException() throws Exception {
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStack());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), any())).thenThrow(new RuntimeException("Expected"));
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNREACHABLE, response.getStatus());
        Assert.assertEquals(1, response.getNodeHealthDetails().size());
        for (NodeHealthDetails nodeHealth : response.getNodeHealthDetails()) {
            Assert.assertFalse(nodeHealth.getIssues().isEmpty());
            Assert.assertEquals(InstanceStatus.UNREACHABLE, nodeHealth.getStatus());
            Assert.assertEquals(1, nodeHealth.getIssues().size());
            Assert.assertEquals("Expected", nodeHealth.getIssues().get(0));
        }
    }

    @Test
    public void testUnresponsiveSecondaryNode() throws Exception {
        InstanceMetaData im1 = getInstance1();
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im1))).thenReturn(getGoodDetails1());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im2))).thenThrow(ipaClientException);
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        // One good node plus one unreachable node degrades the stack to UNHEALTHY.
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testTwoGoodNodes() throws Exception {
        InstanceMetaData im1 = getInstance1();
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im1))).thenReturn(getGoodDetails1());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im2))).thenReturn(getGoodDetails2());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.AVAILABLE, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testOneGoodOneUnhealthyNode() throws Exception {
        InstanceMetaData im1 = getInstance1();
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im1))).thenReturn(getGoodDetails1());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im2))).thenReturn(getUnhealthyDetails2());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testTwoUnhealthyNodes() throws Exception {
        InstanceMetaData im1 = getInstance1();
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im1))).thenReturn(getUnhealthyDetails1());
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im2))).thenReturn(getUnhealthyDetails2());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testOneStoppedOneGoodNode() throws Exception {
        InstanceMetaData im1 = getInstance1();
        im1.setInstanceStatus(InstanceStatus.STOPPED);
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), eq(im2))).thenReturn(getUnhealthyDetails2());
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testTwoStoppedNodes() throws Exception {
        InstanceMetaData im1 = getInstance1();
        im1.setInstanceStatus(InstanceStatus.STOPPED);
        InstanceMetaData im2 = getInstance2();
        im2.setInstanceStatus(InstanceStatus.STOPPED);
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.STOPPED, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testTwoFailedNodes() throws Exception {
        InstanceMetaData im1 = getInstance1();
        im1.setInstanceStatus(InstanceStatus.FAILED);
        InstanceMetaData im2 = getInstance2();
        im2.setInstanceStatus(InstanceStatus.FAILED);
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        Assert.assertEquals(Status.UNHEALTHY, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }

    @Test
    public void testTwoUnresponsiveNodes() throws Exception {
        InstanceMetaData im1 = getInstance1();
        InstanceMetaData im2 = getInstance2();
        Mockito.when(stackService.getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(anyString(), anyString())).thenReturn(getStackTwoInstances(im1, im2));
        Mockito.when(freeIpaInstanceHealthDetailsService.getInstanceHealthDetails(any(), any())).thenThrow(ipaClientException);
        HealthDetailsFreeIpaResponse response = underTest.getHealthDetails(ENVIRONMENT_ID, ACCOUNT_ID);
        // When no node is reachable at all, the stack is UNREACHABLE (not merely unhealthy).
        Assert.assertEquals(Status.UNREACHABLE, response.getStatus());
        Assert.assertEquals(2, response.getNodeHealthDetails().size());
    }
}
| |
/**
* This file is part of WiiuseJ.
*
* WiiuseJ is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* WiiuseJ is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with WiiuseJ. If not, see <http://www.gnu.org/licenses/>.
*/
package org.myrobotlab.control;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Random;
import javax.swing.ImageIcon;
import javax.swing.JList;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import org.slf4j.Logger;
import org.myrobotlab.logging.LoggerFactory;
import javax.swing.JTabbedPane;
import org.myrobotlab.control.widget.Number;
import org.myrobotlab.image.SerializableImage;
import org.myrobotlab.service.Wii.IRData;
import org.myrobotlab.service.GUIService;
import org.myrobotlab.service.interfaces.VideoGUISource;
import wiiusej.values.IRSource;
/**
 * GUI panel for the Wii service: renders the wiimote's IR camera points into a
 * scaled-down video frame and exposes a sensitivity control.
 */
public class WiiGUI extends ServiceGUI implements ListSelectionListener, VideoGUISource {
    private static final long serialVersionUID = 1L;

    // NOTE(review): WiiGUI.class.toString() yields "class org.myrobotlab...WiiGUI"
    // as the logger name; getCanonicalName() is the usual choice — kept as-is to
    // preserve the existing logging category.
    public final static Logger log = LoggerFactory.getLogger(WiiGUI.class.toString());

    VideoWidget video0 = null;

    // Graphics context and backing image the IR points are painted onto.
    Graphics cam = null;

    BufferedImage camImage = null;

    // Native IR camera resolution; the frame is rendered at (width/divisor) x (height/divisor).
    int width = 1024;

    int height = 768;

    int divisor = 2;

    ArrayList<IRData> irdata = new ArrayList<IRData>();

    // Sweep-direction constants (currentDirection/lastDirection below).
    final int LEFT = 0;

    final int RIGHT = 1;

    final int UNKNOWN = -1;

    int currentDirection = UNKNOWN;

    int lastDirection = UNKNOWN;

    // IR camera sensitivity slider: range 1..5, default 5.
    Number sensitivity = new Number("sensitivity", 5, 1, 5, "ir camera sensitivity");

    public Random rand = new Random();

    // Most recently published IR sample; erased (painted black) before the next one.
    public IRData lastIRData = null;

    public WiiGUI(final String boundServiceName, final GUIService myService, final JTabbedPane tabs) {
        super(boundServiceName, myService, tabs);
    }

    /** Builds the video widget and sensitivity control and lays them out on the grid. */
    @Override
    public void init() {
        video0 = new VideoWidget(boundServiceName, myService, tabs);
        video0.init();
        camImage = new BufferedImage(width / divisor, height / divisor, BufferedImage.TYPE_INT_RGB);
        cam = camImage.getGraphics();
        video0.displayFrame(new SerializableImage(camImage, boundServiceName));
        gc.gridx = 0;
        gc.gridy = 0;
        gc.gridheight = 4;
        gc.gridwidth = 2;
        display.add(video0.display, gc);
        // NOTE(review): the original set gridx = 2 and immediately overwrote it
        // with 0; the dead assignment was removed. Confirm the sensitivity
        // widget really belongs in column 0.
        gc.gridx = 0;
        gc.gridheight = 1;
        gc.gridwidth = 1;
        gc.gridy = 5;
        display.add(sensitivity.getDisplay(), gc);
        setCurrentFilterMouseListener();
    }

    /**
     * Loads an icon from the classpath.
     *
     * @param path resource path relative to this class
     * @param description accessible description for the icon
     * @return the icon, or null if the resource was not found (logged)
     */
    protected ImageIcon createImageIcon(String path, String description) {
        java.net.URL imgURL = getClass().getResource(path);
        if (imgURL != null) {
            return new ImageIcon(imgURL, description);
        } else {
            // Use the class logger instead of writing directly to System.err.
            log.error("Couldn't find file: {}", path);
            return null;
        }
    }

    public void displayFrame(SerializableImage camImage) {
        video0.displayFrame(camImage);
    }

    /** Subscribes to the bound service's IR stream and shows the initial frame. */
    @Override
    public void attachGUI() {
        video0.attachGUI();
        subscribe("publishIR", "publishIR", IRData.class);
        video0.displayFrame(new SerializableImage(camImage, boundServiceName));
    }

    int x;

    int y;

    /**
     * Paints every IR point of one sample in the given color; painting the same
     * sample in black erases it (see publishIR).
     */
    public void display(IRData ire, Color color) {
        cam.setColor(color);
        for (int i = 0; i < ire.event.getIRPoints().length; ++i) {
            IRSource ir = ire.event.getIRPoints()[i];
            if (ir != null) {
                // Mirror the coordinates and scale them to the display size.
                x = width / divisor - ir.getX() / divisor;
                y = height / divisor - ir.getY() / divisor;
                cam.fillArc(x, y, ir.getSize() * 3, ir.getSize() * 3, 0, 360);
                cam.drawString(ire.event.getWiimoteId() + " " + ir.getX() + "," + ir.getY() + " s" + ir.getSize(), x - 30, y);
            }
        }
    }

    int cnt = 0;

    int x0 = 0;

    int y0 = 0;

    int lastMin = width;

    int lastMax = 0;

    long timeStart = 0;

    long timeEnd = 0;

    int sweepTimeDelta = 0;

    int deltaTime = 0;

    /** Callback for new IR samples: erase the previous points, draw the new ones, refresh. */
    public void publishIR(IRData ire) {
        ++cnt;
        if (lastIRData != null) {
            // Erase the previous sample by repainting it in black.
            display(lastIRData, Color.black);
        }
        display(ire, Color.red);
        lastIRData = ire;
        video0.displayFrame(new SerializableImage(camImage, boundServiceName));
    }

    @Override
    public void detachGUI() {
        video0.detachGUI();
        unsubscribe("publishIR", "publishIR", IRData.class);
    }

    /**
     * Builds a double-click listener for a filter list.
     * NOTE(review): the listener is created but never registered on any
     * component, so it currently has no effect — confirm whether it should be
     * attached somewhere or removed.
     */
    public void setCurrentFilterMouseListener() {
        MouseListener mouseListener = new MouseAdapter() {
            public void mouseClicked(MouseEvent mouseEvent) {
                JList theList = (JList) mouseEvent.getSource();
                if (mouseEvent.getClickCount() == 2) {
                    int index = theList.locationToIndex(mouseEvent.getPoint());
                    if (index >= 0) {
                        Object o = theList.getModel().getElementAt(index);
                        System.out.println("Double-clicked on: " + o.toString());
                    }
                }
            }
        };
    }

    @Override
    public VideoWidget getLocalDisplay() {
        return video0;
    }

    @Override
    public void valueChanged(ListSelectionEvent arg0) {
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.get;
import com.google.common.collect.Iterators;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.*;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest {
/**
 * A single get item within a multi-get request: identifies one document by
 * index/type/id plus the per-item options (routing, fields, version and
 * source filtering). Serialized over the wire via {@link Streamable}, so the
 * field order in {@code readFrom}/{@code writeTo} must not change.
 */
public static class Item implements Streamable, IndicesRequest {
    private String index;
    private String type;
    private String id;
    private String routing;
    // Stored fields to return; null means "no explicit field selection".
    private String[] fields;
    // MATCH_ANY means "no version check".
    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;
    // Null means "use the request-level / default source-fetch settings".
    private FetchSourceContext fetchSourceContext;

    // Required by Streamable deserialization (see readItem).
    public Item() {
    }

    /**
     * Constructs a single get item.
     *
     * @param index The index name
     * @param type  The type (can be null)
     * @param id    The id
     */
    public Item(String index, @Nullable String type, String id) {
        this.index = index;
        this.type = type;
        this.id = id;
    }

    public String index() {
        return this.index;
    }

    @Override
    public String[] indices() {
        return new String[]{index};
    }

    @Override
    public IndicesOptions indicesOptions() {
        return GetRequest.INDICES_OPTIONS;
    }

    public Item index(String index) {
        this.index = index;
        return this;
    }

    public String type() {
        return this.type;
    }

    public Item type(String type) {
        this.type = type;
        return this;
    }

    public String id() {
        return this.id;
    }

    /**
     * The routing associated with this document.
     */
    public Item routing(String routing) {
        this.routing = routing;
        return this;
    }

    public String routing() {
        return this.routing;
    }

    // The parent id is only used as routing, and an explicit routing wins.
    public Item parent(String parent) {
        if (routing == null) {
            this.routing = parent;
        }
        return this;
    }

    public Item fields(String... fields) {
        this.fields = fields;
        return this;
    }

    public String[] fields() {
        return this.fields;
    }

    public long version() {
        return version;
    }

    public Item version(long version) {
        this.version = version;
        return this;
    }

    public VersionType versionType() {
        return versionType;
    }

    public Item versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    public FetchSourceContext fetchSourceContext() {
        return this.fetchSourceContext;
    }

    /**
     * Allows setting the {@link FetchSourceContext} for this request, controlling if and how _source should be returned.
     */
    public Item fetchSourceContext(FetchSourceContext fetchSourceContext) {
        this.fetchSourceContext = fetchSourceContext;
        return this;
    }

    // Factory used when deserializing a stream of items.
    public static Item readItem(StreamInput in) throws IOException {
        Item item = new Item();
        item.readFrom(in);
        return item;
    }

    // Wire format: index, type?, id, routing?, field count + fields, version,
    // version type, optional fetch-source context. Must mirror writeTo exactly.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        index = in.readString();
        type = in.readOptionalString();
        id = in.readString();
        routing = in.readOptionalString();
        int size = in.readVInt();
        if (size > 0) {
            fields = new String[size];
            for (int i = 0; i < size; i++) {
                fields[i] = in.readString();
            }
        }
        version = in.readLong();
        versionType = VersionType.fromValue(in.readByte());
        fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
    }

    // Must mirror readFrom exactly; a null fields array is written as count 0.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeOptionalString(type);
        out.writeString(id);
        out.writeOptionalString(routing);
        if (fields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(fields.length);
            for (String field : fields) {
                out.writeString(field);
            }
        }
        out.writeLong(version);
        out.writeByte(versionType.getValue());
        FetchSourceContext.optionalWriteToStream(fetchSourceContext, out);
    }

    // Equality covers every serialized field; kept consistent with hashCode.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Item)) return false;
        Item item = (Item) o;
        if (version != item.version) return false;
        if (fetchSourceContext != null ? !fetchSourceContext.equals(item.fetchSourceContext) : item.fetchSourceContext != null)
            return false;
        if (!Arrays.equals(fields, item.fields)) return false;
        if (!id.equals(item.id)) return false;
        if (!index.equals(item.index)) return false;
        if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
        if (type != null ? !type.equals(item.type) : item.type != null) return false;
        if (versionType != item.versionType) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result = index.hashCode();
        result = 31 * result + (type != null ? type.hashCode() : 0);
        result = 31 * result + id.hashCode();
        result = 31 * result + (routing != null ? routing.hashCode() : 0);
        result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0);
        result = 31 * result + (int) (version ^ (version >>> 32));
        result = 31 * result + versionType.hashCode();
        result = 31 * result + (fetchSourceContext != null ? fetchSourceContext.hashCode() : 0);
        return result;
    }
}
// Shard routing preference for the whole request (see preference(String)).
String preference;
// Tri-state realtime flag; null means "use the default" (true — see realtime()).
Boolean realtime;
// Whether to refresh the relevant shards before the get.
boolean refresh;
public boolean ignoreErrorsOnGeneratedFields = false;
// The individual documents to fetch.
List<Item> items = new ArrayList<>();
public MultiGetRequest() {
}

/**
 * Creates a multi get request caused by some other request, which is provided as an
 * argument so that its headers and context can be copied to the new request
 */
public MultiGetRequest(ActionRequest request) {
    super(request);
}
// Returns the live (mutable) list of items backing this request.
public List<Item> getItems() {
    return this.items;
}

// Adds a fully-configured item; returns this for chaining.
public MultiGetRequest add(Item item) {
    items.add(item);
    return this;
}

// Convenience overload: adds an item identified by index/type/id only.
public MultiGetRequest add(String index, @Nullable String type, String id) {
    items.add(new Item(index, type, id));
    return this;
}
/**
 * Validates the request: it must contain at least one item, and every item
 * must carry both an index and an id. Returns null when the request is valid,
 * otherwise an exception aggregating every validation error found.
 */
@Override
public ActionRequestValidationException validate() {
    ActionRequestValidationException validationException = null;
    if (items.isEmpty()) {
        return ValidateActions.addValidationError("no documents to get", validationException);
    }
    for (int i = 0; i < items.size(); i++) {
        Item item = items.get(i);
        if (item.index() == null) {
            validationException = ValidateActions.addValidationError("index is missing for doc " + i, validationException);
        }
        if (item.id() == null) {
            validationException = ValidateActions.addValidationError("id is missing for doc " + i, validationException);
        }
    }
    return validationException;
}
// Each item targets its own indices, so the items themselves are the sub-requests.
@Override
public List<? extends IndicesRequest> subRequests() {
    return items;
}

/**
 * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
 * <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
 * a custom value, which guarantees that the same order will be used across different requests.
 */
public MultiGetRequest preference(String preference) {
    this.preference = preference;
    return this;
}

public String preference() {
    return this.preference;
}

// Realtime defaults to true when it has not been set explicitly.
public boolean realtime() {
    return this.realtime == null ? true : this.realtime;
}

public MultiGetRequest realtime(Boolean realtime) {
    this.realtime = realtime;
    return this;
}

public boolean refresh() {
    return this.refresh;
}

public MultiGetRequest refresh(boolean refresh) {
    this.refresh = refresh;
    return this;
}

public MultiGetRequest ignoreErrorsOnGeneratedFields(boolean ignoreErrorsOnGeneratedFields) {
    this.ignoreErrorsOnGeneratedFields = ignoreErrorsOnGeneratedFields;
    return this;
}
// Overload taking a raw byte window; delegates to the BytesReference variant
// with explicit indices allowed.
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, byte[] data, int from, int length) throws Exception {
    return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, new BytesArray(data, from, length), true);
}

// Overload defaulting allowExplicitIndex to true.
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data) throws Exception {
    return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, data, true);
}

// Overload with no default routing.
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data, boolean allowExplicitIndex) throws Exception {
    return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, null, data, allowExplicitIndex);
}
/**
 * Parses a multi-get request body and appends the parsed items to this request.
 * The body is an object containing either a "docs" array of per-item objects
 * (parsed by parseDocuments) or an "ids" array of plain ids that take all the
 * provided defaults (parsed by parseIds). Unknown top-level fields are ignored.
 */
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, BytesReference data, boolean allowExplicitIndex) throws Exception {
    try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
        XContentParser.Token token;
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("docs".equals(currentFieldName)) {
                    parseDocuments(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting, allowExplicitIndex);
                } else if ("ids".equals(currentFieldName)) {
                    parseIds(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting);
                }
            }
        }
    }
    return this;
}
/**
 * Parses the {@code "docs"} array of a multi-get request body, appending one {@link Item}
 * per array element to {@code items}. Each element must be an object; recognized fields are
 * {@code _index}, {@code _type}, {@code _id}, {@code _routing}/{@code routing},
 * {@code _parent}/{@code parent}, {@code fields}, {@code _version}/{@code version},
 * {@code _version_type} (and variants) and {@code _source}.
 *
 * @param parser             positioned on the START_ARRAY token of the docs array
 * @param items              target list the parsed items are appended to
 * @param defaultIndex       index used when a doc does not specify {@code _index}
 * @param defaultType        type used when a doc does not specify {@code _type}
 * @param defaultFields      fields used when a doc does not specify {@code fields}
 * @param defaultFetchSource fetch-source context used when a doc has no {@code _source}
 * @param defaultRouting     routing used when a doc does not specify routing
 * @param allowExplicitIndex when false, a per-document {@code _index} is rejected
 * @throws IOException if reading from the parser fails
 */
public static void parseDocuments(XContentParser parser, List<Item> items, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, boolean allowExplicitIndex) throws IOException {
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchIllegalArgumentException("docs array element should include an object");
        }
        // Per-document state, seeded with the request-level defaults.
        String index = defaultIndex;
        String type = defaultType;
        String id = null;
        String routing = defaultRouting;
        String parent = null;
        List<String> fields = null;
        long version = Versions.MATCH_ANY;
        VersionType versionType = VersionType.INTERNAL;
        FetchSourceContext fetchSourceContext = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                // Scalar-valued fields of the document object.
                if ("_index".equals(currentFieldName)) {
                    if (!allowExplicitIndex) {
                        throw new ElasticsearchIllegalArgumentException("explicit index in multi get is not allowed");
                    }
                    index = parser.text();
                } else if ("_type".equals(currentFieldName)) {
                    type = parser.text();
                } else if ("_id".equals(currentFieldName)) {
                    id = parser.text();
                } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
                    routing = parser.text();
                } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                    parent = parser.text();
                } else if ("fields".equals(currentFieldName)) {
                    // A scalar "fields" value is treated as a single-element list.
                    fields = new ArrayList<>();
                    fields.add(parser.text());
                } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
                    version = parser.longValue();
                } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
                    versionType = VersionType.fromString(parser.text());
                } else if ("_source".equals(currentFieldName)) {
                    // _source as a scalar: either a boolean toggle or a single include pattern.
                    if (parser.isBooleanValue()) {
                        fetchSourceContext = new FetchSourceContext(parser.booleanValue());
                    } else if (token == XContentParser.Token.VALUE_STRING) {
                        fetchSourceContext = new FetchSourceContext(new String[]{parser.text()});
                    } else {
                        throw new ElasticsearchParseException("illegal type for _source: [" + token + "]");
                    }
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("fields".equals(currentFieldName)) {
                    fields = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        fields.add(parser.text());
                    }
                } else if ("_source".equals(currentFieldName)) {
                    // _source as an array: a list of include patterns.
                    ArrayList<String> includes = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        includes.add(parser.text());
                    }
                    fetchSourceContext = new FetchSourceContext(includes.toArray(Strings.EMPTY_ARRAY));
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("_source".equals(currentFieldName)) {
                    // _source as an object: {"includes": [...], "excludes": [...]} (singular
                    // spellings also accepted). Values may be arrays or scalars.
                    List<String> currentList = null, includes = null, excludes = null;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                            if ("includes".equals(currentFieldName) || "include".equals(currentFieldName)) {
                                currentList = includes != null ? includes : (includes = new ArrayList<>(2));
                            } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
                                currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
                            } else {
                                throw new ElasticsearchParseException("Source definition may not contain " + parser.text());
                            }
                        } else if (token == XContentParser.Token.START_ARRAY) {
                            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                currentList.add(parser.text());
                            }
                        } else if (token.isValue()) {
                            currentList.add(parser.text());
                        } else {
                            throw new ElasticsearchParseException("unexpected token while parsing source settings");
                        }
                    }
                    fetchSourceContext = new FetchSourceContext(
                            includes == null ? Strings.EMPTY_ARRAY : includes.toArray(new String[includes.size()]),
                            excludes == null ? Strings.EMPTY_ARRAY : excludes.toArray(new String[excludes.size()]));
                }
            }
        }
        // Fall back to the request-level default fields when none were parsed.
        String[] aFields;
        if (fields != null) {
            aFields = fields.toArray(new String[fields.size()]);
        } else {
            aFields = defaultFields;
        }
        items.add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent).version(version).versionType(versionType)
                .fetchSourceContext(fetchSourceContext == null ? defaultFetchSource : fetchSourceContext));
    }
}
/**
 * Convenience overload of {@link #parseDocuments(XContentParser, List, String, String, String[],
 * FetchSourceContext, String, boolean)} with no request-level defaults and with explicit
 * per-document indices allowed.
 *
 * @param parser positioned on the START_ARRAY token of the docs array
 * @param items  target list the parsed items are appended to
 * @throws IOException if reading from the parser fails
 */
public static void parseDocuments(XContentParser parser, List<Item> items) throws IOException {
    parseDocuments(parser, items, null, null, null, null, null, true);
}
/**
 * Parses the {@code "ids"} array of a multi-get request body. Every element must be a
 * scalar document id; each id produces one {@link Item} built from the supplied defaults.
 *
 * @param parser             positioned on the START_ARRAY token of the ids array
 * @param items              target list the parsed items are appended to
 * @param defaultIndex       index applied to every item
 * @param defaultType        type applied to every item
 * @param defaultFields      fields applied to every item
 * @param defaultFetchSource fetch-source context applied to every item
 * @param defaultRouting     routing applied to every item
 * @throws IOException if reading from the parser fails
 */
public static void parseIds(XContentParser parser, List<Item> items, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting) throws IOException {
    for (XContentParser.Token token = parser.nextToken();
            token != XContentParser.Token.END_ARRAY;
            token = parser.nextToken()) {
        // Anything other than a scalar value (e.g. a nested object/array) is rejected.
        if (!token.isValue()) {
            throw new ElasticsearchIllegalArgumentException("ids array element should only contain ids");
        }
        items.add(new Item(defaultIndex, defaultType, parser.text())
                .fields(defaultFields)
                .fetchSourceContext(defaultFetchSource)
                .routing(defaultRouting));
    }
}
/**
 * Convenience overload of {@link #parseIds(XContentParser, List, String, String, String[],
 * FetchSourceContext, String)} with no request-level defaults.
 *
 * @param parser positioned on the START_ARRAY token of the ids array
 * @param items  target list the parsed items are appended to
 * @throws IOException if reading from the parser fails
 */
public static void parseIds(XContentParser parser, List<Item> items) throws IOException {
    parseIds(parser, items, null, null, null, null, null);
}
@Override
public Iterator<Item> iterator() {
    // Expose the items read-only so callers cannot mutate the request through the iterator.
    return Iterators.unmodifiableIterator(items.iterator());
}
@Override
public void readFrom(StreamInput in) throws IOException {
    // NOTE: field order must stay exactly in sync with writeTo.
    super.readFrom(in);
    preference = in.readOptionalString();
    refresh = in.readBoolean();
    // The tri-state Boolean field is encoded as one byte: -1 = unset, 0 = false, 1 = true.
    // On -1 the field is deliberately left untouched (its default).
    byte realtime = in.readByte();
    if (realtime == 0) {
        this.realtime = false;
    } else if (realtime == 1) {
        this.realtime = true;
    }
    ignoreErrorsOnGeneratedFields = in.readBoolean();
    // Items are length-prefixed with a variable-length int.
    int size = in.readVInt();
    items = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        items.add(Item.readItem(in));
    }
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // NOTE: field order must stay exactly in sync with readFrom.
    super.writeTo(out);
    out.writeOptionalString(preference);
    out.writeBoolean(refresh);
    // Encode the tri-state Boolean as one byte: -1 = unset (null), 0 = false, 1 = true.
    if (realtime == null) {
        out.writeByte((byte) -1);
    } else if (!realtime) {
        // Idiomatic negation instead of the original "realtime == false" unboxing comparison;
        // null is already excluded by the branch above, so unboxing here is safe.
        out.writeByte((byte) 0);
    } else {
        out.writeByte((byte) 1);
    }
    out.writeBoolean(ignoreErrorsOnGeneratedFields);
    // Items are written length-prefixed with a variable-length int.
    out.writeVInt(items.size());
    for (Item item : items) {
        item.writeTo(out);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.writer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
import org.apache.carbondata.core.reader.CarbonDictionaryColumnMetaChunk;
import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReader;
import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReaderImpl;
import org.apache.carbondata.core.service.CarbonCommonFactory;
import org.apache.carbondata.core.service.PathService;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.core.util.path.HDFSLeaseUtils;
import org.apache.carbondata.format.ColumnDictionaryChunk;
import org.apache.carbondata.format.ColumnDictionaryChunkMeta;
import org.apache.thrift.TBase;
/**
 * This class is responsible for writing the dictionary file and its metadata.
 * Dictionary values are buffered in memory and flushed to disk in chunks of
 * {@code dictionary_one_chunk_size} values, each serialized as a thrift object.
 */
public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
  /**
   * LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(CarbonDictionaryWriterImpl.class.getName());
  /**
   * carbon type identifier
   */
  protected CarbonTableIdentifier carbonTableIdentifier;
  /**
   * list which will hold values upto maximum of one dictionary chunk size
   */
  private List<ByteBuffer> oneDictionaryChunkList;
  /**
   * Meta object which will hold last segment entry details
   */
  private CarbonDictionaryColumnMetaChunk chunkMetaObjectForLastSegmentEntry;
  /**
   * dictionary file and meta thrift writer
   */
  private ThriftWriter dictionaryThriftWriter;
  /**
   * column identifier
   */
  protected DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier;
  /**
   * carbon dictionary data store path
   */
  protected String storePath;
  /**
   * dictionary file path
   */
  protected String dictionaryFilePath;
  /**
   * dictionary metadata file path
   */
  protected String dictionaryMetaFilePath;
  /**
   * start offset of dictionary chunk for a segment
   */
  private long chunk_start_offset;
  /**
   * end offset of a dictionary chunk for a segment
   */
  private long chunk_end_offset;
  /**
   * total dictionary value record count for one segment
   */
  private int totalRecordCount;
  /**
   * total thrift object chunk count written for one segment
   */
  private int chunk_count;
  /**
   * chunk size for a dictionary file after which data will be written to disk
   */
  private int dictionary_one_chunk_size;
  /**
   * flag to check whether write method is called for first time
   */
  private boolean isFirstTime;
  /**
   * charset used to convert String dictionary values to bytes
   */
  private static final Charset defaultCharset = Charset.forName(
      CarbonCommonConstants.DEFAULT_CHARSET);
  /**
   * Constructor
   *
   * @param storePath carbon dictionary data store path
   * @param carbonTableIdentifier table identifier which will give table name and database name
   * @param dictionaryColumnUniqueIdentifier column unique identifier
   */
  public CarbonDictionaryWriterImpl(String storePath, CarbonTableIdentifier carbonTableIdentifier,
      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier) {
    this.carbonTableIdentifier = carbonTableIdentifier;
    this.dictionaryColumnUniqueIdentifier = dictionaryColumnUniqueIdentifier;
    this.storePath = storePath;
    // init() is deferred until the first write so that opening streams and
    // validating existing files only happens when there is data to write
    this.isFirstTime = true;
  }
  /**
   * This method will write the data in thrift format to disk. This method will be guided by
   * parameter dictionary_one_chunk_size and data will be divided into chunks
   * based on this parameter
   *
   * @param value unique dictionary value
   * @throws IOException if an I/O error occurs
   */
  @Override public void write(String value) throws IOException {
    write(value.getBytes(defaultCharset));
  }
  /**
   * This method will write the data in thrift format to disk. This method will be guided by
   * parameter dictionary_one_chunk_size and data will be divided into chunks
   * based on this parameter
   *
   * @param value unique dictionary value
   * @throws IOException if an I/O error occurs
   */
  @Override public void write(byte[] value) throws IOException {
    if (isFirstTime) {
      init();
      isFirstTime = false;
    }
    // if one chunk size is equal to list size then write the data to file
    checkAndWriteDictionaryChunkToFile();
    oneDictionaryChunkList.add(ByteBuffer.wrap(value));
    totalRecordCount++;
  }
  /**
   * This method will write the data in thrift format to disk. This method will not be guided by
   * parameter dictionary_one_chunk_size and complete data will be written as one chunk
   *
   * @param valueList list of byte array. Each byte array is unique dictionary value
   * @throws IOException if an I/O error occurs
   */
  @Override public void write(List<byte[]> valueList) throws IOException {
    if (isFirstTime) {
      init();
      isFirstTime = false;
    }
    // intentionally no chunk-size flush here: the whole list forms one chunk
    for (byte[] value : valueList) {
      oneDictionaryChunkList.add(ByteBuffer.wrap(value));
      totalRecordCount++;
    }
  }
  /**
   * write dictionary metadata file and close thrift object
   *
   * @throws IOException if an I/O error occurs
   */
  @Override public void close() throws IOException {
    if (null != dictionaryThriftWriter && dictionaryThriftWriter.isOpen()) {
      try {
        // if stream is open then only need to write dictionary file.
        writeDictionaryFile();
      } finally {
        // close the thrift writer for dictionary file
        closeThriftWriter();
      }
    }
  }
  /**
   * check if the threshold has been reached for the number of
   * values that can kept in memory and then flush the data to file
   *
   * @throws IOException if an I/O error occurs
   */
  private void checkAndWriteDictionaryChunkToFile() throws IOException {
    if (oneDictionaryChunkList.size() >= dictionary_one_chunk_size) {
      writeDictionaryFile();
      createChunkList();
    }
  }
  /**
   * This method will serialize the object of dictionary file
   *
   * @throws IOException if an I/O error occurs
   */
  private void writeDictionaryFile() throws IOException {
    ColumnDictionaryChunk columnDictionaryChunk = new ColumnDictionaryChunk();
    columnDictionaryChunk.setValues(oneDictionaryChunkList);
    writeThriftObject(columnDictionaryChunk);
  }
  /**
   * This method will check and created the directory path where dictionary file has to be created
   *
   * @throws IOException if an I/O error occurs
   */
  private void init() throws IOException {
    initDictionaryChunkSize();
    initPaths();
    boolean dictFileExists = CarbonUtil.isFileExists(this.dictionaryFilePath);
    if (dictFileExists && CarbonUtil.isFileExists(this.dictionaryMetaFilePath)) {
      // both files exist: append after the current end of the dictionary file,
      // truncating any bytes not accounted for in the metadata
      this.chunk_start_offset = CarbonUtil.getFileSize(this.dictionaryFilePath);
      validateDictionaryFileOffsetWithLastSegmentEntryOffset();
    } else if (dictFileExists) {
      // dictionary file without metadata is unusable: delete and start over
      FileFactory.getCarbonFile(dictionaryFilePath, FileFactory.getFileType(dictionaryFilePath))
          .delete();
    }
    openThriftWriter(this.dictionaryFilePath);
    createChunkList();
  }
  /**
   * Resolve the dictionary file and dictionary metadata file paths for this column.
   */
  protected void initPaths() {
    PathService pathService = CarbonCommonFactory.getPathService();
    CarbonTablePath carbonTablePath = pathService
        .getCarbonTablePath(this.storePath, carbonTableIdentifier,
            dictionaryColumnUniqueIdentifier);
    this.dictionaryFilePath = carbonTablePath.getDictionaryFilePath(
        dictionaryColumnUniqueIdentifier.getColumnIdentifier().getColumnId());
    this.dictionaryMetaFilePath = carbonTablePath.getDictionaryMetaFilePath(
        dictionaryColumnUniqueIdentifier.getColumnIdentifier().getColumnId());
  }
  /**
   * initialize the value of dictionary chunk that can be kept in memory at a time
   */
  private void initDictionaryChunkSize() {
    dictionary_one_chunk_size = CarbonUtil.getDictionaryChunkSize();
  }
  /**
   * initialise one dictionary size chunk list and increment chunk count
   */
  private void createChunkList() {
    this.oneDictionaryChunkList = new ArrayList<ByteBuffer>(dictionary_one_chunk_size);
    chunk_count++;
  }
  /**
   * if file already exists then read metadata file and
   * validate the last entry end offset with file size. If
   * they are not equal that means some invalid data is present which needs
   * to be truncated
   *
   * @throws IOException if an I/O error occurs
   */
  private void validateDictionaryFileOffsetWithLastSegmentEntryOffset() throws IOException {
    // read last dictionary chunk meta entry from dictionary metadata file
    chunkMetaObjectForLastSegmentEntry = getChunkMetaObjectForLastSegmentEntry();
    int bytesToTruncate = 0;
    if (null != chunkMetaObjectForLastSegmentEntry) {
      bytesToTruncate =
          (int) (chunk_start_offset - chunkMetaObjectForLastSegmentEntry.getEnd_offset());
    }
    if (bytesToTruncate > 0) {
      LOGGER.info("some inconsistency in dictionary file for column "
          + this.dictionaryColumnUniqueIdentifier.getColumnIdentifier());
      // truncate the dictionary data till chunk meta end offset
      FileFactory.FileType fileType = FileFactory.getFileType(this.dictionaryFilePath);
      CarbonFile carbonFile = FileFactory.getCarbonFile(this.dictionaryFilePath, fileType);
      boolean truncateSuccess = carbonFile
          .truncate(this.dictionaryFilePath, chunkMetaObjectForLastSegmentEntry.getEnd_offset());
      if (!truncateSuccess) {
        // fixed typo in the original message ("Diction file")
        LOGGER.info("Dictionary file not truncated successfully for column "
            + this.dictionaryColumnUniqueIdentifier.getColumnIdentifier());
      }
    }
  }
  /**
   * This method will write the dictionary metadata file for a given column
   *
   * @throws IOException if an I/O error occurs
   */
  private void writeDictionaryMetadataFile() throws IOException {
    // Format of dictionary metadata file
    // min, max, start offset, end offset and chunk count
    int min_surrogate_key = 0;
    int max_surrogate_key = 0;
    // case 1: first time dictionary writing
    // previousMax = 0, totalRecordCount = 5, min = 1, max= 5
    // case2: file already exists
    // previousMax = 5, totalRecordCount = 10, min = 6, max = 15
    // case 3: no unique values, total records 0
    // previousMax = 15, totalRecordCount = 0, min = 15, max = 15
    // both min and max equal to previous max
    if (null != chunkMetaObjectForLastSegmentEntry) {
      if (0 == totalRecordCount) {
        min_surrogate_key = chunkMetaObjectForLastSegmentEntry.getMax_surrogate_key();
      } else {
        min_surrogate_key = chunkMetaObjectForLastSegmentEntry.getMax_surrogate_key() + 1;
      }
      max_surrogate_key =
          chunkMetaObjectForLastSegmentEntry.getMax_surrogate_key() + totalRecordCount;
    } else {
      if (totalRecordCount > 0) {
        min_surrogate_key = 1;
      }
      max_surrogate_key = totalRecordCount;
    }
    ColumnDictionaryChunkMeta dictionaryChunkMeta =
        new ColumnDictionaryChunkMeta(min_surrogate_key, max_surrogate_key, chunk_start_offset,
            chunk_end_offset, chunk_count);
    try {
      openThriftWriter(this.dictionaryMetaFilePath);
      // write dictionary metadata file
      writeThriftObject(dictionaryChunkMeta);
      LOGGER.info("Dictionary metadata file written successfully for column "
          + this.dictionaryColumnUniqueIdentifier.getColumnIdentifier() + " at path "
          + this.dictionaryMetaFilePath);
    } finally {
      closeThriftWriter();
    }
  }
  /**
   * open thrift writer for writing dictionary chunk/meta object
   *
   * @param dictionaryFile can be dictionary file name or dictionary metadata file name
   * @throws IOException if an I/O error occurs
   */
  private void openThriftWriter(String dictionaryFile) throws IOException {
    // create thrift writer instance
    dictionaryThriftWriter = new ThriftWriter(dictionaryFile, true);
    // open the file stream
    try {
      dictionaryThriftWriter.open();
    } catch (IOException e) {
      // Cases to handle
      // 1. Handle File lease recovery
      if (HDFSLeaseUtils.checkExceptionMessageForLeaseRecovery(e.getMessage())) {
        LOGGER.error(e, "Lease recovery exception encountered for file: " + dictionaryFile);
        boolean leaseRecovered = HDFSLeaseUtils.recoverFileLease(dictionaryFile);
        if (leaseRecovered) {
          // try to open output stream again after recovering the lease on file
          dictionaryThriftWriter.open();
        } else {
          throw e;
        }
      } else {
        throw e;
      }
    }
  }
  /**
   * This method will write the thrift object to a file
   *
   * @param dictionaryThriftObject can be dictionary thrift object or dictionary metadata
   *                               thrift object
   * @throws IOException if an I/O error occurs
   */
  private void writeThriftObject(TBase dictionaryThriftObject) throws IOException {
    dictionaryThriftWriter.write(dictionaryThriftObject);
  }
  /**
   * close dictionary thrift writer
   *
   * @throws IOException if an I/O error occurs
   */
  private void closeThriftWriter() throws IOException {
    if (null != dictionaryThriftWriter) {
      dictionaryThriftWriter.close();
    }
  }
  /**
   * This method will read the dictionary chunk metadata thrift object for last entry
   *
   * @return last entry of dictionary meta chunk
   * @throws IOException if an I/O error occurs
   */
  private CarbonDictionaryColumnMetaChunk getChunkMetaObjectForLastSegmentEntry()
      throws IOException {
    CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk = null;
    CarbonDictionaryMetadataReader columnMetadataReaderImpl = getDictionaryMetadataReader();
    try {
      // read the last segment entry for dictionary metadata
      carbonDictionaryColumnMetaChunk =
          columnMetadataReaderImpl.readLastEntryOfDictionaryMetaChunk();
    } finally {
      // Close metadata reader
      columnMetadataReaderImpl.close();
    }
    return carbonDictionaryColumnMetaChunk;
  }
  /**
   * @return a reader for this column's dictionary metadata file
   */
  protected CarbonDictionaryMetadataReader getDictionaryMetadataReader() {
    return new CarbonDictionaryMetadataReaderImpl(storePath, carbonTableIdentifier,
        dictionaryColumnUniqueIdentifier);
  }
  @Override public void commit() throws IOException {
    if (null != dictionaryThriftWriter && dictionaryThriftWriter.isOpen()) {
      // record the final size of the dictionary file as this segment's end offset
      this.chunk_end_offset = CarbonUtil.getFileSize(this.dictionaryFilePath);
      writeDictionaryMetadataFile();
    }
  }
}
| |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.provider.description.interestrate;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.forex.method.FXMatrix;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.instrument.index.IndexON;
import com.opengamma.analytics.financial.legalentity.LegalEntity;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.tuple.DoublesPair;
/**
 * Class describing a multi-curves provider created from an issuer provider in which the
 * discounting curve of one issuer replaces (decorates) the discounting curve of one currency.
 * All other curves and methods delegate to the underlying provider.
 */
public class MulticurveProviderDiscountingDecoratedIssuer implements MulticurveProviderInterface {
  /**
   * The underlying Issuer provider on which the multi-curves provider is based.
   */
  private final IssuerProviderInterface _issuerProvider;
  /**
   * The currency for which the discounting curve will be replaced (decorated).
   */
  private final Currency _decoratedCurrency;
  /**
   * The issuer for which the associated discounting curve will replace the currency discounting curve.
   */
  private final LegalEntity _decoratingIssuer;
  /**
   * Constructor.
   * @param issuerProvider The underlying issuer provider on which the multi-curves provider is based, not null
   * @param decoratedCurrency The currency for which the discounting curve will be replaced (decorated), not null
   * @param decoratingIssuer The issuer for which the associated discounting curve will replace the currency discounting curve, not null
   */
  public MulticurveProviderDiscountingDecoratedIssuer(final IssuerProviderInterface issuerProvider, final Currency decoratedCurrency, final LegalEntity decoratingIssuer) {
    ArgumentChecker.notNull(issuerProvider, "issuerProvider");
    ArgumentChecker.notNull(decoratedCurrency, "decoratedCurrency");
    ArgumentChecker.notNull(decoratingIssuer, "decoratingIssuer");
    _issuerProvider = issuerProvider;
    _decoratedCurrency = decoratedCurrency;
    _decoratingIssuer = decoratingIssuer;
  }
  @Override
  public MulticurveProviderInterface getMulticurveProvider() {
    return this;
  }
  @Override
  public MulticurveProviderInterface copy() {
    // Copies the underlying provider; currency and issuer references are immutable inputs.
    return new MulticurveProviderDiscountingDecoratedIssuer(_issuerProvider.copy(), _decoratedCurrency, _decoratingIssuer);
  }
  @Override
  public double getDiscountFactor(final Currency ccy, final Double time) {
    // The decoration: for the decorated currency, substitute the issuer's discounting curve.
    if (ccy.equals(_decoratedCurrency)) {
      return _issuerProvider.getDiscountFactor(_decoratingIssuer, time);
    }
    return _issuerProvider.getMulticurveProvider().getDiscountFactor(ccy, time);
  }
  // The forward-rate and investment-factor methods below delegate unchanged to the
  // underlying provider's multi-curve provider: decoration only affects discounting.
  @Override
  public double getInvestmentFactor(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getInvestmentFactor(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getSimplyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getSimplyCompoundForwardRate(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getSimplyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime) {
    return _issuerProvider.getMulticurveProvider().getSimplyCompoundForwardRate(index, startTime, endTime);
  }
  @Override
  public double getAnnuallyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getAnnuallyCompoundForwardRate(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getAnnuallyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime) {
    return _issuerProvider.getMulticurveProvider().getAnnuallyCompoundForwardRate(index, startTime, endTime);
  }
  @Override
  public double getInvestmentFactor(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getInvestmentFactor(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getSimplyCompoundForwardRate(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getSimplyCompoundForwardRate(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getSimplyCompoundForwardRate(final IndexON index, final double startTime, final double endTime) {
    return _issuerProvider.getMulticurveProvider().getSimplyCompoundForwardRate(index, startTime, endTime);
  }
  @Override
  public double getAnnuallyCompoundForwardRate(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
    return _issuerProvider.getMulticurveProvider().getAnnuallyCompoundForwardRate(index, startTime, endTime, accrualFactor);
  }
  @Override
  public double getAnnuallyCompoundForwardRate(final IndexON index, final double startTime, final double endTime) {
    return _issuerProvider.getMulticurveProvider().getAnnuallyCompoundForwardRate(index, startTime, endTime);
  }
  @Override
  public double getFxRate(final Currency ccy1, final Currency ccy2) {
    return _issuerProvider.getMulticurveProvider().getFxRate(ccy1, ccy2);
  }
  // Parameter-sensitivity queries delegate to the issuer provider itself (not its
  // multi-curve provider), so issuer curves are visible by name.
  @Override
  public double[] parameterSensitivity(final String name, final List<DoublesPair> pointSensitivity) {
    return _issuerProvider.parameterSensitivity(name, pointSensitivity);
  }
  @Override
  public double[] parameterForwardSensitivity(final String name, final List<ForwardSensitivity> pointSensitivity) {
    return _issuerProvider.parameterForwardSensitivity(name, pointSensitivity);
  }
  @Override
  public Integer getNumberOfParameters(final String name) {
    return _issuerProvider.getNumberOfParameters(name);
  }
  @Override
  public List<String> getUnderlyingCurvesNames(final String name) {
    return _issuerProvider.getUnderlyingCurvesNames(name);
  }
  @Override
  public String getName(final Currency ccy) {
    // Mirror of getDiscountFactor: for the decorated currency, report the issuer curve's name.
    if (ccy.equals(_decoratedCurrency)) {
      return _issuerProvider.getName(_decoratingIssuer);
    }
    return _issuerProvider.getMulticurveProvider().getName(ccy);
  }
  @Override
  public Set<Currency> getCurrencies() {
    return _issuerProvider.getMulticurveProvider().getCurrencies();
  }
  @Override
  public String getName(final IborIndex index) {
    return _issuerProvider.getMulticurveProvider().getName(index);
  }
  @Override
  public Set<IborIndex> getIndexesIbor() {
    return _issuerProvider.getMulticurveProvider().getIndexesIbor();
  }
  @Override
  public String getName(final IndexON index) {
    return _issuerProvider.getMulticurveProvider().getName(index);
  }
  @Override
  public Set<IndexON> getIndexesON() {
    return _issuerProvider.getMulticurveProvider().getIndexesON();
  }
  @Override
  public FXMatrix getFxRates() {
    return _issuerProvider.getMulticurveProvider().getFxRates();
  }
  @Override
  public Set<String> getAllNames() {
    return _issuerProvider.getAllCurveNames();
  }
  @Override
  public Set<String> getAllCurveNames() {
    return _issuerProvider.getAllCurveNames();
  }
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _decoratedCurrency.hashCode();
    result = prime * result + _decoratingIssuer.hashCode();
    result = prime * result + _issuerProvider.hashCode();
    return result;
  }
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    // instanceof handles null.
    if (!(obj instanceof MulticurveProviderDiscountingDecoratedIssuer)) {
      return false;
    }
    final MulticurveProviderDiscountingDecoratedIssuer other = (MulticurveProviderDiscountingDecoratedIssuer) obj;
    if (!ObjectUtils.equals(_decoratedCurrency, other._decoratedCurrency)) {
      return false;
    }
    if (!ObjectUtils.equals(_decoratingIssuer, other._decoratingIssuer)) {
      return false;
    }
    if (!ObjectUtils.equals(_issuerProvider, other._issuerProvider)) {
      return false;
    }
    return true;
  }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.d;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableProperties;
import com.facebook.buck.rules.ExternalTestRunnerRule;
import com.facebook.buck.rules.ExternalTestRunnerTestSpec;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.NoopBuildRule;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TestRule;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestRunningOptions;
import com.facebook.buck.test.result.type.ResultType;
import com.google.common.base.Functions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@SuppressWarnings("PMD.TestClassWithoutTestCases")
public class DTest extends NoopBuildRule implements TestRule, ExternalTestRunnerRule {
private ImmutableSortedSet<String> contacts;
private ImmutableSortedSet<Label> labels;
private ImmutableSet<BuildRule> sourceUnderTest;
private final Path testBinary;
/**
 * Creates a D test rule wrapping an already-built test binary.
 *
 * @param params build rule parameters for the no-op build rule
 * @param resolver source path resolver passed to the superclass
 * @param testBinary path to the compiled test executable to run
 * @param contacts owners to notify about this test
 * @param labels labels used to select/filter this test
 * @param sourceUnderTest rules whose sources this test covers
 */
public DTest(
    BuildRuleParams params,
    SourcePathResolver resolver,
    Path testBinary,
    ImmutableSortedSet<String> contacts,
    ImmutableSortedSet<Label> labels,
    ImmutableSet<BuildRule> sourceUnderTest) {
  super(params, resolver);
  this.contacts = contacts;
  this.labels = labels;
  this.sourceUnderTest = sourceUnderTest;
  this.testBinary = testBinary;
}
@Override
public ImmutableSet<String> getContacts() {
  return contacts;
}
/**
 * @return the command line to invoke the test binary, with the binary path
 *     resolved against the given filesystem.
 */
public ImmutableList<String> getExecutableCommand(ProjectFilesystem projectFilesystem) {
  return ImmutableList.of(projectFilesystem.resolve(testBinary).toString());
}
@Override
public ImmutableSet<Label> getLabels() {
  return labels;
}
/**
 * @return the path to the file in which the test command's exit code is recorded.
 */
protected Path getPathToTestExitCode() {
  return getPathToTestOutputDirectory().resolve("exitCode");
}
/**
 * @return the path to which the test command's output is written.
 */
protected Path getPathToTestOutput() {
  return getPathToTestOutputDirectory().resolve("output");
}
@Override
public Path getPathToTestOutputDirectory() {
  // Per-target gen directory; %s is substituted with the target's flavored name.
  return BuildTargets.getGenPath(
      getBuildTarget(),
      "__test_%s_output__");
}
@Override
public BuildableProperties getProperties() {
  return new BuildableProperties(BuildableProperties.Kind.TEST);
}
// Command used by runTests, resolved against this rule's own filesystem.
private ImmutableList<String> getShellCommand() {
  return getExecutableCommand(getProjectFilesystem());
}
@Override
public ImmutableSet<BuildRule> getSourceUnderTest() {
  return sourceUnderTest;
}
@Override
public boolean hasTestResultFiles(ExecutionContext executionContext) {
  // Presence of the output file is taken as evidence of a prior run.
  return getProjectFilesystem().isFile(getPathToTestOutput());
}
/**
 * Builds a callable that converts the exit-code and output files written by the test
 * step into Buck {@link TestResults}. A recorded exit code of zero is the only signal
 * of success; on failure, the first line of the output is parsed for a test name and
 * message, and the full output is reported as the stack trace.
 */
@Override
public Callable<TestResults> interpretTestResults(
    final ExecutionContext executionContext,
    boolean isUsingTestSelectors,
    final boolean isDryRun) {
  return new Callable<TestResults>() {
    @Override
    public TestResults call() throws Exception {
      ResultType resultType = ResultType.FAILURE;
      // Successful exit indicates success.
      try {
        // trim() tolerates a trailing newline in the exit-code file, which would
        // otherwise make Integer.parseInt throw.
        int exitCode = Integer.parseInt(
            new String(Files.readAllBytes(
                getProjectFilesystem().resolve(
                    getPathToTestExitCode()))).trim());
        if (exitCode == 0) {
          resultType = ResultType.SUCCESS;
        }
      } catch (IOException | NumberFormatException e) {
        // An unreadable or malformed exit-code file means something went awry,
        // so it's a failure rather than a crash of result interpretation.
        resultType = ResultType.FAILURE;
      }
      String testOutput = getProjectFilesystem().readFileIfItExists(
          getPathToTestOutput()).or("");
      String message = "";
      String stackTrace = "";
      String testName = "";
      if (resultType == ResultType.FAILURE && !testOutput.isEmpty()) {
        // We don't get any information on successful runs, but failures usually come with
        // some information. This code parses it.
        int firstNewline = testOutput.indexOf('\n');
        String firstLine = firstNewline == -1
            ? testOutput
            : testOutput.substring(0, firstNewline);
        // First line has format <Exception name>@<location>: <message>
        // Use <location> as test name, and <message> as message.
        Pattern firstLinePattern = Pattern.compile("^[^@]*@([^:]*): (.*)");
        Matcher m = firstLinePattern.matcher(firstLine);
        if (m.matches()) {
          testName = m.group(1);
          message = m.group(2);
        }
        // The whole output is actually a stack trace.
        stackTrace = testOutput;
      }
      TestResultSummary summary = new TestResultSummary(
          getBuildTarget().getShortName(),
          testName,
          resultType,
          /* time */ 0,
          message,
          stackTrace,
          testOutput,
          /* stderr */ "");
      return new TestResults(
          getBuildTarget(),
          ImmutableList.of(
              new TestCaseSummary(
                  "main",
                  ImmutableList.of(summary))
          ),
          contacts,
          FluentIterable.from(labels).transform(Functions.toStringFunction()).toSet());
    }
  };
}
@Override
public ImmutableList<Step> runTests(
BuildContext buildContext,
ExecutionContext executionContext,
TestRunningOptions options,
TestRule.TestReportingCallback testReportingCallback) {
if (options.isDryRun()) {
return ImmutableList.of();
} else {
return ImmutableList.of(
new MakeCleanDirectoryStep(getProjectFilesystem(), getPathToTestOutputDirectory()),
new DTestStep(
getProjectFilesystem(),
getShellCommand(),
getPathToTestExitCode(),
getPathToTestOutput()));
}
}
  /** This test does not require exclusive execution. */
  @Override
  public boolean runTestSeparately() {
    return false;
  }
  /** Streaming of test events is not supported; results are parsed after the run. */
  @Override
  public boolean supportsStreamingTests() {
    return false;
  }
  /** Describes this test (type "dunit") for an external test runner. */
  @Override
  public ExternalTestRunnerTestSpec getExternalTestRunnerSpec(
      ExecutionContext executionContext,
      TestRunningOptions testRunningOptions) {
    return ExternalTestRunnerTestSpec.builder()
        .setTarget(getBuildTarget())
        .setType("dunit")
        .setCommand(getShellCommand())
        .setLabels(getLabels())
        .setContacts(getContacts())
        .build();
  }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package OCR;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author starktony
*/
/**
 * Data-access object for PROFILE_MASTER (and dependent IMAGE_MASTER rows)
 * in the OCR SQL Server database.
 *
 * <p>Fixed relative to the original implementation: every method returned
 * from inside a {@code finally} block, which discarded the try-block's
 * intended return value and silently swallowed any in-flight exception;
 * {@code deleteProfile} leaked its first PreparedStatement when the field
 * was reassigned.  All methods now use try-with-resources with method-local
 * JDBC objects, which also makes the DAO safe for concurrent use.
 */
public class ProfileDAO {

    // NOTE(review): database credentials are hard-coded in the JDBC URL;
    // move them to external configuration or a secrets store.
    String sqlConnUrl = "jdbc:sqlserver://localhost:60299;DatabaseName=OCR;user=ocrsystem;Password=Initial1";

    // Retained only for source compatibility with code that may reference
    // these package-private fields; the methods below no longer use them.
    Connection sqlConn = null;
    PreparedStatement sqlStmt = null;
    ResultSet sqlRs = null;

    /** Loads the SQL Server driver and opens a fresh connection. */
    private Connection openConnection() throws Exception {
        Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
        return DriverManager.getConnection(sqlConnUrl);
    }

    /**
     * Inserts the given profile into PROFILE_MASTER.
     *
     * @return "Profile Saved" on success, or the exception's string form on failure
     */
    public String saveProfile(ProfileDTO profileDTO) {
        String sqlQuery = "INSERT INTO [OCR].[dbo].[PROFILE_MASTER] VALUES (?,?,?,?)";
        try (Connection conn = openConnection();
                PreparedStatement stmt = conn.prepareStatement(sqlQuery)) {
            stmt.setString(1, profileDTO.getProfileId());
            stmt.setLong(2, profileDTO.getUserId());
            stmt.setString(3, profileDTO.getProfileName());
            stmt.setString(4, profileDTO.getProfileDetails());
            stmt.executeUpdate();
            return "Profile Saved";
        } catch (Exception e) {
            e.printStackTrace();
            return e.toString();
        }
    }

    /**
     * Populates {@code profileDTO} with the columns of the row matching its
     * profile id (if any rows match, the last one wins, as before).
     *
     * @return the (possibly unmodified) DTO, or {@code null} on error
     */
    public ProfileDTO loadProfile(ProfileDTO profileDTO) {
        String sqlQuery = "SELECT * FROM [OCR].[dbo].[PROFILE_MASTER] WHERE PROFILEID = ?";
        try (Connection conn = openConnection();
                PreparedStatement stmt = conn.prepareStatement(sqlQuery)) {
            stmt.setString(1, profileDTO.getProfileId());
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    profileDTO.setProfileId(rs.getString(1));
                    profileDTO.setUserId(rs.getLong(2));
                    profileDTO.setProfileName(rs.getString(3));
                    profileDTO.setProfileDetails(rs.getString(4));
                }
            }
            return profileDTO;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Updates the name and details of the profile identified by its id.
     *
     * @return "Profile Updated" on success, or the exception's string form on failure
     */
    public String updateProfile(ProfileDTO profileDTO) {
        String sqlQuery = "UPDATE [OCR].[dbo].[PROFILE_MASTER] SET PROFILENAME = ?, PROFILEDETAILS = ? "
                + "WHERE PROFILEID = ?";
        try (Connection conn = openConnection();
                PreparedStatement stmt = conn.prepareStatement(sqlQuery)) {
            stmt.setString(1, profileDTO.getProfileName());
            stmt.setString(2, profileDTO.getProfileDetails());
            stmt.setString(3, profileDTO.getProfileId());
            stmt.executeUpdate();
            return "Profile Updated";
        } catch (Exception e) {
            e.printStackTrace();
            return e.toString();
        }
    }

    /**
     * Deletes a profile's IMAGE_MASTER rows and then the profile itself.
     *
     * <p>NOTE(review): the two deletes are not wrapped in a transaction, so a
     * failure between them leaves images deleted but the profile intact —
     * this matches the original behavior; confirm whether a transaction is
     * actually wanted.
     *
     * @return "Profile Deleted" on success, or the exception's string form on failure
     */
    public String deleteProfile(ProfileDTO profileDTO) {
        try (Connection conn = openConnection()) {
            try (PreparedStatement stmt = conn.prepareStatement(
                    "DELETE FROM [OCR].[dbo].[IMAGE_MASTER] WHERE PROFILEID = ?")) {
                stmt.setString(1, profileDTO.getProfileId());
                stmt.executeUpdate();
            }
            try (PreparedStatement stmt = conn.prepareStatement(
                    "DELETE FROM [OCR].[dbo].[PROFILE_MASTER] WHERE PROFILEID = ?")) {
                stmt.setString(1, profileDTO.getProfileId());
                stmt.executeUpdate();
            }
            return "Profile Deleted";
        } catch (Exception e) {
            e.printStackTrace();
            return e.toString();
        }
    }

    /**
     * Lists the profile ids belonging to the user identified by
     * {@code profileDTO.getUserId()}.
     *
     * @return one DTO per matching row (only the profile id populated),
     *     or {@code null} on error
     */
    public List<ProfileDTO> loadProfilesList(ProfileDTO profileDTO) {
        String sqlQuery = "SELECT PROFILEID FROM [OCR].[dbo].[PROFILE_MASTER] WHERE USERID = ?";
        List<ProfileDTO> profileDTOList = new ArrayList<>();
        try (Connection conn = openConnection();
                PreparedStatement stmt = conn.prepareStatement(sqlQuery)) {
            stmt.setLong(1, profileDTO.getUserId());
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    ProfileDTO row = new ProfileDTO();
                    row.setProfileId(rs.getString(1));
                    profileDTOList.add(row);
                }
            }
            return profileDTOList;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
| |
/*
* Copyright (C) 2007, 2014 XStream Committers.
* All rights reserved.
*
* The software in this package is published under the terms of the BSD
* style license a copy of which has been included with this distribution in
* the LICENSE.txt file.
*
* Created on 29. July 2007 by Joerg Schaible
*/
package com.thoughtworks.xstream.converters.reflection;
import com.thoughtworks.acceptance.objects.StandardObject;
import com.thoughtworks.xstream.XStream;
import junit.framework.TestCase;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.io.Serializable;
/**
* @author Jörg Schaible
*/
public class SerializableConverterTest extends TestCase {

    // Fixture whose no-op writeObject/readObject force XStream down its
    // "custom serialization" code path.  NOTE: the declared field order
    // (one, two) is reflected in the expected XML literals below.
    static class SimpleType extends StandardObject {
        private String one;
        private String two;

        public String getOne() {
            return this.one;
        }

        public void setOne(String one) {
            this.one = one;
        }

        public String getTwo() {
            return this.two;
        }

        public void setTwo(String two) {
            this.two = two;
        }

        private void writeObject(final ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }

        private void readObject(final ObjectInputStream in)
            throws IOException, ClassNotFoundException {
            in.defaultReadObject();
        }
    }

    // omitField must drop "two" from the custom-serialized output.
    public void testCanOmitFieldAtSerialization() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleType.class);
        xstream.omitField(SimpleType.class, "two");
        String expected = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <one>one</one>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleType simple = new SimpleType();
        simple.setOne("one");
        simple.setTwo("two");
        String xml = xstream.toXML(simple);
        assertEquals(expected, xml);
    }

    // omitField must ignore both known ("two") and unknown ("x") elements on input.
    public void testCanOmitFieldAtDeserialization() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleType.class);
        xstream.omitField(SimpleType.class, "two");
        xstream.omitField(SimpleType.class, "x");
        String xml = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <one>one</one>\n"
            + "      <x>x</x>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleType simple = new SimpleType();
        simple.setOne("one");
        SimpleType serialized = (SimpleType)xstream.fromXML(xml);
        assertEquals(simple, serialized);
    }

    // Subclass with its own no-op writeObject/readObject, producing one
    // <default> section per class level in the custom-serialization format.
    static class ExtendedType extends SimpleType {
        private String three;

        public String getThree() {
            return this.three;
        }

        public void setThree(String three) {
            this.three = three;
        }

        private void writeObject(final ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }

        private void readObject(final ObjectInputStream in)
            throws IOException, ClassNotFoundException {
            in.defaultReadObject();
        }
    }

    // An omitField registered on the superclass also applies when writing a subclass.
    public void testCanOmitInheritedFieldAtSerialization() {
        XStream xstream = new XStream();
        xstream.alias("extended", ExtendedType.class);
        xstream.alias("simple", SimpleType.class);
        xstream.omitField(SimpleType.class, "two");
        String expected = ""
            + "<extended serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <one>one</one>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "  <extended>\n"
            + "    <default>\n"
            + "      <three>three</three>\n"
            + "    </default>\n"
            + "  </extended>\n"
            + "</extended>";
        ExtendedType extended = new ExtendedType();
        extended.setOne("one");
        extended.setTwo("two");
        extended.setThree("three");
        String xml = xstream.toXML(extended);
        assertEquals(expected, xml);
    }

    // ... and likewise when reading a subclass, including unknown elements.
    public void testCanOmitInheritedFieldAtDeserialization() {
        XStream xstream = new XStream();
        xstream.alias("extended", ExtendedType.class);
        xstream.alias("simple", SimpleType.class);
        xstream.omitField(SimpleType.class, "two");
        xstream.omitField(SimpleType.class, "x");
        String xml = ""
            + "<extended serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <one>one</one>\n"
            + "      <x>x</x>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "  <extended>\n"
            + "    <default>\n"
            + "      <three>three</three>\n"
            + "    </default>\n"
            + "  </extended>\n"
            + "</extended>";
        ExtendedType extended = new ExtendedType();
        extended.setOne("one");
        extended.setThree("three");
        SimpleType serialized = (SimpleType)xstream.fromXML(xml);
        assertEquals(extended, serialized);
    }

    // Fixture using serialPersistentFields plus PutField/GetField, so the
    // serialized field names ("s1"/"s2") differ from the Java field names.
    public static class SimpleNamedFieldsType extends StandardObject implements Serializable {
        private String one;
        private String two;

        public String getOne() {
            return this.one;
        }

        public void setOne(String one) {
            this.one = one;
        }

        public String getTwo() {
            return this.two;
        }

        public void setTwo(String two) {
            this.two = two;
        }

        private static final ObjectStreamField[] serialPersistentFields = {
            new ObjectStreamField("s1", String.class),
            new ObjectStreamField("s2", String.class),
        };

        private void writeObject(ObjectOutputStream out) throws IOException {
            // don't call defaultWriteObject()
            ObjectOutputStream.PutField fields = out.putFields();
            fields.put("s1", one);
            fields.put("s2", two);
            out.writeFields();
        }

        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            // don't call defaultReadObject()
            ObjectInputStream.GetField fields = in.readFields();
            one = (String) fields.get("s1", "1");
            two = (String) fields.get("s2", "2");
        }
    }

    // omitField works on the *serialized* name ("s2"), not the Java field name.
    public void testCanOmitNamedFieldAtSerialization() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleNamedFieldsType.class);
        xstream.omitField(SimpleNamedFieldsType.class, "s2");
        String expected = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <s1>one</s1>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleNamedFieldsType simple = new SimpleNamedFieldsType();
        simple.setOne("one");
        simple.setTwo("two");
        String xml = xstream.toXML(simple);
        assertEquals(expected, xml);
    }

    // On input the omitted "s2" falls back to GetField's default ("2").
    public void testCanOmitNamedFieldAtDeserialization() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleNamedFieldsType.class);
        xstream.omitField(SimpleNamedFieldsType.class, "s2");
        xstream.omitField(SimpleNamedFieldsType.class, "x");
        String xml = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <s1>one</s1>\n"
            + "      <x>x</x>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleNamedFieldsType simple = new SimpleNamedFieldsType();
        simple.setOne("one");
        simple.setTwo("2");
        SimpleNamedFieldsType serialized = (SimpleNamedFieldsType)xstream.fromXML(xml);
        assertEquals(simple, serialized);
    }

    // aliasField renames a regular field in both directions.
    public void testCanAliasField() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleType.class);
        xstream.aliasField("s2", SimpleType.class, "two");
        String expected = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <one>one</one>\n"
            + "      <s2>two</s2>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleType simple = new SimpleType();
        simple.setOne("one");
        simple.setTwo("two");
        String xml = xstream.toXML(simple);
        assertEquals(expected, xml);
        SimpleType serialized = (SimpleType)xstream.fromXML(xml);
        assertEquals(simple, serialized);
    }

    // aliasField also renames a serialPersistentFields name in both directions.
    public void testCanAliasNamedField() {
        XStream xstream = new XStream();
        xstream.alias("simple", SimpleNamedFieldsType.class);
        xstream.aliasField("two", SimpleNamedFieldsType.class, "s2");
        String expected = ""
            + "<simple serialization=\"custom\">\n"
            + "  <simple>\n"
            + "    <default>\n"
            + "      <s1>one</s1>\n"
            + "      <two>two</two>\n"
            + "    </default>\n"
            + "  </simple>\n"
            + "</simple>";
        SimpleNamedFieldsType simple = new SimpleNamedFieldsType();
        simple.setOne("one");
        simple.setTwo("two");
        String xml = xstream.toXML(simple);
        assertEquals(expected, xml);
        SimpleNamedFieldsType serialized = (SimpleNamedFieldsType)xstream.fromXML(xml);
        assertEquals(simple, serialized);
    }

    // Fixture whose field is declared with the Serializable *interface* type.
    public static class SerializableType implements Serializable {
        public Serializable serializable;
    }

    // A field typed as Serializable round-trips with an explicit class attribute.
    public void testCanHandleFieldsDeclaredWithSerializableInterface() {
        XStream xstream = new XStream();
        xstream.alias("sertype", SerializableType.class);
        xstream.useAttributeFor(SerializableType.class, "serializable");
        String expected = ""
            + "<sertype>\n"
            + "  <serializable class=\"string\">String</serializable>\n"
            + "</sertype>";
        SerializableType s = new SerializableType();
        s.serializable = "String";
        String xml = xstream.toXML(s);
        assertEquals(expected, xml);
        SerializableType serialized = (SerializableType)xstream.fromXML(xml);
        assertEquals(s.serializable, serialized.serializable);
    }
}
| |
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package rx.internal.operators;
import org.junit.Test;
import rx.Observable;
import rx.exceptions.*;
import rx.functions.Func1;
import rx.observers.TestSubscriber;
import rx.subjects.PublishSubject;
public class OnSubscribeConcatDelayErrorTest {
@Test
public void mainCompletes() {
PublishSubject<Integer> source = PublishSubject.create();
TestSubscriber<Integer> ts = TestSubscriber.create();
source.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return Observable.range(v, 2);
}
}).subscribe(ts);
source.onNext(1);
source.onNext(2);
source.onCompleted();
ts.assertValues(1, 2, 2, 3);
ts.assertNoErrors();
ts.assertCompleted();
}
@Test
public void mainErrors() {
PublishSubject<Integer> source = PublishSubject.create();
TestSubscriber<Integer> ts = TestSubscriber.create();
source.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return Observable.range(v, 2);
}
}).subscribe(ts);
source.onNext(1);
source.onNext(2);
source.onError(new TestException());
ts.assertValues(1, 2, 2, 3);
ts.assertError(TestException.class);
ts.assertNotCompleted();
}
@Test
public void innerErrors() {
final Observable<Integer> inner = Observable.range(1, 2).concatWith(Observable.<Integer>error(new TestException()));
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.range(1, 3).concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return inner;
}
}).subscribe(ts);
ts.assertValues(1, 2, 1, 2, 1, 2);
ts.assertError(CompositeException.class);
ts.assertNotCompleted();
}
@Test
public void singleInnerErrors() {
final Observable<Integer> inner = Observable.range(1, 2).concatWith(Observable.<Integer>error(new TestException()));
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.just(1)
.asObservable() // prevent scalar optimization
.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return inner;
}
}).subscribe(ts);
ts.assertValues(1, 2);
ts.assertError(TestException.class);
ts.assertNotCompleted();
}
@Test
public void innerNull() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.just(1)
.asObservable() // prevent scalar optimization
.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return null;
}
}).subscribe(ts);
ts.assertNoValues();
ts.assertError(NullPointerException.class);
ts.assertNotCompleted();
}
@Test
public void innerThrows() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.just(1)
.asObservable() // prevent scalar optimization
.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
throw new TestException();
}
}).subscribe(ts);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotCompleted();
}
@Test
public void innerWithEmpty() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.range(1, 3)
.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return v == 2 ? Observable.<Integer>empty() : Observable.range(1, 2);
}
}).subscribe(ts);
ts.assertValues(1, 2, 1, 2);
ts.assertNoErrors();
ts.assertCompleted();
}
@Test
public void innerWithScalar() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Observable.range(1, 3)
.concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return v == 2 ? Observable.just(3) : Observable.range(1, 2);
}
}).subscribe(ts);
ts.assertValues(1, 2, 3, 1, 2);
ts.assertNoErrors();
ts.assertCompleted();
}
@Test
public void backpressure() {
TestSubscriber<Integer> ts = TestSubscriber.create(0);
Observable.range(1, 3).concatMapDelayError(new Func1<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> call(Integer v) {
return Observable.range(v, 2);
}
}).subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotCompleted();
ts.requestMore(1);
ts.assertValues(1);
ts.assertNoErrors();
ts.assertNotCompleted();
ts.requestMore(3);
ts.assertValues(1, 2, 2, 3);
ts.assertNoErrors();
ts.assertNotCompleted();
ts.requestMore(2);
ts.assertValues(1, 2, 2, 3, 3, 4);
ts.assertNoErrors();
ts.assertCompleted();
}
}
| |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
package com.google.crypto.tink;
import static com.google.common.truth.Truth.assertThat;
import static com.google.crypto.tink.testing.TestUtil.assertExceptionContains;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.crypto.tink.aead.AeadConfig;
import com.google.crypto.tink.aead.AesEaxKeyManager;
import com.google.crypto.tink.aead.AesGcmKeyManager;
import com.google.crypto.tink.config.TinkConfig;
import com.google.crypto.tink.config.internal.TinkFipsUtil;
import com.google.crypto.tink.mac.MacConfig;
import com.google.crypto.tink.mac.MacKeyTemplates;
import com.google.crypto.tink.proto.AesEaxKey;
import com.google.crypto.tink.proto.AesGcmKey;
import com.google.crypto.tink.proto.AesGcmKeyFormat;
import com.google.crypto.tink.proto.Ed25519KeyFormat;
import com.google.crypto.tink.proto.Ed25519PrivateKey;
import com.google.crypto.tink.proto.Ed25519PublicKey;
import com.google.crypto.tink.proto.HashType;
import com.google.crypto.tink.proto.HmacKey;
import com.google.crypto.tink.proto.KeyData;
import com.google.crypto.tink.proto.KeyData.KeyMaterialType;
import com.google.crypto.tink.proto.KeyStatusType;
import com.google.crypto.tink.proto.Keyset;
import com.google.crypto.tink.proto.OutputPrefixType;
import com.google.crypto.tink.signature.EcdsaSignKeyManager;
import com.google.crypto.tink.signature.SignatureKeyTemplates;
import com.google.crypto.tink.subtle.AesEaxJce;
import com.google.crypto.tink.subtle.AesGcmJce;
import com.google.crypto.tink.subtle.PrfMac;
import com.google.crypto.tink.subtle.Random;
import com.google.crypto.tink.testing.TestUtil;
import com.google.crypto.tink.testing.TestUtil.DummyAead;
import com.google.protobuf.ByteString;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for Registry. */
@RunWith(JUnit4.class)
public class RegistryTest {
  // Minimal KeyManager stub: returns DummyAead primitives, supports exactly
  // one type URL, and rejects newKey().  Used to exercise Registry's
  // registration rules without touching real crypto.
  private static class CustomAeadKeyManager implements KeyManager<Aead> {
    public CustomAeadKeyManager(String typeUrl) {
      this.typeUrl = typeUrl;
    }

    // The single key type URL this manager claims to support.
    private final String typeUrl;

    @Override
    public Aead getPrimitive(ByteString proto) throws GeneralSecurityException {
      return new DummyAead();
    }

    @Override
    public Aead getPrimitive(MessageLite proto) throws GeneralSecurityException {
      return new DummyAead();
    }

    @Override
    public MessageLite newKey(ByteString template) throws GeneralSecurityException {
      throw new GeneralSecurityException("Not Implemented");
    }

    @Override
    public MessageLite newKey(MessageLite template) throws GeneralSecurityException {
      throw new GeneralSecurityException("Not Implemented");
    }

    @Override
    public KeyData newKeyData(ByteString serializedKeyFormat) throws GeneralSecurityException {
      // Echoes the serialized format back as the key material.
      return KeyData.newBuilder()
          .setTypeUrl(typeUrl)
          .setValue(serializedKeyFormat)
          .build();
    }

    @Override
    public boolean doesSupport(String typeUrl) {
      return typeUrl.equals(this.typeUrl);
    }

    @Override
    public String getKeyType() {
      return this.typeUrl;
    }

    @Override
    public int getVersion() {
      return 0;
    }

    @Override
    public Class<Aead> getPrimitiveClass() {
      return Aead.class;
    }
  }
  // A reduced primitive exposing only encryption, used to test wrapping
  // Aead into a different primitive class.
  private static interface EncryptOnly {
    byte[] encrypt(final byte[] plaintext) throws GeneralSecurityException;
  }
private static class AeadToEncryptOnlyWrapper implements PrimitiveWrapper<Aead, EncryptOnly> {
@Override
public EncryptOnly wrap(PrimitiveSet<Aead> set) throws GeneralSecurityException {
return new EncryptOnly() {
@Override
public byte[] encrypt(final byte[] plaintext)
throws GeneralSecurityException {
return set.getPrimary().getPrimitive().encrypt(plaintext, new byte[0]);
}
};
}
@Override
public Class<EncryptOnly> getPrimitiveClass() {
return EncryptOnly.class;
}
@Override
public Class<Aead> getInputPrimitiveClass() {
return Aead.class;
}
}
  // Resets the Registry to a known state before each test: clear FIPS
  // restriction, wipe all registrations, re-register the Tink defaults,
  // and add the EncryptOnly wrapper used by several tests.
  @Before
  public void setUp() throws GeneralSecurityException {
    TinkFipsUtil.unsetFipsRestricted();
    Registry.reset();
    TinkConfig.register();
    Registry.registerPrimitiveWrapper(new AeadToEncryptOnlyWrapper());
  }
  // Helper: asserts the manager registered for typeUrl has the given class name.
  private void testGetKeyManagerShouldWork(String typeUrl, String className) throws Exception {
    assertThat(Registry.getKeyManager(typeUrl).getClass().toString()).contains(className);
  }
  // Legacy one-argument lookup returns KeyManagerImpl wrappers for default types.
  @Test
  public void testGetKeyManager_legacy_shouldWork() throws Exception {
    testGetKeyManagerShouldWork(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL, "KeyManagerImpl");
    testGetKeyManagerShouldWork(AeadConfig.AES_EAX_TYPE_URL, "KeyManagerImpl");
    testGetKeyManagerShouldWork(MacConfig.HMAC_TYPE_URL, "KeyManagerImpl");
  }
  // Typed lookup (type URL + primitive class) works for AES-EAX.
  @Test
  public void testGetKeyManager_shouldWorkAesEax() throws Exception {
    assertThat(
            Registry.getKeyManager(AeadConfig.AES_EAX_TYPE_URL, Aead.class).getClass().toString())
        .contains("KeyManagerImpl");
  }
  // Typed lookup works for HMAC with the Mac primitive.
  @Test
  public void testGetKeyManager_shouldWorkHmac() throws Exception {
    assertThat(Registry.getKeyManager(MacConfig.HMAC_TYPE_URL, Mac.class).getClass().toString())
        .contains("KeyManagerImpl");
  }
  // Legacy lookup is untyped, so using the manager with the wrong primitive
  // type only fails later with a ClassCastException naming both classes.
  @Test
  public void testGetKeyManager_legacy_wrongType_shouldThrowException() throws Exception {
    KeyManager<Aead> wrongType = Registry.getKeyManager(MacConfig.HMAC_TYPE_URL);
    HmacKey hmacKey = (HmacKey) Registry.newKey(MacKeyTemplates.HMAC_SHA256_128BITTAG);
    ClassCastException e =
        assertThrows(
            ClassCastException.class,
            () -> {
              Aead unused = wrongType.getPrimitive(hmacKey);
            });
    assertExceptionContains(e, "com.google.crypto.tink.Aead");
    assertExceptionContains(e, "com.google.crypto.tink.subtle.PrfMac");
  }
  // Typed lookup fails fast with a GeneralSecurityException naming both primitives.
  @Test
  public void testGetKeyManager_wrongType_shouldThrowException() throws Exception {
    GeneralSecurityException e =
        assertThrows(
            GeneralSecurityException.class,
            () -> Registry.getKeyManager(MacConfig.HMAC_TYPE_URL, Aead.class));
    assertExceptionContains(e, "com.google.crypto.tink.Mac");
    assertExceptionContains(e, "com.google.crypto.tink.Aead not supported");
  }
  // Unknown type URL (legacy lookup) reports "No key manager found" plus the URL.
  @Test
  public void testGetKeyManager_legacy_badTypeUrl_shouldThrowException() throws Exception {
    String badTypeUrl = "bad type URL";
    GeneralSecurityException e =
        assertThrows(GeneralSecurityException.class, () -> Registry.getKeyManager(badTypeUrl));
    assertExceptionContains(e, "No key manager found");
    assertExceptionContains(e, badTypeUrl);
  }
  // Unknown type URL (typed lookup) reports "No key manager found" plus the URL.
  @Test
  public void testGetKeyManager_badTypeUrl_shouldThrowException() throws Exception {
    String badTypeUrl = "bad type URL";
    GeneralSecurityException e =
        assertThrows(
            GeneralSecurityException.class, () -> Registry.getKeyManager(badTypeUrl, Aead.class));
    assertExceptionContains(e, "No key manager found");
    assertExceptionContains(e, badTypeUrl);
  }
  // Untyped lookup returns the registered KeyManagerImpl for HMAC.
  @Test
  public void testGetUntypedKeyManager_shouldWorkHmac() throws Exception {
    assertThat(Registry.getUntypedKeyManager(MacConfig.HMAC_TYPE_URL).getClass().toString())
        .contains("KeyManagerImpl");
  }
  // Registering null is rejected with IllegalArgumentException.
  @Test
  public void testRegisterKeyManager_keyManagerIsNull_shouldThrowException() throws Exception {
    IllegalArgumentException e =
        assertThrows(IllegalArgumentException.class, () -> Registry.registerKeyManager(null));
    assertThat(e.toString()).contains("must be non-null");
  }
  // Re-registering with a *more* restrictive newKeyAllowed (true -> false) is allowed.
  @Test
  public void testRegisterKeyManager_moreRestrictedNewKeyAllowed_shouldWork() throws Exception {
    String typeUrl = "someTypeUrl";
    Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl));
    Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl), false);
  }
  // Re-registering with the same newKeyAllowed setting is allowed.
  @Test
  public void testRegisterKeyManager_sameNewKeyAllowed_shouldWork() throws Exception {
    String typeUrl = "someOtherTypeUrl";
    Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl));
    Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl), true);
  }
  // Widening newKeyAllowed (false -> true) on re-registration is rejected.
  @Test
  public void testRegisterKeyManager_lessRestrictedNewKeyAllowed_shouldThrowException()
      throws Exception {
    String typeUrl = "yetAnotherTypeUrl";
    Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl), false);
    assertThrows(
        GeneralSecurityException.class,
        () -> Registry.registerKeyManager(new CustomAeadKeyManager(typeUrl), true));
  }
  // Registering a different manager class for an already-registered type URL
  // is rejected, and the original registration is preserved.
  @Test
  public void testRegisterKeyManager_keyManagerFromAnotherClass_shouldThrowException()
      throws Exception {
    // This should not overwrite the existing manager.
    GeneralSecurityException e =
        assertThrows(
            GeneralSecurityException.class,
            () ->
                Registry.registerKeyManager(
                    new CustomAeadKeyManager(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL)));
    assertThat(e.toString()).contains("already registered");
    KeyManager<Aead> manager = Registry.getKeyManager(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL);
    assertThat(manager.getClass().toString()).contains("KeyManagerImpl");
  }
  // Deprecated (typeUrl, manager) overload: null manager is rejected.
  @Test
  public void testRegisterKeyManager_deprecated_keyManagerIsNull_shouldThrowException()
      throws Exception {
    IllegalArgumentException e =
        assertThrows(
            IllegalArgumentException.class,
            () -> Registry.registerKeyManager(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL, null));
    assertThat(e.toString()).contains("must be non-null");
  }
  // Deprecated overload: the supplied type URL must match what the manager supports.
  @Test
  public void testRegisterKeyManager_deprecated_withKeyTypeNotSupported_shouldThrowException()
      throws Exception {
    String typeUrl = "yetSomeOtherTypeUrl";
    String differentTypeUrl = "differentTypeUrl";
    GeneralSecurityException e =
        assertThrows(
            GeneralSecurityException.class,
            () -> Registry.registerKeyManager(differentTypeUrl, new CustomAeadKeyManager(typeUrl)));
    assertExceptionContains(e, "Manager does not support key type " + differentTypeUrl);
  }
@Test
public void testRegisterKeyManager_deprecated_moreRestrictedNewKeyAllowed_shouldWork()
throws Exception {
String typeUrl = "typeUrl";
Registry.registerKeyManager(typeUrl, new CustomAeadKeyManager(typeUrl));
try {
Registry.registerKeyManager(typeUrl, new CustomAeadKeyManager(typeUrl), false);
} catch (GeneralSecurityException e) {
fail("repeated registrations of the same key manager should work");
}
}
  // Deprecated overload: widening newKeyAllowed (false -> true) is rejected.
  @Test
  public void testRegisterKeyManager_deprecated_lessRestrictedNewKeyAllowed_shouldThrowException()
      throws Exception {
    String typeUrl = "totallyDifferentTypeUrl";
    Registry.registerKeyManager(typeUrl, new CustomAeadKeyManager(typeUrl), false);
    assertThrows(
        GeneralSecurityException.class,
        () -> Registry.registerKeyManager(typeUrl, new CustomAeadKeyManager(typeUrl), true));
  }
  // Deprecated overload: a different manager class for a registered type URL
  // is rejected and the original registration is preserved.
  @Test
  public void testRegisterKeyManager_deprecated_keyManagerFromAnotherClass_shouldThrowException()
      throws Exception {
    // This should not overwrite the existing manager.
    GeneralSecurityException e =
        assertThrows(
            GeneralSecurityException.class,
            () ->
                Registry.registerKeyManager(
                    AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL,
                    new CustomAeadKeyManager(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL)));
    assertThat(e.toString()).contains("already registered");
    KeyManager<Aead> manager = Registry.getKeyManager(AeadConfig.AES_CTR_HMAC_AEAD_TYPE_URL);
    assertThat(manager.getClass().toString()).contains("KeyManagerImpl");
  }
  // Public key data derived from a private key verifies signatures made with it.
  @Test
  public void testGetPublicKeyData_shouldWork() throws Exception {
    KeyData privateKeyData = Registry.newKeyData(SignatureKeyTemplates.ECDSA_P256);
    KeyData publicKeyData = Registry.getPublicKeyData(privateKeyData.getTypeUrl(),
        privateKeyData.getValue());
    PublicKeyVerify verifier = Registry.<PublicKeyVerify>getPrimitive(publicKeyData);
    PublicKeySign signer = Registry.<PublicKeySign>getPrimitive(privateKeyData);
    byte[] message = "Nice test message".getBytes(UTF_8);
    verifier.verify(signer.sign(message), message);
  }
@Test
public void testGetPublicKeyData_shouldThrow() throws Exception {
  // Asking for the public key of a symmetric (HMAC) key must fail: its manager
  // is not a PrivateKeyManager.
  KeyData keyData = Registry.newKeyData(MacKeyTemplates.HMAC_SHA256_128BITTAG);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class,
          () -> Registry.getPublicKeyData(keyData.getTypeUrl(), keyData.getValue()));
  assertThat(e.toString()).contains("not a PrivateKeyManager");
}
@Test
public void testGetInputPrimitive_encryptOnly() throws Exception {
  // EncryptOnly maps to Aead as its input primitive; Aead maps to itself.
  assertThat(Registry.getInputPrimitive(EncryptOnly.class)).isEqualTo(Aead.class);
  assertThat(Registry.getInputPrimitive(Aead.class)).isEqualTo(Aead.class);
}
// NOTE(review): the method name says aesGcm but the body exercises AES-EAX
// throughout (AesEaxKeyManager, AesEaxJce) — consider renaming the test.
@Test
public void testGetPrimitive_legacy_aesGcm_shouldWork() throws Exception {
  AesEaxKey aesEaxKey =
      (AesEaxKey) Registry.newKey(AesEaxKeyManager.aes128EaxTemplate().getProto());
  KeyData aesEaxKeyData = Registry.newKeyData(AesEaxKeyManager.aes128EaxTemplate().getProto());
  // Legacy single-argument getPrimitive (no primitive class specified).
  Aead aead = Registry.getPrimitive(aesEaxKeyData);
  assertThat(aesEaxKey.getKeyValue().size()).isEqualTo(16);
  assertThat(aesEaxKeyData.getTypeUrl()).isEqualTo(AeadConfig.AES_EAX_TYPE_URL);
  // This might break when we add native implementations.
  assertThat(aead.getClass()).isEqualTo(AesEaxJce.class);
}
// NOTE(review): like the legacy variant above-named aesGcm, this test actually
// exercises AES-EAX — consider renaming.
@Test
public void testGetPrimitive_aesGcm_shouldWork() throws Exception {
  AesEaxKey aesEaxKey =
      (AesEaxKey) Registry.newKey(AesEaxKeyManager.aes128EaxTemplate().getProto());
  KeyData aesEaxKeyData = Registry.newKeyData(AesEaxKeyManager.aes128EaxTemplate().getProto());
  // Two-argument getPrimitive with an explicit primitive class.
  Aead aead = Registry.getPrimitive(aesEaxKeyData, Aead.class);
  assertThat(aesEaxKey.getKeyValue().size()).isEqualTo(16);
  assertThat(aesEaxKeyData.getTypeUrl()).isEqualTo(AeadConfig.AES_EAX_TYPE_URL);
  // This might break when we add native implementations.
  assertThat(aead.getClass()).isEqualTo(AesEaxJce.class);
}
@Test
public void testGetPrimitive_legacy_hmac_shouldWork() throws Exception {
  // Legacy single-argument getPrimitive for an HMAC key: key material, params,
  // type URL, and concrete primitive class must all match the template.
  com.google.crypto.tink.proto.KeyTemplate template = MacKeyTemplates.HMAC_SHA256_128BITTAG;
  HmacKey hmacKey = (HmacKey) Registry.newKey(template);
  KeyData hmacKeyData = Registry.newKeyData(template);
  Mac mac = Registry.getPrimitive(hmacKeyData);
  assertThat(hmacKey.getKeyValue().size()).isEqualTo(32);
  assertThat(hmacKey.getParams().getTagSize()).isEqualTo(16);
  assertThat(hmacKey.getParams().getHash()).isEqualTo(HashType.SHA256);
  assertThat(hmacKeyData.getTypeUrl()).isEqualTo(MacConfig.HMAC_TYPE_URL);
  // This might break when we add native implementations.
  assertThat(mac.getClass()).isEqualTo(PrfMac.class);
}
@Test
public void testGetPrimitive_hmac_shouldWork() throws Exception {
  // Same as the legacy variant above, but with an explicit primitive class.
  com.google.crypto.tink.proto.KeyTemplate template = MacKeyTemplates.HMAC_SHA256_128BITTAG;
  HmacKey hmacKey = (HmacKey) Registry.newKey(template);
  KeyData hmacKeyData = Registry.newKeyData(template);
  Mac mac = Registry.getPrimitive(hmacKeyData, Mac.class);
  assertThat(hmacKey.getKeyValue().size()).isEqualTo(32);
  assertThat(hmacKey.getParams().getTagSize()).isEqualTo(16);
  assertThat(hmacKey.getParams().getHash()).isEqualTo(HashType.SHA256);
  assertThat(hmacKeyData.getTypeUrl()).isEqualTo(MacConfig.HMAC_TYPE_URL);
  // This might break when we add native implementations.
  assertThat(mac.getClass()).isEqualTo(PrfMac.class);
}
@Test
public void
    testNewKeyData_keyTemplateProto_registeredTypeUrl_returnsCustomAeadKeyManagerNewKeyData()
        throws Exception {
  // With a custom manager registered, Registry.newKeyData on a proto template must
  // return exactly what the manager itself produces for the same key format.
  String keyType = "testNewKeyDataTypeUrl";
  ByteString format = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  CustomAeadKeyManager manager = new CustomAeadKeyManager(keyType);
  Registry.registerKeyManager(manager);
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setTypeUrl(keyType)
          .setValue(format)
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  assertThat(Registry.newKeyData(template)).isEqualTo(manager.newKeyData(format));
}
@Test
public void testNewKeyData_keyTemplateProto_registeredTypeUrlNewKeyAllowedFalse_throwsException()
    throws Exception {
  // A manager registered with newKeyAllowed=false must refuse to create key data.
  String typeUrl = "testNewKeyDataTypeUrl";
  CustomAeadKeyManager km = new CustomAeadKeyManager(typeUrl);
  ByteString keyformat = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  Registry.registerKeyManager(km, false);
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(keyformat)
          .setTypeUrl(typeUrl)
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  assertThrows(GeneralSecurityException.class, () -> Registry.newKeyData(template));
}
@Test
public void testNewKeyData_keyTemplateProto_unregisteredTypeUrl_throwsException()
    throws Exception {
  // No manager is registered for this type URL in this test, so newKeyData fails.
  String typeUrl = "testNewKeyDataTypeUrl";
  ByteString keyformat = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(keyformat)
          .setTypeUrl(typeUrl)
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  assertThrows(GeneralSecurityException.class, () -> Registry.newKeyData(template));
}
@Test
public void
    testNewKeyData_keyTemplateClass_registeredTypeUrl_returnsCustomAeadKeyManagerNewKeyData()
        throws Exception {
  // Same as the proto-template variant, but via the KeyTemplate *class* API:
  // Registry.newKeyData must match the manager's own newKeyData output.
  String keyType = "testNewKeyDataTypeUrl";
  ByteString format = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  CustomAeadKeyManager manager = new CustomAeadKeyManager(keyType);
  Registry.registerKeyManager(manager);
  com.google.crypto.tink.KeyTemplate template =
      com.google.crypto.tink.KeyTemplate.create(
          keyType, format.toByteArray(),
          com.google.crypto.tink.KeyTemplate.OutputPrefixType.TINK);
  assertThat(Registry.newKeyData(template)).isEqualTo(manager.newKeyData(format));
}
@Test
public void testNewKeyData_keyTemplateClass_registeredTypeUrlNewKeyAllowedFalse_throwsException()
    throws Exception {
  // KeyTemplate-class variant: newKeyAllowed=false must block key creation.
  String typeUrl = "testNewKeyDataTypeUrl";
  CustomAeadKeyManager km = new CustomAeadKeyManager(typeUrl);
  ByteString keyformat = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  Registry.registerKeyManager(km, false);
  com.google.crypto.tink.KeyTemplate template =
      com.google.crypto.tink.KeyTemplate.create(
          typeUrl, keyformat.toByteArray(),
          com.google.crypto.tink.KeyTemplate.OutputPrefixType.TINK);
  assertThrows(GeneralSecurityException.class, () -> Registry.newKeyData(template));
}
@Test
public void testNewKeyData_keyTemplateClass_unregisteredTypeUrl_throwsException()
    throws Exception {
  // KeyTemplate-class variant: unregistered type URL must fail.
  String typeUrl = "testNewKeyDataTypeUrl";
  ByteString keyformat = ByteString.copyFromUtf8("testNewKeyDataKeyFormat");
  com.google.crypto.tink.KeyTemplate template =
      com.google.crypto.tink.KeyTemplate.create(
          typeUrl, keyformat.toByteArray(),
          com.google.crypto.tink.KeyTemplate.OutputPrefixType.TINK);
  assertThrows(GeneralSecurityException.class, () -> Registry.newKeyData(template));
}
/** Builds the two named AES-GCM key templates used by {@link TestKeyTypeManager}. */
private static Map<String, KeyTypeManager.KeyFactory.KeyFormat<AesGcmKeyFormat>>
    createTestAesGcmKeyFormats() {
  // "TINK" uses a 128-bit key, "RAW" a 256-bit key.
  AesGcmKeyFormat tinkFormat = AesGcmKeyFormat.newBuilder().setKeySize(16).build();
  AesGcmKeyFormat rawFormat = AesGcmKeyFormat.newBuilder().setKeySize(32).build();
  Map<String, KeyTypeManager.KeyFactory.KeyFormat<AesGcmKeyFormat>> result = new HashMap<>();
  result.put(
      "TINK",
      new KeyTypeManager.KeyFactory.KeyFormat<>(tinkFormat, KeyTemplate.OutputPrefixType.TINK));
  result.put(
      "RAW",
      new KeyTypeManager.KeyFactory.KeyFormat<>(rawFormat, KeyTemplate.OutputPrefixType.RAW));
  return Collections.unmodifiableMap(result);
}
/**
 * Implementation of a KeyTypeManager for testing.
 *
 * <p>Serves two primitives (Aead first, FakeAead second — the ordering matters for
 * getUntypedKeyManager tests) and a configurable set of named key templates.
 */
private static class TestKeyTypeManager extends KeyTypeManager<AesGcmKey> {
  // Templates returned by keyFormats(); replaceable via the map-taking constructor.
  private Map<String, KeyFactory.KeyFormat<AesGcmKeyFormat>> keyFormats =
      createTestAesGcmKeyFormats();

  public TestKeyTypeManager() {
    // Delegate to the map-taking constructor instead of duplicating the whole
    // super(...) primitive-factory setup (the two constructors previously carried
    // identical copies of it).
    this(createTestAesGcmKeyFormats());
  }

  public TestKeyTypeManager(Map<String, KeyFactory.KeyFormat<AesGcmKeyFormat>> keyFormats) {
    super(
        AesGcmKey.class,
        new PrimitiveFactory<Aead, AesGcmKey>(Aead.class) {
          @Override
          public Aead getPrimitive(AesGcmKey key) throws GeneralSecurityException {
            return new AesGcmJce(key.getKeyValue().toByteArray());
          }
        },
        new PrimitiveFactory<FakeAead, AesGcmKey>(FakeAead.class) {
          @Override
          public FakeAead getPrimitive(AesGcmKey key) {
            return new FakeAead();
          }
        });
    this.keyFormats = keyFormats;
  }

  @Override
  public String getKeyType() {
    return "type.googleapis.com/google.crypto.tink.AesGcmKey";
  }

  @Override
  public int getVersion() {
    return 1;
  }

  @Override
  public KeyMaterialType keyMaterialType() {
    return KeyMaterialType.SYMMETRIC;
  }

  @Override
  public void validateKey(AesGcmKey keyProto) throws GeneralSecurityException {
    // Throw by hand so we can verify the exception comes from here.
    if (keyProto.getKeyValue().size() != 16) {
      throw new GeneralSecurityException("validateKey(AesGcmKey) failed");
    }
  }

  @Override
  public AesGcmKey parseKey(ByteString byteString) throws InvalidProtocolBufferException {
    return AesGcmKey.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
  }

  @Override
  public KeyFactory<AesGcmKeyFormat, AesGcmKey> keyFactory() {
    return new KeyFactory<AesGcmKeyFormat, AesGcmKey>(AesGcmKeyFormat.class) {
      @Override
      public void validateKeyFormat(AesGcmKeyFormat format) throws GeneralSecurityException {
        // Throw by hand so we can verify the exception comes from here.
        if (format.getKeySize() != 16) {
          throw new GeneralSecurityException("validateKeyFormat(AesGcmKeyFormat) failed");
        }
      }

      @Override
      public AesGcmKeyFormat parseKeyFormat(ByteString byteString)
          throws InvalidProtocolBufferException {
        return AesGcmKeyFormat.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
      }

      @Override
      public AesGcmKey createKey(AesGcmKeyFormat format) throws GeneralSecurityException {
        return AesGcmKey.newBuilder()
            .setKeyValue(ByteString.copyFrom(Random.randBytes(format.getKeySize())))
            .setVersion(getVersion())
            .build();
      }

      @Override
      public AesGcmKey deriveKey(AesGcmKeyFormat format, InputStream stream)
          throws GeneralSecurityException {
        byte[] pseudorandomness = new byte[format.getKeySize()];
        try {
          // Check read()'s return value: a short read would previously go
          // unnoticed and silently zero-pad the derived key material.
          int bytesRead = stream.read(pseudorandomness);
          if (bytesRead != format.getKeySize()) {
            throw new GeneralSecurityException("Not enough pseudorandomness provided");
          }
        } catch (IOException e) {
          throw new AssertionError("Unexpected IOException", e);
        }
        return AesGcmKey.newBuilder()
            .setKeyValue(ByteString.copyFrom(pseudorandomness))
            .setVersion(getVersion())
            .build();
      }

      @Override
      public Map<String, KeyFactory.KeyFormat<AesGcmKeyFormat>> keyFormats() {
        return keyFormats;
      }
    };
  }
}
@Test
public void testRegisterKeyTypeManager() throws Exception {
  // Smoke test: a fresh registry accepts a KeyTypeManager registration.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
}
@Test
public void testRegisterKeyTypeManager_keyTemplates_works() throws Exception {
  // Registering with newKeyAllowed=true exposes the manager's two named templates.
  Registry.reset();
  assertThat(Registry.keyTemplates()).isEmpty();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  assertThat(Registry.keyTemplates()).hasSize(2);
  assertThat(Registry.keyTemplates()).contains("TINK");
  assertThat(Registry.keyTemplates()).contains("RAW");
}
@Test
public void testRegisterKeyTypeManager_disallowedNewKey_keyTemplates_works() throws Exception {
  // newKeyAllowed=false suppresses the manager's templates from keyTemplates().
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), false);
  assertThat(Registry.keyTemplates()).isEmpty();
}
@Test
public void testRegisterKeyTypeManager_existingKeyManager_noNewKeyTemplate_works()
    throws Exception {
  // Re-registering the same manager with identical templates is idempotent.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
}
@Test
public void testRegisterKeyTypeManager_existingKeyManager_newKeyTemplate_fails()
    throws Exception {
  // Re-registering the same key type with a template name not present in the
  // first registration must be rejected.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  Map<String, KeyTypeManager.KeyFactory.KeyFormat<AesGcmKeyFormat>> formats = new HashMap<>();
  formats.put(
      "NEW_KEY_TEMPLATE_NAME",
      new KeyTypeManager.KeyFactory.KeyFormat<>(
          AesGcmKeyFormat.newBuilder().setKeySize(16).build(),
          KeyTemplate.OutputPrefixType.TINK));
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerKeyManager(new TestKeyTypeManager(formats), true));
}
@Test
public void testRegisterKeyTypeManager_newKeyManager_existingKeyTemplate_fails()
    throws Exception {
  // A manager for a *different* key type but with already-registered template
  // names ("TINK"/"RAW") must be rejected.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  TestKeyTypeManager manager =
      new TestKeyTypeManager() {
        @Override
        public String getKeyType() {
          return "blah";
        }
      };
  assertThrows(GeneralSecurityException.class, () -> Registry.registerKeyManager(manager, true));
}
@Test
public void testRegisterKeyTypeManager_getKeyManagerAead_works() throws Exception {
  // The registered manager can be fetched typed as its first primitive, Aead.
  Registry.reset();
  TestKeyTypeManager testKeyTypeManager = new TestKeyTypeManager();
  Registry.registerKeyManager(testKeyTypeManager, true);
  KeyManager<Aead> km = Registry.getKeyManager(testKeyTypeManager.getKeyType(), Aead.class);
  assertThat(km.getKeyType()).isEqualTo(testKeyTypeManager.getKeyType());
}
@Test
public void testRegisterKeyTypeManager_getKeyManagerFakeAead_works() throws Exception {
  // The registered manager can also be fetched typed as its second primitive.
  Registry.reset();
  TestKeyTypeManager testKeyTypeManager = new TestKeyTypeManager();
  Registry.registerKeyManager(testKeyTypeManager, true);
  KeyManager<FakeAead> km =
      Registry.getKeyManager(testKeyTypeManager.getKeyType(), FakeAead.class);
  assertThat(km.getKeyType()).isEqualTo(testKeyTypeManager.getKeyType());
}
@Test
public void testRegisterKeyTypeManager_getKeyManagerMac_throws() throws Exception {
  // Requesting an unsupported primitive (Mac) fails, and the error lists both
  // the requested class and the classes the manager does support.
  Registry.reset();
  TestKeyTypeManager testKeyTypeManager = new TestKeyTypeManager();
  Registry.registerKeyManager(testKeyTypeManager, true);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class,
          () -> Registry.getKeyManager(testKeyTypeManager.getKeyType(), Mac.class));
  assertExceptionContains(e, "com.google.crypto.tink.Mac");
  assertExceptionContains(e, "com.google.crypto.tink.Aead");
  assertExceptionContains(e, "com.google.crypto.tink.RegistryTest.FakeAead");
}
// Checks that calling getUntypedKeyManager will return the keymanager for the *first* implemented
// class in the constructor.
@Test
public void testRegisterKeyTypeManager_getUntypedKeyManager_returnsAead() throws Exception {
  Registry.reset();
  TestKeyTypeManager testKeyTypeManager = new TestKeyTypeManager();
  Registry.registerKeyManager(testKeyTypeManager, true);
  KeyManager<?> km = Registry.getUntypedKeyManager(testKeyTypeManager.getKeyType());
  // TestKeyTypeManager lists Aead before FakeAead in its super(...) call.
  assertThat(km.getPrimitiveClass()).isEqualTo(Aead.class);
}
@Test
public void testRegisterKeyTypeManager_moreRestrictedNewKeyAllowed_shouldWork() throws Exception {
  // Tightening newKeyAllowed from true to false is permitted.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  Registry.registerKeyManager(new TestKeyTypeManager(), false);
}
@Test
public void testRegisterKeyTypeManager_sameNewKeyAllowed_shouldWork() throws Exception {
  // Repeated registrations with the same or tightening flag all succeed.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  Registry.registerKeyManager(new TestKeyTypeManager(), false);
  Registry.registerKeyManager(new TestKeyTypeManager(), false);
}
@Test
public void testRegisterKeyTypeManager_lessRestrictedNewKeyAllowed_throws() throws Exception {
  // Loosening newKeyAllowed from false to true must be rejected.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerKeyManager(new TestKeyTypeManager(), true));
}
@Test
public void testRegisterKeyTypeManager_differentClass_throws() throws Exception {
  // An anonymous subclass is a different class, so re-registration must fail.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerKeyManager(new TestKeyTypeManager() {}, true));
}
@Test
public void testRegisterKeyTypeManager_afterKeyManager_throws() throws Exception {
  // A KeyTypeManager cannot replace a plain KeyManager already holding its type URL.
  Registry.reset();
  Registry.registerKeyManager(new CustomAeadKeyManager(new TestKeyTypeManager().getKeyType()));
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerKeyManager(new TestKeyTypeManager(), true));
}
@Test
public void testRegisterKeyTypeManager_beforeKeyManager_throws() throws Exception {
  // Conversely, a plain KeyManager cannot replace a registered KeyTypeManager.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  assertThrows(
      GeneralSecurityException.class,
      () ->
          Registry.registerKeyManager(
              new CustomAeadKeyManager(new TestKeyTypeManager().getKeyType())));
}
@Test
public void testParseKeyData_succeeds() throws Exception {
  // parseKeyData must round-trip a serialized AesGcmKey back to an equal proto.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  AesGcmKey expectedKey =
      AesGcmKey.newBuilder()
          .setKeyValue(ByteString.copyFrom("0123456789abcdef".getBytes(UTF_8)))
          .build();
  KeyData wrapped =
      KeyData.newBuilder()
          .setTypeUrl(new TestKeyTypeManager().getKeyType())
          .setValue(expectedKey.toByteString())
          .build();
  assertThat(Registry.parseKeyData(wrapped)).isEqualTo(expectedKey);
}
@Test
public void testDeriveKey_succeeds() throws Exception {
  // deriveKey must produce a key of the manager's material type and type URL whose
  // key bytes are exactly the first keySize bytes of the supplied randomness.
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  AesGcmKeyFormat format = AesGcmKeyFormat.newBuilder().setKeySize(16).build();
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(format.toByteString())
          .setTypeUrl(new TestKeyTypeManager().getKeyType())
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  byte[] keyMaterial = Random.randBytes(100);
  KeyData keyData = Registry.deriveKey(template, new ByteArrayInputStream(keyMaterial));
  assertThat(keyData.getKeyMaterialType()).isEqualTo(new TestKeyTypeManager().keyMaterialType());
  assertThat(keyData.getTypeUrl()).isEqualTo(new TestKeyTypeManager().getKeyType());
  AesGcmKey key =
      AesGcmKey.parseFrom(keyData.getValue(), ExtensionRegistryLite.getEmptyRegistry());
  // The derived key consumes the stream prefix byte-for-byte.
  for (int i = 0; i < 16; ++i) {
    assertThat(key.getKeyValue().byteAt(i)).isEqualTo(keyMaterial[i]);
  }
}
// Tests that validate is called.
@Test
public void testDeriveKey_wrongKeySize_validateThrows() throws Exception {
  Registry.reset();
  Registry.registerKeyManager(new TestKeyTypeManager(), true);
  // Key size 32 trips TestKeyTypeManager's validateKeyFormat (it only accepts 16),
  // so the failure happens before any randomness is read.
  AesGcmKeyFormat format = AesGcmKeyFormat.newBuilder().setKeySize(32).build();
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(format.toByteString())
          .setTypeUrl(new TestKeyTypeManager().getKeyType())
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  ByteArrayInputStream emptyInput = new ByteArrayInputStream(new byte[0]);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class, () -> Registry.deriveKey(template, emptyInput));
  assertExceptionContains(e, "validateKeyFormat");
}
// NOTE(review): "inexistantKeyMananger" is a double typo (nonexistentKeyManager);
// renaming the test would be a safe cleanup.
@Test
public void testDeriveKey_inexistantKeyMananger_throws() throws Exception {
  // No manager registered after reset, so deriveKey cannot resolve the type URL.
  Registry.reset();
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(AesGcmKeyFormat.getDefaultInstance().toByteString())
          .setTypeUrl(new TestKeyTypeManager().getKeyType())
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  ByteArrayInputStream emptyInput = new ByteArrayInputStream(new byte[0]);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class, () -> Registry.deriveKey(template, emptyInput));
  assertExceptionContains(e, "No keymanager registered");
}
/** Marker primitive served first by {@link TestPublicKeyTypeManager}. */
private static class PublicPrimitiveA {}
/** Marker primitive served second by {@link TestPublicKeyTypeManager}. */
private static class PublicPrimitiveB {}
/**
 * Test KeyTypeManager for Ed25519 public keys, serving PublicPrimitiveA (first)
 * and PublicPrimitiveB. Has no key factory, so it cannot create new keys.
 */
private static class TestPublicKeyTypeManager extends KeyTypeManager<Ed25519PublicKey> {
  public TestPublicKeyTypeManager() {
    super(
        Ed25519PublicKey.class,
        new PrimitiveFactory<PublicPrimitiveA, Ed25519PublicKey>(PublicPrimitiveA.class) {
          @Override
          public PublicPrimitiveA getPrimitive(Ed25519PublicKey key) {
            return new PublicPrimitiveA();
          }
        },
        new PrimitiveFactory<PublicPrimitiveB, Ed25519PublicKey>(PublicPrimitiveB.class) {
          @Override
          public PublicPrimitiveB getPrimitive(Ed25519PublicKey key) {
            return new PublicPrimitiveB();
          }
        });
  }
  @Override
  public String getKeyType() {
    return "type.googleapis.com/google.crypto.tink.Ed25519PublicKey";
  }
  @Override
  public int getVersion() {
    return 1;
  }
  @Override
  public KeyMaterialType keyMaterialType() {
    return KeyMaterialType.ASYMMETRIC_PUBLIC;
  }
  @Override
  public void validateKey(Ed25519PublicKey keyProto) throws GeneralSecurityException {
    // Ed25519 public keys are exactly 32 bytes.
    if (keyProto.getKeyValue().size() != 32) {
      throw new GeneralSecurityException("validateKey(Ed25519PublicKey) failed");
    }
  }
  @Override
  public Ed25519PublicKey parseKey(ByteString byteString) throws InvalidProtocolBufferException {
    return Ed25519PublicKey.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
  }
}
/** Marker primitive served first by {@link TestPrivateKeyTypeManager}. */
private static class PrivatePrimitiveA {}
/** Marker primitive served second by {@link TestPrivateKeyTypeManager}. */
private static class PrivatePrimitiveB {}
/** Builds the two named Ed25519 key templates used by the key-factory test manager. */
private static Map<String, KeyTypeManager.KeyFactory.KeyFormat<Ed25519KeyFormat>>
    createTestEd25519KeyFormats() {
  // Ed25519 key formats carry no parameters, so both entries share the default
  // instance and differ only in output-prefix type.
  Ed25519KeyFormat format = Ed25519KeyFormat.getDefaultInstance();
  Map<String, KeyTypeManager.KeyFactory.KeyFormat<Ed25519KeyFormat>> result = new HashMap<>();
  result.put(
      "TINK",
      new KeyTypeManager.KeyFactory.KeyFormat<>(format, KeyTemplate.OutputPrefixType.TINK));
  result.put(
      "RAW",
      new KeyTypeManager.KeyFactory.KeyFormat<>(format, KeyTemplate.OutputPrefixType.RAW));
  return Collections.unmodifiableMap(result);
}
/**
 * Test PrivateKeyTypeManager for Ed25519 private keys, serving PrivatePrimitiveA
 * (first) and PrivatePrimitiveB. Has no key factory; see
 * TestPrivateKeyTypeManagerWithKeyFactory for the creatable variant.
 */
private static class TestPrivateKeyTypeManager
    extends PrivateKeyTypeManager<Ed25519PrivateKey, Ed25519PublicKey> {
  public TestPrivateKeyTypeManager() {
    super(
        Ed25519PrivateKey.class,
        Ed25519PublicKey.class,
        new PrimitiveFactory<PrivatePrimitiveA, Ed25519PrivateKey>(PrivatePrimitiveA.class) {
          @Override
          public PrivatePrimitiveA getPrimitive(Ed25519PrivateKey key) {
            return new PrivatePrimitiveA();
          }
        },
        new PrimitiveFactory<PrivatePrimitiveB, Ed25519PrivateKey>(PrivatePrimitiveB.class) {
          @Override
          public PrivatePrimitiveB getPrimitive(Ed25519PrivateKey key) {
            return new PrivatePrimitiveB();
          }
        });
  }
  @Override
  public String getKeyType() {
    return "type.googleapis.com/google.crypto.tink.Ed25519PrivateKey";
  }
  @Override
  public int getVersion() {
    return 1;
  }
  @Override
  public KeyMaterialType keyMaterialType() {
    return KeyMaterialType.ASYMMETRIC_PRIVATE;
  }
  @Override
  public void validateKey(Ed25519PrivateKey keyProto) throws GeneralSecurityException {
    // Throw by hand so we can verify the exception comes from here.
    if (keyProto.getKeyValue().size() != 32) {
      throw new GeneralSecurityException("validateKey(Ed25519PrivateKey) failed");
    }
  }
  @Override
  public Ed25519PrivateKey parseKey(ByteString byteString) throws InvalidProtocolBufferException {
    return Ed25519PrivateKey.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
  }
  @Override
  public Ed25519PublicKey getPublicKey(Ed25519PrivateKey privateKey) {
    return privateKey.getPublicKey();
  }
}
/**
 * Variant of TestPrivateKeyTypeManager that adds a key factory, so newKeyAllowed
 * registrations and key creation/derivation work. createKey/deriveKey return
 * sentinel key values ("created"/"derived") so tests can tell which path ran.
 */
private static class TestPrivateKeyTypeManagerWithKeyFactory extends TestPrivateKeyTypeManager {
  // Templates returned by keyFormats(); replaceable via the map-taking constructor.
  private Map<String, KeyTypeManager.KeyFactory.KeyFormat<Ed25519KeyFormat>> keyFormats =
      createTestEd25519KeyFormats();
  public TestPrivateKeyTypeManagerWithKeyFactory() {
    super();
  }
  public TestPrivateKeyTypeManagerWithKeyFactory(
      Map<String, KeyTypeManager.KeyFactory.KeyFormat<Ed25519KeyFormat>> keyFormats) {
    super();
    this.keyFormats = keyFormats;
  }
  @Override
  public KeyFactory<Ed25519KeyFormat, Ed25519PrivateKey> keyFactory() {
    return new KeyFactory<Ed25519KeyFormat, Ed25519PrivateKey>(Ed25519KeyFormat.class) {
      @Override
      public void validateKeyFormat(Ed25519KeyFormat format) throws GeneralSecurityException {}
      @Override
      public Ed25519KeyFormat parseKeyFormat(ByteString byteString)
          throws InvalidProtocolBufferException {
        return Ed25519KeyFormat.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
      }
      @Override
      public Ed25519PrivateKey createKey(Ed25519KeyFormat format)
          throws GeneralSecurityException {
        return Ed25519PrivateKey.newBuilder()
            .setKeyValue(ByteString.copyFrom("created", UTF_8))
            .build();
      }
      @Override
      public Ed25519PrivateKey deriveKey(Ed25519KeyFormat format, InputStream inputStream)
          throws GeneralSecurityException {
        // Test fake: the input stream is intentionally ignored.
        return Ed25519PrivateKey.newBuilder()
            .setKeyValue(ByteString.copyFrom("derived", UTF_8))
            .build();
      }
      @Override
      public Map<String, KeyFactory.KeyFormat<Ed25519KeyFormat>> keyFormats() {
        return keyFormats;
      }
    };
  }
}
@Test
public void testRegisterAssymmetricKeyManagers() throws Exception {
  // Smoke test: registering a private/public manager pair succeeds.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
}
@Test
public void testRegisterAssymmetricKeyManagers_keyTemplates_works() throws Exception {
  // The key-factory manager exposes its two templates when newKeyAllowed=true.
  Registry.reset();
  assertThat(Registry.keyTemplates()).isEmpty();
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), true);
  assertThat(Registry.keyTemplates()).hasSize(2);
  assertThat(Registry.keyTemplates()).contains("TINK");
  assertThat(Registry.keyTemplates()).contains("RAW");
}
@Test
public void testRegisterAssymmetricKeyManagers_disallowedNewKey_keyTemplates_works()
    throws Exception {
  // newKeyAllowed=false suppresses the manager's templates.
  Registry.reset();
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), false);
  assertThat(Registry.keyTemplates()).isEmpty();
}
@Test
public void testRegisterAssymmetricKeyManagers_existingKeyManager_noNewKeyTemplate_works()
    throws Exception {
  // Re-registering the same manager with identical templates is idempotent.
  Registry.reset();
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), true);
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), true);
}
@Test
public void testRegisterAssymmetricKeyManagers_existingKeyManager_newKeyTemplate_fails()
    throws Exception {
  // Re-registering the same key type with a template name not present the first
  // time must be rejected.
  Registry.reset();
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), true);
  Map<String, KeyTypeManager.KeyFactory.KeyFormat<Ed25519KeyFormat>> formats = new HashMap<>();
  formats.put(
      "NEW_KEY_TEMPLATE_NAME",
      new KeyTypeManager.KeyFactory.KeyFormat<>(
          Ed25519KeyFormat.getDefaultInstance(), KeyTemplate.OutputPrefixType.TINK));
  assertThrows(
      GeneralSecurityException.class,
      () ->
          Registry.registerKeyManager(
              new TestPrivateKeyTypeManagerWithKeyFactory(formats), true));
}
@Test
public void testRegisterAssymmetricKeyManagers_newKeyManager_existingKeyTemplate_fails()
    throws Exception {
  // A manager for a different key type reusing registered template names must fail.
  Registry.reset();
  Registry.registerKeyManager(new TestPrivateKeyTypeManagerWithKeyFactory(), true);
  TestPrivateKeyTypeManagerWithKeyFactory manager =
      new TestPrivateKeyTypeManagerWithKeyFactory() {
        @Override
        public String getKeyType() {
          return "blah";
        }
      };
  assertThrows(GeneralSecurityException.class, () -> Registry.registerKeyManager(manager, true));
}
@Test
public void testRegisterAssymmetricKeyManagers_getPrivateKeyManagerPrimitiveA_works()
    throws Exception {
  // The private manager can be fetched typed as its first primitive.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<PrivatePrimitiveA> km =
      Registry.getKeyManager(
          new TestPrivateKeyTypeManager().getKeyType(), PrivatePrimitiveA.class);
  assertThat(km.getKeyType()).isEqualTo(new TestPrivateKeyTypeManager().getKeyType());
}
@Test
public void testRegisterAssymmetricKeyManagers_getPrivateKeyManagerPrimitiveB_works()
    throws Exception {
  // The private manager can also be fetched typed as its second primitive.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<PrivatePrimitiveB> km =
      Registry.getKeyManager(
          new TestPrivateKeyTypeManager().getKeyType(), PrivatePrimitiveB.class);
  assertThat(km.getKeyType()).isEqualTo(new TestPrivateKeyTypeManager().getKeyType());
}
@Test
public void testRegisterAssymmetricKeyManagers_getPrivateKeyManagerPublicA_works()
    throws Exception {
  // The paired public manager is registered too and serves its first primitive.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<PublicPrimitiveA> km =
      Registry.getKeyManager(new TestPublicKeyTypeManager().getKeyType(), PublicPrimitiveA.class);
  assertThat(km.getKeyType()).isEqualTo(new TestPublicKeyTypeManager().getKeyType());
}
@Test
public void testRegisterAssymmetricKeyManagers_getPrivateKeyManagerPublicB_works()
    throws Exception {
  // The paired public manager also serves its second primitive.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<PublicPrimitiveB> km =
      Registry.getKeyManager(new TestPublicKeyTypeManager().getKeyType(), PublicPrimitiveB.class);
  assertThat(km.getKeyType()).isEqualTo(new TestPublicKeyTypeManager().getKeyType());
}
@Test
public void testRegisterAssymmetricKeyManagers_getPrivateKeyManagerWrongPrimitive_throws()
    throws Exception {
  // Requesting Mac from the private manager fails; the error lists the requested
  // class and both supported private primitives.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class,
          () -> Registry.getKeyManager(new TestPrivateKeyTypeManager().getKeyType(), Mac.class));
  assertExceptionContains(e, "com.google.crypto.tink.Mac");
  assertExceptionContains(e, "PrivatePrimitiveA");
  assertExceptionContains(e, "PrivatePrimitiveB");
}
@Test
public void testRegisterAssymmetricKeyManagers_getPublicKeyManagerWrongPrimitive_throws()
    throws Exception {
  // Same check for the public manager: unsupported primitive request is rejected
  // with an error listing both supported public primitives.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class,
          () -> Registry.getKeyManager(new TestPublicKeyTypeManager().getKeyType(), Mac.class));
  assertExceptionContains(e, "com.google.crypto.tink.Mac");
  assertExceptionContains(e, "PublicPrimitiveA");
  assertExceptionContains(e, "PublicPrimitiveB");
}
// Checks that calling getUntypedKeyManager will return the keymanager for the *first* implemented
// class in the constructor.
@Test
public void testRegisterAssymmetricKeyManagers_getUntypedPrivateKeyManager_returnsPrimitiveA()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<?> km = Registry.getUntypedKeyManager(new TestPrivateKeyTypeManager().getKeyType());
  // PrivatePrimitiveA is listed first in TestPrivateKeyTypeManager's super(...) call.
  assertThat(km.getPrimitiveClass()).isEqualTo(PrivatePrimitiveA.class);
}
// Checks that calling getUntypedKeyManager will return the keymanager for the *first* implemented
// class in the constructor.
@Test
public void testRegisterAssymmetricKeyManagers_getUntypedPublicKeyManager_returnsPrimitiveA()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  KeyManager<?> km = Registry.getUntypedKeyManager(new TestPublicKeyTypeManager().getKeyType());
  // PublicPrimitiveA is listed first in TestPublicKeyTypeManager's super(...) call.
  assertThat(km.getPrimitiveClass()).isEqualTo(PublicPrimitiveA.class);
}
@Test
public void testRegisterAssymmetricKeyManagers_newKeyAllowed_withoutKeyFactory_fails()
    throws Exception {
  // TestPrivateKeyTypeManager has no key factory, so newKeyAllowed=true cannot
  // be honored and registration throws UnsupportedOperationException.
  Registry.reset();
  assertThrows(
      UnsupportedOperationException.class,
      () ->
          Registry.registerAsymmetricKeyManagers(
              new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), true));
}
@Test
public void testRegisterAssymmetricKeyManagers_moreRestrictedNewKeyAllowed_shouldWork()
    throws Exception {
  // Tightening newKeyAllowed from true to false is permitted for asymmetric pairs.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), true);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), false);
}
@Test
public void testRegisterAssymmetricKeyManagers_sameNewKeyAllowed_shouldWork() throws Exception {
  // Repeated registrations with the same or tightening flag all succeed.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), true);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), true);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), false);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), false);
}
@Test
public void testRegisterAssymmetricKeyManagers_lessRestrictedNewKeyAllowed_throws()
    throws Exception {
  // Loosening newKeyAllowed from false to true must be rejected.
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      () ->
          Registry.registerAsymmetricKeyManagers(
              new TestPrivateKeyTypeManagerWithKeyFactory(),
              new TestPublicKeyTypeManager(),
              true));
}
// The public half may be registered standalone both before and after the asymmetric pair.
@Test
public void testRegisterAssymmetricKeyManagers_publicKeyManagerCanBeRegisteredAlone()
    throws Exception {
  Registry.reset();
  Registry.registerKeyManager(new TestPublicKeyTypeManager(), false);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), true);
  Registry.registerKeyManager(new TestPublicKeyTypeManager(), false);
}
// After linking the pair, getPublicKeyData must extract the embedded public key from a private
// key proto, even if the public manager was also (re-)registered standalone.
@Test
public void testRegisterAssymmetricKeyManagers_publicKeyManagerReRegister_getPublicKeyData()
    throws Exception {
  Registry.reset();
  Registry.registerKeyManager(new TestPublicKeyTypeManager(), false);
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  Registry.registerKeyManager(new TestPublicKeyTypeManager(), false);
  // Check that getPublicKeyData works now.
  Ed25519PrivateKey privateKey =
      Ed25519PrivateKey.newBuilder()
          .setKeyValue(ByteString.copyFrom(Random.randBytes(32)))
          .setPublicKey(
              Ed25519PublicKey.newBuilder()
                  .setKeyValue(ByteString.copyFrom(Random.randBytes(32))))
          .build();
  KeyData publicKeyData =
      Registry.getPublicKeyData(
          new TestPrivateKeyTypeManager().getKeyType(), privateKey.toByteString());
  // The returned KeyData must carry the *public* type URL and the same key material that was
  // embedded in the private key above.
  assertThat(publicKeyData.getTypeUrl()).isEqualTo(new TestPublicKeyTypeManager().getKeyType());
  Ed25519PublicKey publicKey =
      Ed25519PublicKey.parseFrom(
          publicKeyData.getValue(), ExtensionRegistryLite.getEmptyRegistry());
  assertThat(publicKey.getKeyValue()).isEqualTo(privateKey.getPublicKey().getKeyValue());
}
// Re-registering with a *different class* for the private manager must fail. The anonymous
// subclass created by "{}" counts as a different class.
@Test
public void testRegisterAssymmetricKeyManagers_differentClassPrivateKey_throws()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      () ->
          Registry.registerAsymmetricKeyManagers(
              new TestPrivateKeyTypeManager() {}, new TestPublicKeyTypeManager(), false));
}
// Same as above but with a different class for the *public* manager.
@Test
public void testRegisterAssymmetricKeyManagers_differentClassPublicKey_throws() throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      () ->
          Registry.registerAsymmetricKeyManagers(
              // Note: due to the {} this is a subclass of TestPublicKeyTypeManager.
              new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager() {}, false));
}
// After an asymmetric registration, a plain registerKeyManager with a different class for the
// private key type must fail.
@Test
public void testRegisterAssymmetricKeyManagers_thenNormalRegister_throws()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      // Note: due to the {} this is a subclass of TestPrivateKeyTypeManager.
      () -> Registry.registerKeyManager(new TestPrivateKeyTypeManager() {}, false));
}
// Same as above but the plain registration targets the *public* key type.
@Test
public void testRegisterAssymmetricKeyManagers_thenNormalRegisterForPublic_throws()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  assertThrows(
      GeneralSecurityException.class,
      // Note: due to the {} this is a subclass of TestPublicKeyTypeManager.
      () -> Registry.registerKeyManager(new TestPublicKeyTypeManager() {}, false));
}
// Re-registering the same private manager paired with a public manager of a *different key type*
// must fail, and the error should mention the mismatched public key manager.
@Test
public void testRegisterAssymmetricKeyManagers_throwsWithDifferentPublicKeyManager()
    throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  GeneralSecurityException e =
      assertThrows(
          GeneralSecurityException.class,
          () ->
              Registry.registerAsymmetricKeyManagers(
                  new TestPrivateKeyTypeManager(),
                  new TestPublicKeyTypeManager() {
                    @Override
                    public String getKeyType() {
                      // Deliberately different type URL than the originally linked public manager.
                      return "bla";
                    }
                  },
                  false));
  assertExceptionContains(e, "public key manager corresponding to");
}
// deriveKey requires a key factory on the private manager; TestPrivateKeyTypeManager has none.
@Test
public void testAsymmetricKeyManagers_deriveKey_withoutKeyFactory() throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false);
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(Ed25519KeyFormat.getDefaultInstance().toByteString())
          .setTypeUrl(new TestPrivateKeyTypeManager().getKeyType())
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  assertThrows(
      UnsupportedOperationException.class,
      () -> Registry.deriveKey(template, new ByteArrayInputStream(new byte[0])));
}
// With a key-factory-backed private manager, deriveKey produces a parseable private key.
@Test
public void testAsymmetricKeyManagers_deriveKey() throws Exception {
  Registry.reset();
  Registry.registerAsymmetricKeyManagers(
      new TestPrivateKeyTypeManagerWithKeyFactory(), new TestPublicKeyTypeManager(), true);
  com.google.crypto.tink.proto.KeyTemplate template =
      com.google.crypto.tink.proto.KeyTemplate.newBuilder()
          .setValue(Ed25519KeyFormat.getDefaultInstance().toByteString())
          .setTypeUrl(new TestPrivateKeyTypeManagerWithKeyFactory().getKeyType())
          .setOutputPrefixType(OutputPrefixType.TINK)
          .build();
  KeyData keyData = Registry.deriveKey(template, new ByteArrayInputStream(new byte[0]));
  Ed25519PrivateKey key =
      Ed25519PrivateKey.parseFrom(keyData.getValue(), ExtensionRegistryLite.getEmptyRegistry());
  // "derived" is presumably the fixed key material produced by
  // TestPrivateKeyTypeManagerWithKeyFactory's deriveKey (defined elsewhere) — confirm there.
  assertThat(key.getKeyValue()).isEqualTo(ByteString.copyFrom("derived", UTF_8));
}
// Minimal Catalogue stub: only its class identity matters for the addCatalogue collision tests
// below; both lookups return null.
private static class Catalogue1 implements Catalogue<Aead> {
  @Override
  public KeyManager<Aead> getKeyManager(String typeUrl, String primitiveName, int minVersion) {
    return null;
  }
  @Override
  public PrimitiveWrapper<Aead, Aead> getPrimitiveWrapper() {
    return null;
  }
}
// Second stub catalogue, identical in behavior to Catalogue1 but a distinct class.
private static class Catalogue2 implements Catalogue<Aead> {
  @Override
  public KeyManager<Aead> getKeyManager(String typeUrl, String primitiveName, int minVersion) {
    return null;
  }
  @Override
  public PrimitiveWrapper<Aead, Aead> getPrimitiveWrapper() {
    return null;
  }
}
// Third stub catalogue, identical in behavior to Catalogue1 but a distinct class.
private static class Catalogue3 implements Catalogue<Aead> {
  @Override
  public KeyManager<Aead> getKeyManager(String typeUrl, String primitiveName, int minVersion) {
    return null;
  }
  @Override
  public PrimitiveWrapper<Aead, Aead> getPrimitiveWrapper() {
    return null;
  }
}
// Three threads race to add different catalogues under the same name. Exactly one registration
// may win; the other two must be rejected with a GeneralSecurityException.
@Test
public void testAddCatalogue_multiThreads_shouldWork() throws Exception {
  final boolean[] threwException = new boolean[3];
  Thread thread1 =
      new Thread(() -> addCatalogueRecordingFailure(new Catalogue1(), threwException, 0));
  Thread thread2 =
      new Thread(() -> addCatalogueRecordingFailure(new Catalogue2(), threwException, 1));
  Thread thread3 =
      new Thread(() -> addCatalogueRecordingFailure(new Catalogue3(), threwException, 2));
  // Start the threads.
  thread1.start();
  thread2.start();
  thread3.start();
  // Wait until all threads finished.
  thread1.join();
  thread2.join();
  thread3.join();
  // Exactly two of the three registrations must have been rejected.
  int count = 0;
  for (boolean threw : threwException) {
    if (threw) {
      count++;
    }
  }
  assertThat(count).isEqualTo(2);
}

// Tries to register {@code catalogue} under the shared name "catalogue" and records in
// {@code threw[index]} whether the registration was rejected.
private static void addCatalogueRecordingFailure(
    Catalogue<Aead> catalogue, boolean[] threw, int index) {
  try {
    Registry.addCatalogue("catalogue", catalogue);
    threw[index] = false;
  } catch (GeneralSecurityException e) {
    threw[index] = true;
  }
}
// TODO(przydatek): Add more tests for creation of PrimitiveSets.
// Builds a one-key Aead primitive set (enabled AES-EAX key, TINK prefix) for the wrap tests.
private static PrimitiveSet<Aead> createAeadPrimitiveSet() throws Exception {
  return TestUtil.createPrimitiveSet(
      TestUtil.createKeyset(
          Keyset.Key.newBuilder()
              .setKeyData(Registry.newKeyData(AesEaxKeyManager.aes128EaxTemplate()))
              .setKeyId(1)
              .setStatus(KeyStatusType.ENABLED)
              .setOutputPrefixType(OutputPrefixType.TINK)
              .build()),
      Aead.class);
}
// Sanity check: wrapping succeeds when the Aead wrapper is registered (default state).
@Test
public void testWrap_wrapperRegistered() throws Exception {
  Registry.wrap(createAeadPrimitiveSet());
}
// Wrapping after Registry.reset() (no wrappers registered) must fail with a helpful message.
@Test
public void testWrap_noWrapperRegistered_throws() throws Exception {
  // Build the set first, while the Aead key manager is still registered.
  PrimitiveSet<Aead> aeadSet = createAeadPrimitiveSet();
  Registry.reset();
  GeneralSecurityException e =
      assertThrows(GeneralSecurityException.class, () -> Registry.wrap(aeadSet));
  assertExceptionContains(e, "No wrapper found");
  assertExceptionContains(e, "Aead");
}
@Test
public void testWrap_wrapAsEncryptOnly() throws Exception {
  // Check that Registry.wrap can be assigned to an EncryptOnly (as there's a suppress warning).
  EncryptOnly encrypt = Registry.wrap(createAeadPrimitiveSet(), EncryptOnly.class);
  assertThat(encrypt).isNotNull();
}
// A second wrapper producing EncryptOnly (here from Mac instead of Aead) must be rejected:
// the registry allows only one wrapper per output primitive class.
@Test
public void testWrap_registerSecondWrapperForEncryptOnly_throws() throws Exception {
  assertThrows(
      GeneralSecurityException.class,
      () -> {
        Registry.registerPrimitiveWrapper(
            new PrimitiveWrapper<Mac, EncryptOnly>() {
              @Override
              public EncryptOnly wrap(PrimitiveSet<Mac> primitiveSet) {
                return null;
              }
              @Override
              public Class<EncryptOnly> getPrimitiveClass() {
                return EncryptOnly.class;
              }
              @Override
              public Class<Mac> getInputPrimitiveClass() {
                return Mac.class;
              }
            });
      });
}
// FIPS restriction can be enabled only while the registry is empty.
@Test
public void testFips_succeedsOnEmptyRegistry() throws Exception {
  Registry.reset();
  Registry.restrictToFipsIfEmpty();
  assertTrue(TinkFipsUtil.useOnlyFips());
}
// Enabling the FIPS restriction after managers are already registered must fail.
@Test
public void testFips_failsOnNonEmptyRegistry() throws Exception {
  assertThrows(GeneralSecurityException.class, Registry::restrictToFipsIfEmpty);
}
// Under FIPS restriction, a non-FIPS-compliant key type manager cannot be registered.
@Test
public void testFips_registerNonFipsKeyTypeManagerFails() throws Exception {
  Assume.assumeTrue(TinkFipsUtil.fipsModuleAvailable());
  Registry.reset();
  Registry.restrictToFipsIfEmpty();
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerKeyManager(new TestKeyTypeManager(), true));
}
// Under FIPS restriction, registering a FIPS-compatible manager (AES-GCM) still works.
@Test
public void testFips_registerFipsKeyTypeManagerSucceeds() throws Exception {
  Assume.assumeTrue(TinkFipsUtil.fipsModuleAvailable());
  Registry.reset();
  Registry.restrictToFipsIfEmpty();
  AesGcmKeyManager.register(true);
}
// Same FIPS rejection as above, but via the asymmetric registration path.
@Test
public void testFips_registerNonFipsKeyTypeManagerAsymmetricFails() throws Exception {
  Assume.assumeTrue(TinkFipsUtil.fipsModuleAvailable());
  Registry.reset();
  Registry.restrictToFipsIfEmpty();
  assertThrows(
      GeneralSecurityException.class,
      () -> Registry.registerAsymmetricKeyManagers(
          new TestPrivateKeyTypeManager(), new TestPublicKeyTypeManager(), false));
}
// Under FIPS restriction, a FIPS-compatible asymmetric pair (ECDSA) registers fine.
@Test
public void testFips_registerFipsKeyTypeManagerAsymmetricSucceeds() throws Exception {
  Assume.assumeTrue(TinkFipsUtil.fipsModuleAvailable());
  Registry.reset();
  Registry.restrictToFipsIfEmpty();
  EcdsaSignKeyManager.registerPair(true);
}
// Marker primitive class with no registered key manager or wrapper (not referenced in this chunk;
// presumably used by other tests in this file — verify before removing).
private static class FakeAead {}
}
| |
package com.s4game.server.stage.entity;
import java.io.Serializable;
import java.sql.Timestamp;
import com.s4game.core.data.AbsVersion;
import com.s4game.core.data.IEntity;
/**
 * Persistent per-role stage state: map position, vitals, and serialized sub-system payloads
 * (buffs, props, PK info, mounts, dungeon copies, ...). Plain bean persisted via IEntity.
 */
public class RoleStage extends AbsVersion implements Serializable, IEntity {

    private static final long serialVersionUID = 1L;

    // Primary key: id of the owning role (see getPirmaryKeyName()).
    private String userRoleId;
    private String mapId;
    private Integer mapX;
    private Integer mapY;
    private Integer hp;
    private Integer mp;
    private Integer maxHp;
    private Integer maxMp;
    private String buff;
    private String props;
    private Integer state;
    private String mapNode;
    private Integer tiLi;
    private Integer lineNo;
    private String pkInfo;
    private Integer shanbiVal;
    private String meirenInfo;
    private String zuoqiInfo;
    private Integer freeFlyCount;
    private Long flyCountRefreshTime;
    private String copyInfo;
    private Timestamp logUpdateTime;

    public String getUserRoleId() {
        return userRoleId;
    }

    public void setUserRoleId(String userRoleId) {
        this.userRoleId = userRoleId;
    }

    public String getMapId() {
        return mapId;
    }

    public void setMapId(String mapId) {
        this.mapId = mapId;
    }

    public Integer getMapX() {
        return mapX;
    }

    public void setMapX(Integer mapX) {
        this.mapX = mapX;
    }

    public Integer getMapY() {
        return mapY;
    }

    public void setMapY(Integer mapY) {
        this.mapY = mapY;
    }

    public Integer getHp() {
        return hp;
    }

    public void setHp(Integer hp) {
        this.hp = hp;
    }

    public Integer getMp() {
        return mp;
    }

    public void setMp(Integer mp) {
        this.mp = mp;
    }

    public Integer getMaxHp() {
        return maxHp;
    }

    public void setMaxHp(Integer maxHp) {
        this.maxHp = maxHp;
    }

    public Integer getMaxMp() {
        return maxMp;
    }

    public void setMaxMp(Integer maxMp) {
        this.maxMp = maxMp;
    }

    public String getBuff() {
        return buff;
    }

    public void setBuff(String buff) {
        this.buff = buff;
    }

    public String getProps() {
        return props;
    }

    public void setProps(String props) {
        this.props = props;
    }

    public Integer getState() {
        return state;
    }

    public void setState(Integer state) {
        this.state = state;
    }

    public String getMapNode() {
        return mapNode;
    }

    public void setMapNode(String mapNode) {
        this.mapNode = mapNode;
    }

    public Integer getTiLi() {
        return tiLi;
    }

    public void setTiLi(Integer tiLi) {
        this.tiLi = tiLi;
    }

    public Integer getLineNo() {
        return lineNo;
    }

    public void setLineNo(Integer lineNo) {
        this.lineNo = lineNo;
    }

    public String getPkInfo() {
        return pkInfo;
    }

    public void setPkInfo(String pkInfo) {
        this.pkInfo = pkInfo;
    }

    public Integer getShanbiVal() {
        return shanbiVal;
    }

    public void setShanbiVal(Integer shanbiVal) {
        this.shanbiVal = shanbiVal;
    }

    public String getMeirenInfo() {
        return meirenInfo;
    }

    public void setMeirenInfo(String meirenInfo) {
        this.meirenInfo = meirenInfo;
    }

    public Integer getFreeFlyCount() {
        return freeFlyCount;
    }

    public void setFreeFlyCount(Integer freeFlyCount) {
        this.freeFlyCount = freeFlyCount;
    }

    public Long getFlyCountRefreshTime() {
        return flyCountRefreshTime;
    }

    public void setFlyCountRefreshTime(Long flyCountRefreshTime) {
        this.flyCountRefreshTime = flyCountRefreshTime;
    }

    public String getZuoqiInfo() {
        return zuoqiInfo;
    }

    public void setZuoqiInfo(String zuoqiInfo) {
        this.zuoqiInfo = zuoqiInfo;
    }

    public String getCopyInfo() {
        return copyInfo;
    }

    public void setCopyInfo(String copyInfo) {
        this.copyInfo = copyInfo;
    }

    public Timestamp getLogUpdateTime() {
        return logUpdateTime;
    }

    public void setLogUpdateTime(Timestamp logUpdateTime) {
        this.logUpdateTime = logUpdateTime;
    }

    // NOTE(review): name is misspelled ("Pirmary") but presumably implements the IEntity
    // contract defined elsewhere — confirm before renaming.
    public String getPirmaryKeyName() {
        return "userRoleId";
    }

    public String getPrimaryKeyValue() {
        return getUserRoleId();
    }

    /** Returns a field-by-field shallow copy of this record. */
    public RoleStage copy() {
        RoleStage result = new RoleStage();
        result.setUserRoleId(getUserRoleId());
        result.setMapId(getMapId());
        result.setMapX(getMapX());
        result.setMapY(getMapY());
        result.setHp(getHp());
        result.setMp(getMp());
        result.setMaxHp(getMaxHp());
        result.setMaxMp(getMaxMp());
        result.setBuff(getBuff());
        result.setProps(getProps());
        result.setState(getState());
        result.setMapNode(getMapNode());
        result.setTiLi(getTiLi());
        result.setLineNo(getLineNo());
        result.setPkInfo(getPkInfo());
        result.setShanbiVal(getShanbiVal());
        result.setMeirenInfo(getMeirenInfo());
        result.setFreeFlyCount(getFreeFlyCount());
        result.setFlyCountRefreshTime(getFlyCountRefreshTime());
        result.setZuoqiInfo(getZuoqiInfo());
        result.setCopyInfo(getCopyInfo());
        result.setLogUpdateTime(getLogUpdateTime());
        return result;
    }
}
| |
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.meidusa.venus.validate.util.location;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
* A simple immutable and serializable implementation of {@link Location}.
*/
/**
 * A simple immutable and serializable implementation of {@link Location}.
 *
 * <p>Serialization note: {@link #readResolve()} maps a deserialized unknown location back onto
 * the {@link Location#UNKNOWN} singleton, so identity comparisons against UNKNOWN keep working.
 */
public class LocationImpl implements Location, Serializable {

    /**
     * Serialization version; fields are final, so keep this stable.
     */
    private static final long serialVersionUID = 1L;

    // All state is final: instances are immutable by construction.
    private final String uri;
    private final int line;
    private final int column;
    private final String description;

    // Package private: outside this package, use Location.UNKNOWN.
    static final LocationImpl UNKNOWN = new LocationImpl(null, null, -1, -1);

    /**
     * Build a location for a given URI, with unknown line and column numbers.
     *
     * @param description an optional description (may be null or empty)
     * @param uri the resource URI
     */
    public LocationImpl(String description, String uri) {
        this(description, uri, -1, -1);
    }

    /**
     * Build a location for a given URI and line and column numbers.
     *
     * <p>A null/empty URI forces line and column to -1 (a position without a resource is
     * meaningless); an empty description is normalized to null.
     *
     * @param description an optional description (may be null or empty)
     * @param uri the resource URI
     * @param line the line number (starts at 1)
     * @param column the column number (starts at 1)
     */
    public LocationImpl(String description, String uri, int line, int column) {
        if (uri == null || uri.length() == 0) {
            this.uri = null;
            this.line = -1;
            this.column = -1;
        } else {
            this.uri = uri;
            this.line = line;
            this.column = column;
        }
        if (description != null && description.length() == 0) {
            description = null;
        }
        this.description = description;
    }

    /**
     * Copy constructor.
     *
     * @param location the location to be copied
     */
    public LocationImpl(Location location) {
        this(location.getDescription(), location.getURI(), location.getLineNumber(), location.getColumnNumber());
    }

    /**
     * Create a location from an existing one, but with a different description
     */
    public LocationImpl(String description, Location location) {
        this(description, location.getURI(), location.getLineNumber(), location.getColumnNumber());
    }

    /**
     * Obtain a <code>LocationImpl</code> from a {@link Location}. If <code>location</code> is already a
     * <code>LocationImpl</code>, it is returned, otherwise it is copied.
     * <p>
     * This method is useful when an immutable and serializable location is needed, such as in locatable exceptions.
     *
     * @param location the location (null maps to {@link #UNKNOWN})
     * @return an immutable and serializable version of <code>location</code>
     */
    public static LocationImpl get(Location location) {
        if (location instanceof LocationImpl) {
            return (LocationImpl) location;
        } else if (location == null) {
            return UNKNOWN;
        } else {
            return new LocationImpl(location);
        }
    }

    /**
     * Get the description of this location
     *
     * @return the description (can be <code>null</code>)
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Get the URI of this location
     *
     * @return the URI (<code>null</code> if unknown).
     */
    public String getURI() {
        return this.uri;
    }

    /**
     * Get the line number of this location
     *
     * @return the line number (<code>-1</code> if unknown)
     */
    public int getLineNumber() {
        return this.line;
    }

    /**
     * Get the column number of this location
     *
     * @return the column number (<code>-1</code> if unknown)
     */
    public int getColumnNumber() {
        return this.column;
    }

    /**
     * Gets a source code snippet around this location's line by re-reading the resource at
     * {@link #getURI()}. Best-effort: any failure (bad URI, unreadable stream) yields an
     * empty/partial list rather than an exception.
     *
     * @param padding The amount of lines before and after the error to include
     */
    public List<String> getSnippet(int padding) {
        List<String> snippet = new ArrayList<String>();
        if (getLineNumber() > 0) {
            InputStream in = null;
            BufferedReader reader = null;
            try {
                in = new URL(getURI()).openStream();
                reader = new BufferedReader(new InputStreamReader(in));
                int lineno = 0;
                int errno = getLineNumber();
                String line;
                // Collect only lines within [errno - padding, errno + padding].
                while ((line = reader.readLine()) != null) {
                    lineno++;
                    if (lineno >= errno - padding && lineno <= errno + padding) {
                        snippet.add(line);
                    }
                }
            } catch (Exception ex) {
                // ignoring as snippet not available isn't a big deal
            }finally{
                // Close both wrapper and underlying stream; ignore close failures (best-effort).
                if(reader != null){
                    try {
                        reader.close();
                    } catch (IOException e) {
                    }
                }
                if(in != null){
                    try {
                        in.close();
                    } catch (IOException e) {
                    }
                }
            }
        }
        return snippet;
    }

    // Equality is defined against the Location interface, not just LocationImpl, comparing
    // line, column, URI and description (null-safe via testEquals).
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj instanceof Location) {
            Location other = (Location) obj;
            return this.line == other.getLineNumber() && this.column == other.getColumnNumber() && testEquals(this.uri, other.getURI())
                && testEquals(this.description, other.getDescription());
        }
        return false;
    }

    // Consistent with equals(): folds the same four fields into the hash.
    @Override
    public int hashCode() {
        int hash = line ^ column;
        if (uri != null)
            hash ^= uri.hashCode();
        if (description != null)
            hash ^= description.hashCode();
        return hash;
    }

    @Override
    public String toString() {
        return LocationUtils.toString(this);
    }

    /**
     * Ensure serialized unknown location resolve to {@link Location#UNKNOWN}.
     */
    private Object readResolve() {
        return this.equals(Location.UNKNOWN) ? Location.UNKNOWN : this;
    }

    // Null-safe equality helper (equivalent to java.util.Objects.equals).
    private boolean testEquals(Object object1, Object object2) {
        if (object1 == object2) {
            return true;
        }
        if ((object1 == null) || (object2 == null)) {
            return false;
        }
        return object1.equals(object2);
    }
}
| |
/*
* #%L
* Diana UI Core
* %%
* Copyright (C) 2014 Diana UI
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.dianaui.universal.core.client.ui.constants;
/**
* @author Sven Jacobs
* @author <a href='mailto:donbeave@gmail.com'>Alexey Zhokhov</a>
*/
/**
 * Bootstrap 3 (plus Font Awesome and datetime-picker) CSS class name constants.
 * Pure constant holder — not instantiable.
 *
 * @author Sven Jacobs
 * @author <a href='mailto:donbeave@gmail.com'>Alexey Zhokhov</a>
 */
public final class Styles {

    public static final String ACTIVE = "active";
    public static final String ALERT = "alert";
    public static final String ALERT_DISMISSABLE = "alert-dismissable";
    public static final String ALERT_LINK = "alert-link";
    public static final String ARROW = "arrow";
    public static final String BADGE = "badge";
    public static final String BOTTOM = "bottom";
    public static final String BREADCRUMB = "breadcrumb";

    // Buttons
    public static final String BTN = "btn";
    public static final String BTN_BLOCK = "btn-block";
    public static final String BTN_GROUP = "btn-group";
    public static final String BTN_GROUP_JUSTIFIED = "btn-group-justified";
    public static final String BTN_GROUP_VERTICAL = "btn-group-vertical";
    public static final String BTN_TOOLBAR = "btn-toolbar";

    public static final String CAPTION = "caption";
    public static final String CARET = "caret";

    // Carousel
    public static final String CAROUSEL = "carousel";
    public static final String CAROUSEL_CAPTION = "carousel-caption";
    public static final String CAROUSEL_CONTROL = "carousel-control";
    public static final String CAROUSEL_INDICATORS = "carousel-indicators";
    public static final String CAROUSEL_INNER = "carousel-inner";

    public static final String CENTER_BLOCK = "center-block";
    public static final String CHECKBOX = "checkbox";
    public static final String CHECKBOX_INLINE = "checkbox-inline";
    public static final String CLEARFIX = "clearfix";
    public static final String CLOSE = "close";
    public static final String COLLAPSE = "collapse";
    public static final String COLLAPSING = "collapsing";
    public static final String CONTAINER = "container";
    public static final String CONTAINER_FLUID = "container-fluid";
    public static final String CONTROL_LABEL = "control-label";

    // Date/time picker widgets (bootstrap-datetimepicker / bootstrap-timepicker)
    public static final String DATETIMEPICKER_WIDGET = "bootstrap-datetimepicker-widget";
    public static final String DATETIMEPICKER_SWITCH = "picker-switch";
    public static final String DATEPICKER = "datepicker";
    public static final String DATEPICKER_DAYS = "datepicker-days";
    public static final String DATEPICKER_MONTHS = "datepicker-months";
    public static final String TIMEPICKER_WIDGET = "bootstrap-timepicker-widget";
    public static final String TIMEPICKER_WIDGET_HOUR = "bootstrap-timepicker-hour";
    public static final String TIMEPICKER_WIDGET_MINUTE = "bootstrap-timepicker-minute";
    public static final String TIMEPICKER_WIDGET_SECOND = "bootstrap-timepicker-second";

    public static final String DIVIDER = "divider";
    public static final String DISABLED = "disabled";
    public static final String DL_HORIZONTAL = "dl-horizontal";

    // Dropdowns
    public static final String DROP_UP = "dropup";
    public static final String DROPDOWN = "dropdown";
    public static final String DROPDOWN_HEADER = "dropdown-header";
    public static final String DROPDOWN_MENU = "dropdown-menu";
    public static final String DROPDOWN_TOGGLE = "dropdown-toggle";

    public static final String FADE = "fade";

    // Font Awesome
    public static final String FONT_AWESOME_BASE = "fa";
    public static final String FONT_AWESOME_UL = "fa-ul";
    public static final String FONT_AWESOME_LI = "fa-li";

    // Forms
    public static final String FORM_CONTROL = "form-control";
    public static final String FORM_CONTROL_FEEDBACK = "form-control-feedback";
    public static final String FORM_CONTROL_STATIC = "form-control-static";
    public static final String FORM_GROUP = "form-group";

    public static final String GLYPHICON_BASE = "glyphicon";
    public static final String HAS_FEEDBACK = "has-feedback";
    public static final String HELP_BLOCK = "help-block";
    public static final String ITEM = "item";

    // Icons (Font Awesome modifiers and carousel/navbar icons)
    public static final String ICON_LIGHT = "fa-light";
    public static final String ICON_MUTED = "fa-muted";
    public static final String ICON_BORDER = "fa-border";
    public static final String ICON_STACK = "fa-stack";
    public static final String ICON_SPIN = "fa-spin";
    public static final String ICON_FIXED_WIDTH = "fa-fw";
    public static final String ICON_STACK_BASE = "fa-stack-2x";
    public static final String ICON_STACK_TOP = "fa-stack-1x";
    public static final String ICON_BAR = "icon-bar";
    public static final String ICON_PREV = "icon-prev";
    public static final String ICON_NEXT = "icon-next";

    public static final String IMG_RESPONSIVE = "img-responsive";
    public static final String IN = "in";

    // Input groups
    public static final String INPUT_GROUP = "input-group";
    public static final String INPUT_GROUP_ADDON = "input-group-addon";
    public static final String INPUT_GROUP_BTN = "input-group-btn";

    public static final String JUMBOTRON = "jumbotron";
    public static final String LABEL = "label";
    public static final String LEAD = "lead";
    public static final String LEFT = "left";

    // Lists
    public static final String LIST_INLINE = "list-inline";
    public static final String LIST_GROUP = "list-group";
    public static final String LIST_GROUP_ITEM = "list-group-item";
    public static final String LIST_GROUP_ITEM_HEADING = "list-group-item-heading";
    public static final String LIST_GROUP_ITEM_TEXT = "list-group-item-text";
    public static final String LIST_UNSTYLED = "list-unstyled";

    // Media objects
    public static final String MEDIA_LIST = "media-list";
    public static final String MEDIA = "media";
    public static final String MEDIA_OBJECT = "media-object";
    public static final String MEDIA_BODY = "media-body";
    public static final String MEDIA_HEADING = "media-heading";

    // Modals
    public static final String MODAL = "modal";
    public static final String MODAL_BACKDROP = "modal-backdrop";
    public static final String MODAL_BODY = "modal-body";
    public static final String MODAL_CONTENT = "modal-content";
    public static final String MODAL_DIALOG = "modal-dialog";
    public static final String MODAL_FOOTER = "modal-footer";
    public static final String MODAL_HEADER = "modal-header";
    public static final String MODAL_TITLE = "modal-title";

    // Navs and navbars
    public static final String NAV = "nav";
    public static final String NAV_JUSTIFIED = "nav-justified";
    public static final String NAV_PILLS = "nav-pills";
    public static final String NAV_STACKED = "nav-stacked";
    public static final String NAV_TABS = "nav-tabs";
    public static final String NAVBAR = "navbar";
    public static final String NAVBAR_BRAND = "navbar-brand";
    public static final String NAVBAR_COLLAPSE = "navbar-collapse";
    public static final String NAVBAR_FORM = "navbar-form";
    public static final String NAVBAR_HEADER = "navbar-header";
    public static final String NAVBAR_LINK = "navbar-link";
    public static final String NAVBAR_NAV = "navbar-nav";
    public static final String NAVBAR_TEXT = "navbar-text";
    public static final String NAVBAR_TOGGLE = "navbar-toggle";
    public static final String NAVBAR_LEFT = "navbar-left";
    public static final String NAVBAR_RIGHT = "navbar-right";

    public static final String OPEN = "open";
    public static final String OUT = "out";
    public static final String NEXT = "next";

    // Panels
    public static final String PANEL = "panel";
    public static final String PANEL_HEADING = "panel-heading";
    public static final String PANEL_GROUP = "panel-group";
    public static final String PANEL_TITLE = "panel-title";
    public static final String PANEL_BODY = "panel-body";
    public static final String PANEL_COLLAPSE = "panel-collapse";
    public static final String PANEL_FOOTER = "panel-footer";

    public static final String PAGE_HEADER = "page-header";
    public static final String PAGER = "pager";
    public static final String PAGINATION = "pagination";

    // Popovers
    public static final String POPOVER = "popover";
    public static final String POPOVER_TITLE = "popover-title";
    public static final String POPOVER_CONTENT = "popover-content";

    public static final String PRE_SCROLLABLE = "pre-scrollable";
    public static final String PREV = "prev";
    public static final String PREVIOUS = "previous";
    public static final String PROGRESS = "progress";
    public static final String PROGRESS_BAR = "progress-bar";
    public static final String PULL_LEFT = "pull-left";
    public static final String PULL_RIGHT = "pull-right";
    public static final String RADIO = "radio";
    public static final String RADIO_INLINE = "radio-inline";
    public static final String RIGHT = "right";
    public static final String ROW = "row";
    public static final String SLIDE = "slide";
    public static final String SEPARATOR = "separator";
    public static final String SR_ONLY = "sr-only";
    public static final String SWITCH = "switch";

    // Tabs and tables
    public static final String TABBABLE = "tabbable";
    public static final String TAB_CONTENT = "tab-content";
    public static final String TAB_PANE = "tab-pane";
    public static final String TABLE = "table";
    public static final String TABLE_CONDENSED = "table-condensed";

    public static final String THUMBNAIL = "thumbnail";

    // Timepicker panes
    public static final String TIMEPICKER = "timepicker";
    public static final String TIMEPICKER_PICKER = "timepicker-picker";
    public static final String TIMEPICKER_HOURS = "timepicker-hours";
    public static final String TIMEPICKER_MINUTES = "timepicker-minutes";
    public static final String TIMEPICKER_HOUR = "timepicker-hour";
    public static final String TIMEPICKER_MINUTE = "timepicker-minute";

    // Tooltips
    public static final String TOOLTIP = "tooltip";
    public static final String TOOLTIP_INNER = "tooltip-inner";
    public static final String TOOLTIP_ARROW = "tooltip-arrow";

    public static final String TOP = "top";
    public static final String WELL = "well";
    public static final String UNSTYLED = "unstyled";

    // Utility class: prevent instantiation.
    private Styles() {
    }
}
| |
/*
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.materials;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.domain.MaterialRevision;
import com.thoughtworks.go.domain.materials.*;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import com.thoughtworks.go.util.command.UrlArgument;
import org.apache.commons.lang.StringUtils;
import java.io.File;
import java.util.Map;
import static com.thoughtworks.go.util.command.EnvironmentVariableContext.escapeEnvironmentVariable;
/**
 * @understands a source control repository and its configuration: the repository
 * location (URL), the destination folder it is checked out to, an optional file
 * filter and the auto-update/invert-filter flags.
 */
public abstract class ScmMaterial extends AbstractMaterial {
    // Names of the environment variables exported to build jobs describing the
    // revision range being built (see populateEnvironmentContext / setGoRevisionVariables).
    public static final String GO_REVISION = "GO_REVISION";
    public static final String GO_TO_REVISION = "GO_TO_REVISION";
    public static final String GO_FROM_REVISION = "GO_FROM_REVISION";
    // File filter configured for this material; set via setFilter().
    protected Filter filter;
    // Destination sub-folder ("dest") under the pipeline working directory; may be null.
    protected String folder;
    protected boolean autoUpdate = true;
    protected boolean invertFilter = false;
    public ScmMaterial(String typeName) {
        super(typeName);
    }
    // The destination folder is what makes an SCM material unique within a pipeline.
    @Override protected void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria) {
        basicCriteria.put("dest", folder);
    }
    /**
     * Resolves the working directory for this material: {@code baseFolder} itself
     * when no destination folder is configured, otherwise the configured folder
     * inside {@code baseFolder}.
     */
    public File workingdir(File baseFolder) {
        if (getFolder() == null) {
            return baseFolder;
        }
        return new File(baseFolder, getFolder());
    }
    // Human-readable description of what is updated ("files" when no folder is set).
    public String updatingTarget() {
        return StringUtils.isEmpty(getFolder()) ? "files" : getFolder();
    }
    // Serializes display attributes of this material into the given (raw, legacy-API) JSON map.
    public void toJson(Map json, Revision revision) {
        json.put("folder", getFolder() == null ? "" : getFolder());
        json.put("scmType", getTypeForDisplay());
        json.put("location", getLocation());
        if (!CaseInsensitiveString.isBlank(getName())) {
            json.put("materialName", CaseInsensitiveString.str(getName()));
        }
        json.put("action", "Modified");
    }
    //Most materials (hg, git, p4) report a modified file path without a leading '/',
    //but Subversion reports it with one; we standardize that here. See the Subversion implementation as well.
    public boolean matches(String name, String regex) {
        if (regex.startsWith("/")) {
            regex = regex.substring(1);
        }
        return name.matches(regex);
    }
    /**
     * Checks out {@code revision} into {@code baseDir}, discarding command output
     * into an in-memory stream consumer.
     */
    public void checkout(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
        InMemoryStreamConsumer output = ProcessOutputStreamConsumer.inMemoryConsumer();
        this.updateTo(output, baseDir, new RevisionContext(revision), execCtx);
    }
    public abstract String getUserName();
    public abstract String getPassword();
    public abstract String getEncryptedPassword();
    public abstract boolean isCheckExternals();
    public abstract String getUrl();
    protected abstract UrlArgument getUrlArgument();
    protected abstract String getLocation();
    public void setFilter(Filter filter) {
        this.filter = filter;
    }
    // Appends a human-readable summary of one modification, used for notification emails.
    public void emailContent(StringBuilder content, Modification modification) {
        content.append(getTypeForDisplay() + ": " + getLocation()).append('\n').append(
                String.format("revision: %s, modified by %s on %s", modification.getRevision(),
                        modification.getUserName(), modification.getModifiedTime()))
                .append('\n')
                .append(modification.getComment());
    }
    public String getDescription() {
        return getUriForDisplay();
    }
    public String getUriForDisplay() {
        return getUrlArgument().forDisplay();
    }
    /**
     * Exports GO_REVISION / GO_TO_REVISION (the newest revision of the material
     * revision) and GO_FROM_REVISION (the oldest) into the build's environment.
     */
    public void populateEnvironmentContext(EnvironmentVariableContext environmentVariableContext, MaterialRevision materialRevision, File workingDir) {
        String toRevision = materialRevision.getRevision().getRevision();
        String fromRevision = materialRevision.getOldestRevision().getRevision();
        setGoRevisionVariables(environmentVariableContext, fromRevision, toRevision);
    }
    private void setGoRevisionVariables(EnvironmentVariableContext environmentVariableContext, String fromRevision, String toRevision) {
        setVariableWithName(environmentVariableContext, toRevision, GO_REVISION);
        setVariableWithName(environmentVariableContext, toRevision, GO_TO_REVISION);
        setVariableWithName(environmentVariableContext, fromRevision, GO_FROM_REVISION);
    }
    // Variable-name precedence: suffix with the (upper-cased, escaped) material name
    // when one is set; otherwise with the escaped destination folder; otherwise use
    // the bare property name. Variables are set non-secure (last arg false).
    protected void setVariableWithName(EnvironmentVariableContext environmentVariableContext, String value, String propertyName) {
        if (!CaseInsensitiveString.isBlank(this.name)) {
            environmentVariableContext.setProperty(propertyName + "_" + escapeEnvironmentVariable(this.name.toUpper()), value, false);
            return;
        }
        String scrubbedFolder = escapeEnvironmentVariable(folder);
        if (!StringUtils.isEmpty(scrubbedFolder)) {
            environmentVariableContext.setProperty(propertyName + "_" + scrubbedFolder, value, false);
        } else {
            environmentVariableContext.setProperty(propertyName, value, false);
        }
    }
    public String getFolder() {
        return folder;
    }
    public String getDisplayName() {
        return name == null ? getUriForDisplay() : CaseInsensitiveString.str(name);
    }
    public boolean isAutoUpdate() {
        return autoUpdate;
    }
    public boolean getAutoUpdate() {
        return autoUpdate;
    }
    public void setAutoUpdate(boolean value) {
        autoUpdate = value;
    }
    public boolean isInvertFilter() {
        return invertFilter;
    }
    public boolean getInvertFilter() {
        return invertFilter;
    }
    public void setInvertFilter(boolean value) {
        invertFilter = value;
    }
    public final MatchedRevision createMatchedRevision(Modification modification, String searchString) {
        return new MatchedRevision(searchString, getShortRevision(modification.getRevision()), modification.getRevision(), modification.getUserName(), modification.getModifiedTime(),
                modification.getComment());
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        ScmMaterial that = (ScmMaterial) o;
        // Beyond the superclass criteria, two SCM materials must share the destination folder.
        if (folder != null ? !folder.equals(that.folder) : that.folder != null) {
            return false;
        }
        return true;
    }
    @Override
    public int hashCode() {
        // Kept consistent with equals(): folder participates in both.
        int result = super.hashCode();
        result = 31 * result + (folder != null ? folder.hashCode() : 0);
        return result;
    }
    public static String changesetUrl(Modification modification, String baseUrl, final long id) {
        return baseUrl + "/api/materials/" + id + "/changeset/" + modification.getRevision() + ".xml";
    }
    // SCM materials never participate in fetch-artifact; subclasses may not override a different answer here.
    public Boolean isUsedInFetchArtifact(PipelineConfig pipelineConfig) {
        return false;
    }
    // TODO: Consider renaming this to dest since we use that word in the UI & Config
    public void setFolder(String folder) {
        this.folder = folder;
    }
    public Revision oldestRevision(Modifications modifications) {
        return Modification.oldestRevision(modifications);
    }
    @Override
    public boolean supportsDestinationFolder() {
        return true;
    }
}
| |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.framework.components.batchjobs.helpers;
import static org.easymock.EasyMock.expect;
import static org.easymock.classextension.EasyMock.createMock;
import static org.easymock.classextension.EasyMock.replay;
import static org.mifos.application.meeting.util.helpers.MeetingType.CUSTOMER_MEETING;
import static org.mifos.application.meeting.util.helpers.RecurrenceType.WEEKLY;
import static org.mifos.framework.util.helpers.TestObjectFactory.EVERY_SECOND_WEEK;
import static org.mifos.framework.util.helpers.TestObjectFactory.EVERY_WEEK;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import junit.framework.Assert;
import org.hibernate.Query;
import org.mifos.accounts.business.AccountActionDateEntity;
import org.mifos.accounts.business.AccountFeesActionDetailEntity;
import org.mifos.accounts.business.AccountFeesEntity;
import org.mifos.accounts.fees.business.AmountFeeBO;
import org.mifos.accounts.fees.business.FeeBO;
import org.mifos.accounts.fees.util.helpers.FeeCategory;
import org.mifos.accounts.persistence.AccountPersistence;
import org.mifos.application.meeting.business.MeetingBO;
import org.mifos.application.meeting.business.WeekDaysEntity;
import org.mifos.application.meeting.util.helpers.RecurrenceType;
import org.mifos.application.meeting.util.helpers.WeekDay;
import org.mifos.customers.business.CustomerAccountBOTestUtils;
import org.mifos.customers.business.CustomerActivityEntity;
import org.mifos.customers.business.CustomerBO;
import org.mifos.customers.business.CustomerScheduleEntity;
import org.mifos.framework.MifosIntegrationTestCase;
import org.mifos.framework.components.batchjobs.SchedulerConstants;
import org.mifos.framework.components.batchjobs.business.Task;
import org.mifos.framework.components.batchjobs.exceptions.BatchJobException;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.Money;
import org.mifos.framework.util.helpers.TestObjectFactory;
/**
 * Integration tests for {@code ApplyCustomerFeeHelper} / {@code ApplyCustomerFeeTask}:
 * verifies that periodic customer fees are applied against a center's account
 * schedule and that the batch task records its own success/failure status.
 */
public class CustomerFeeHelperIntegrationTest extends MifosIntegrationTestCase {
    public CustomerFeeHelperIntegrationTest() throws Exception {
        super();
    }
    // Center created by each test; removed again in tearDown().
    private CustomerBO center;
    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }
    @Override
    public void tearDown() throws Exception {
        TestObjectFactory.cleanUp(center);
        StaticHibernateUtil.closeSession();
        super.tearDown();
    }
    // Fee applied via the task helper obtained from the task itself.
    public void testExecute() throws Exception {
        MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
                CUSTOMER_MEETING));
        center = TestObjectFactory.createWeeklyFeeCenter("center1_Active_test", meeting);
        // Shift every scheduled action date back one day (offSetDate(d, 1) yields the day before d).
        for (AccountActionDateEntity accountActionDateEntity : center.getCustomerAccount().getAccountActionDates()) {
            CustomerAccountBOTestUtils.setActionDate(accountActionDateEntity, offSetDate(accountActionDateEntity
                    .getActionDate(), 1));
        }
        meeting = center.getCustomerMeeting().getMeeting();
        Calendar calendar = new GregorianCalendar();
        calendar.setTime(offSetDate(new Date(System.currentTimeMillis()), 1));
        meeting.setMeetingStartDate(calendar.getTime());
        // Realign the meeting's recurrence to yesterday's weekday so the schedule matches.
        meeting.getMeetingDetails().getMeetingRecurrence().setWeekDay(
                new WeekDaysEntity(WeekDay
                        .getWeekDay(Short.valueOf(String.valueOf(calendar.get(Calendar.DAY_OF_WEEK))))));
        Set<AccountFeesEntity> accountFeeSet = center.getCustomerAccount().getAccountFees();
        FeeBO trainingFee = TestObjectFactory.createPeriodicAmountFee("Training_Fee", FeeCategory.ALLCUSTOMERS, "100",
                RecurrenceType.WEEKLY, Short.valueOf("2"));
        AccountFeesEntity accountPeriodicFee = new AccountFeesEntity(center.getCustomerAccount(), trainingFee,
                ((AmountFeeBO) trainingFee).getFeeAmount().getAmountDoubleValue());
        accountFeeSet.add(accountPeriodicFee);
        Date lastAppliedFeeDate = offSetDate(new Date(System.currentTimeMillis()), 1);
        // Maintenance fee from the fixture + the Training_Fee added above.
        Assert.assertEquals(2, accountFeeSet.size());
        for (Object element : accountFeeSet) {
            AccountFeesEntity accountFeesEntity = (AccountFeesEntity) element;
            accountFeesEntity.setLastAppliedDate(offSetDate(new Date(System.currentTimeMillis()), 1));
        }
        TestObjectFactory.updateObject(center);
        TestObjectFactory.flushandCloseSession();
        center = TestObjectFactory.getCenter(center.getCustomerId());
        ApplyCustomerFeeTask applyCustomerFeeTask = new ApplyCustomerFeeTask();
        ApplyCustomerFeeHelper customerFeeHelper = (ApplyCustomerFeeHelper) applyCustomerFeeTask.getTaskHelper();
        customerFeeHelper.execute(System.currentTimeMillis());
        TestObjectFactory.flushandCloseSession();
        center = TestObjectFactory.getCenter(center.getCustomerId());
        Set<AccountFeesEntity> periodicFeeSet = center.getCustomerAccount().getAccountFees();
        // The every-second-week Training_Fee should be untouched; the weekly fee
        // should have been advanced by one week (7 days earlier than yesterday).
        for (AccountFeesEntity periodicFees : periodicFeeSet) {
            if (periodicFees.getFees().getFeeName().equalsIgnoreCase("Training_Fee")) {
                Assert.assertEquals(lastAppliedFeeDate, DateUtils.getDateWithoutTimeStamp(periodicFees.getLastAppliedDate()
                        .getTime()));
            } else {
                Assert.assertEquals(DateUtils.getDateWithoutTimeStamp(offSetDate(lastAppliedFeeDate, -7).getTime()), DateUtils
                        .getDateWithoutTimeStamp(periodicFees.getLastAppliedDate().getTime()));
            }
        }
    }
    // Fee applied via a helper constructed directly from a task instance.
    public void testExecuteToApplyPeriodicFee() throws Exception {
        MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
                EVERY_SECOND_WEEK, CUSTOMER_MEETING));
        center = TestObjectFactory.createWeeklyFeeCenter("center1_Active_test", meeting);
        for (AccountActionDateEntity accountActionDateEntity : center.getCustomerAccount().getAccountActionDates()) {
            CustomerAccountBOTestUtils.setActionDate(accountActionDateEntity, offSetDate(accountActionDateEntity
                    .getActionDate(), 1));
        }
        meeting = center.getCustomerMeeting().getMeeting();
        Calendar calendar = new GregorianCalendar();
        calendar.setTime(offSetDate(new Date(System.currentTimeMillis()), 1));
        meeting.setMeetingStartDate(calendar.getTime());
        meeting.getMeetingDetails().getMeetingRecurrence().setWeekDay(
                new WeekDaysEntity(WeekDay
                        .getWeekDay(Short.valueOf(String.valueOf(calendar.get(Calendar.DAY_OF_WEEK))))));
        Set<AccountFeesEntity> accountFeeSet = center.getCustomerAccount().getAccountFees();
        FeeBO trainingFee = TestObjectFactory.createPeriodicAmountFee("Training_Fee", FeeCategory.LOAN, "100",
                RecurrenceType.WEEKLY, Short.valueOf("1"));
        AccountFeesEntity accountPeriodicFee = new AccountFeesEntity(center.getCustomerAccount(), trainingFee,
                ((AmountFeeBO) trainingFee).getFeeAmount().getAmountDoubleValue());
        accountPeriodicFee.setLastAppliedDate(offSetDate(new Date(System.currentTimeMillis()), 1));
        accountFeeSet.add(accountPeriodicFee);
        Assert.assertEquals(2, accountFeeSet.size());
        TestObjectFactory.updateObject(center);
        TestObjectFactory.flushandCloseSession();
        center = TestObjectFactory.getCenter(center.getCustomerId());
        ApplyCustomerFeeHelper customerFeeHelper = new ApplyCustomerFeeHelper(new ApplyCustomerFeeTask());
        customerFeeHelper.execute(System.currentTimeMillis());
        TestObjectFactory.flushandCloseSession();
        center = TestObjectFactory.getCenter(center.getCustomerId());
        Date lastAppliedFeeDate = null;
        // Installment 2 should carry both fee action details, each worth 200.0
        // (two weekly periods of 100).
        for (AccountActionDateEntity accountActionDateEntity : center.getCustomerAccount().getAccountActionDates()) {
            CustomerScheduleEntity customerScheduleEntity = (CustomerScheduleEntity) accountActionDateEntity;
            if (customerScheduleEntity.getInstallmentId().equals(Short.valueOf("2"))) {
                lastAppliedFeeDate = customerScheduleEntity.getActionDate();
                Assert.assertEquals(2, customerScheduleEntity.getAccountFeesActionDetails().size());
                for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : customerScheduleEntity
                        .getAccountFeesActionDetails()) {
                    if (accountFeesActionDetailEntity.getFee().getFeeName().equalsIgnoreCase("Training_Fee")) {
                        Assert.assertEquals(new Money(getCurrency(), "200.0"), accountFeesActionDetailEntity.getFeeAmount());
                    } else if (accountFeesActionDetailEntity.getFee().getFeeName().equalsIgnoreCase("Maintenance Fee")) {
                        Assert.assertEquals(new Money(getCurrency(), "200.0"), accountFeesActionDetailEntity.getFeeAmount());
                    }
                }
            }
        }
        for (CustomerActivityEntity customerActivityEntity : center.getCustomerAccount().getCustomerActivitDetails()) {
            Assert.assertEquals(new Money(getCurrency(), "200.0"), customerActivityEntity.getAmount());
        }
        Set<AccountFeesEntity> periodicFeeSet = center.getCustomerAccount().getAccountFees();
        for (AccountFeesEntity periodicFees : periodicFeeSet) {
            if (periodicFees.getFees().getFeeName().equalsIgnoreCase("Training_Fee")) {
                Assert.assertEquals(lastAppliedFeeDate, DateUtils.getDateWithoutTimeStamp(periodicFees.getLastAppliedDate()
                        .getTime()));
            }
        }
    }
    // A clean run records exactly one COMPLETE task row with the success description.
    public void testExecuteTask() throws PersistenceException, BatchJobException {
        ApplyCustomerFeeTask applyCustomerFeeTask = new ApplyCustomerFeeTask();
        applyCustomerFeeTask.name = "ApplyCustomerFeeTask";
        ApplyCustomerFeeHelper applyCustomerFeeHelper = (ApplyCustomerFeeHelper) applyCustomerFeeTask.getTaskHelper();
        applyCustomerFeeHelper.executeTask();
        Query query = StaticHibernateUtil.getSessionTL().createQuery("from " + Task.class.getName());
        List<Task> tasks = query.list();
        Assert.assertNotNull(tasks);
        Assert.assertEquals(1, tasks.size());
        for (Task task : tasks) {
            Assert.assertEquals(TaskStatus.COMPLETE.getValue().shortValue(), task.getStatus());
            Assert.assertEquals("ApplyCustomerFeeTask", task.getTask());
            Assert.assertEquals(SchedulerConstants.FINISHED_SUCCESSFULLY, task.getDescription());
            TestObjectFactory.removeObject(task);
        }
    }
    // A persistence failure (mocked) must still record one FAILED task row.
    public void testExecuteTaskAndForceException() throws PersistenceException, BatchJobException {
        ApplyCustomerFeeTask applyCustomerFeeTask = new ApplyCustomerFeeTask();
        applyCustomerFeeTask.name = "ApplyCustomerFeeTask";
        ApplyCustomerFeeHelper applyCustomerFeeHelper = (ApplyCustomerFeeHelper) applyCustomerFeeTask.getTaskHelper();
        AccountPersistence accountPersistenceMock = createMock(AccountPersistence.class);
        expect(accountPersistenceMock.getAccountsWithYesterdaysInstallment()).andThrow(
                new PersistenceException("mock exception"));
        replay(accountPersistenceMock);
        applyCustomerFeeHelper.setAccountPersistence(accountPersistenceMock);
        applyCustomerFeeHelper.executeTask();
        Query query = StaticHibernateUtil.getSessionTL().createQuery("from " + Task.class.getName());
        List<Task> tasks = query.list();
        Assert.assertNotNull(tasks);
        Assert.assertEquals(1, tasks.size());
        for (Task task : tasks) {
            Assert.assertEquals(TaskStatus.FAILED.getValue().shortValue(), task.getStatus());
            Assert.assertEquals("ApplyCustomerFeeTask", task.getTask());
            TestObjectFactory.removeObject(task);
        }
    }
    // With no usable connection, no task row can be written at all.
    public void testExecuteFailure() {
        ApplyCustomerFeeTask applyCustomerFeeTask = new ApplyCustomerFeeTask();
        applyCustomerFeeTask.name = "ApplyCustomerFeeTask";
        ApplyCustomerFeeHelper applyCustomerFeeHelper = new ApplyCustomerFeeHelper(applyCustomerFeeTask);
        TestObjectFactory.simulateInvalidConnection();
        applyCustomerFeeHelper.executeTask();
        StaticHibernateUtil.closeSession();
        Query query = StaticHibernateUtil.getSessionTL().createQuery("from " + Task.class.getName());
        List<Task> tasks = query.list();
        Assert.assertEquals(0, tasks.size());
    }
    // Returns the given date moved noOfDays backwards (negative values move it
    // forwards), truncated to midnight of that day.
    private java.sql.Date offSetDate(Date date, int noOfDays) {
        Calendar calendar = new GregorianCalendar();
        calendar.setTime(date);
        int year = calendar.get(Calendar.YEAR);
        int month = calendar.get(Calendar.MONTH);
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        calendar = new GregorianCalendar(year, month, day - noOfDays);
        return new java.sql.Date(calendar.getTimeInMillis());
    }
}
| |
package com.mitchbarry.android.whoisit.ui;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.Loader;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.mitchbarry.android.whoisit.R;
import com.mitchbarry.android.whoisit.R.id;
import com.mitchbarry.android.whoisit.R.layout;
import com.github.kevinsawicki.wishlist.SingleTypeAdapter;
import com.github.kevinsawicki.wishlist.Toaster;
import com.github.kevinsawicki.wishlist.ViewUtils;
import java.util.Collections;
import java.util.List;
/**
 * Base fragment for displaying a list of items that loads with a progress bar
 * visible until the first load completes, then swaps between the list view and
 * an empty view depending on whether any items were loaded.
 *
 * @param <E>
 *            type of item displayed in the list
 */
public abstract class ItemListFragment<E> extends Fragment
        implements LoaderCallbacks<List<E>> {
    /**
     * Loader-argument key requesting that any cached items be ignored.
     */
    private static final String FORCE_REFRESH = "forceRefresh";
    /**
     * @param args
     *            bundle passed to the loader by the LoaderManager
     * @return true if the bundle indicates a requested forced refresh of the
     *         items
     */
    protected static boolean isForceRefresh(Bundle args) {
        return args != null && args.getBoolean(FORCE_REFRESH, false);
    }
    /**
     * List items provided to {@link #onLoadFinished(Loader, List)}
     */
    protected List<E> items = Collections.emptyList();
    /**
     * List view
     */
    protected ListView listView;
    /**
     * Empty view shown when {@link #items} is empty
     */
    protected TextView emptyView;
    /**
     * Progress bar shown while a load is in flight
     */
    protected ProgressBar progressBar;
    /**
     * Is the list currently shown?
     */
    protected boolean listShown;
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // If items survived (e.g. fragment retained), show them immediately
        // without the fade animation.
        if (!items.isEmpty())
            setListShown(true, false);
        Bundle extras = getArguments();
        getLoaderManager().initLoader(0, extras, this);
    }
    @Override
    public void onResume() {
        super.onResume();
        // Always reload, ignoring caches, when the fragment returns to the foreground.
        forceRefresh();
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // NOTE(review): inflating with a null root ignores the layout params of the
        // XML root element — presumably intentional here; confirm before changing.
        return inflater.inflate(layout.item_list, null);
    }
    /**
     * Detach from list view.
     */
    @Override
    public void onDestroyView() {
        listShown = false;
        emptyView = null;
        progressBar = null;
        listView = null;
        super.onDestroyView();
    }
    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        listView = (ListView) view.findViewById(android.R.id.list);
        // Forward click/long-click events to the overridable callbacks below.
        listView.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view,
                    int position, long id) {
                onListItemClick((ListView) parent, view, position, id);
            }
        });
        listView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
            @Override
            public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
                onListItemLongClick(parent, view, position, id);
                return true;
            }
        });
        progressBar = (ProgressBar) view.findViewById(id.pb_loading);
        emptyView = (TextView) view.findViewById(android.R.id.empty);
        configureList(getActivity(), getListView());
    }
    /**
     * Configure list after view has been created
     *
     * @param activity
     * @param listView
     */
    protected void configureList(Activity activity, ListView listView) {
        listView.setAdapter(createAdapter());
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
    }
    @Override
    public void onCreateOptionsMenu(Menu optionsMenu, MenuInflater inflater) {
        inflater.inflate(R.menu.list_menu, optionsMenu);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (!isUsable())
            return false;
        switch (item.getItemId()) {
        case id.refresh:
            forceRefresh();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }
    /**
     * Force a refresh of the items displayed ignoring any cached items
     */
    protected void forceRefresh() {
        Bundle bundle = getArguments();
        if (bundle == null)
            bundle = new Bundle();
        bundle.putBoolean(FORCE_REFRESH, true);
        refresh(bundle);
    }
    /**
     * Refresh the fragment's list
     */
    public void refresh() {
        refresh(null);
    }
    private void refresh(final Bundle args) {
        if (!isUsable())
            return;
        getActionBarActivity().setSupportProgressBarIndeterminateVisibility(true);
        getLoaderManager().restartLoader(0, args, this);
    }
    private ActionBarActivity getActionBarActivity() {
        return ((ActionBarActivity)getActivity());
    }
    /**
     * Get error message to display for exception
     *
     * @param exception
     * @return string resource id
     */
    protected abstract int getErrorMessage(Exception exception);
    @Override
    public void onLoadFinished(Loader<List<E>> loader, List<E> items) {
        getActionBarActivity().setSupportProgressBarIndeterminateVisibility(false);
        Exception exception = getException(loader);
        if (exception != null) {
            // Loader failed: report the error but still swap away from the progress bar.
            showError(getErrorMessage(exception));
            showList();
            return;
        }
        this.items = items;
        getListAdapter().getWrappedAdapter().setItems(items.toArray());
        showList();
    }
    /**
     * Create adapter to display items
     *
     * @return adapter
     */
    protected HeaderFooterListAdapter<SingleTypeAdapter<E>> createAdapter() {
        SingleTypeAdapter<E> wrapped = createAdapter(items);
        return new HeaderFooterListAdapter<SingleTypeAdapter<E>>(getListView(),
                wrapped);
    }
    /**
     * Create adapter to display items
     *
     * @param items
     * @return adapter
     */
    protected abstract SingleTypeAdapter<E> createAdapter(final List<E> items);
    /**
     * Set the list to be shown
     */
    protected void showList() {
        setListShown(true, isResumed());
    }
    @Override
    public void onLoaderReset(Loader<List<E>> loader) {
        // Intentionally left blank
    }
    /**
     * Show exception in a Toast
     *
     * @param message
     */
    protected void showError(final int message) {
        Toaster.showLong(getActivity(), message);
    }
    /**
     * Get exception from loader if it provides one by being a
     * {@link ThrowableLoader}
     *
     * @param loader
     * @return exception or null if none provided
     */
    protected Exception getException(final Loader<List<E>> loader) {
        if (loader instanceof ThrowableLoader)
            return ((ThrowableLoader<List<E>>) loader).clearException();
        else
            return null;
    }
    /**
     * Refresh the list with the progress bar showing
     */
    protected void refreshWithProgress() {
        // NOTE(review): before the first load, items is Collections.emptyList(),
        // where clear() is a no-op; afterwards this mutates the loader-supplied list.
        items.clear();
        setListShown(false);
        refresh();
    }
    /**
     * Get {@link ListView}
     *
     * @return listView
     */
    public ListView getListView() {
        return listView;
    }
    /**
     * Get list adapter
     *
     * @return list adapter
     */
    @SuppressWarnings("unchecked")
    protected HeaderFooterListAdapter<SingleTypeAdapter<E>> getListAdapter() {
        if (listView != null)
            return (HeaderFooterListAdapter<SingleTypeAdapter<E>>) listView
                    .getAdapter();
        else
            return null;
    }
    /**
     * Set list adapter to use on list view
     *
     * @param adapter
     * @return this fragment
     */
    protected ItemListFragment<E> setListAdapter(final ListAdapter adapter) {
        if (listView != null)
            listView.setAdapter(adapter);
        return this;
    }
    private ItemListFragment<E> fadeIn(final View view, final boolean animate) {
        if (view != null)
            if (animate)
                view.startAnimation(AnimationUtils.loadAnimation(getActivity(),
                        android.R.anim.fade_in));
            else
                view.clearAnimation();
        return this;
    }
    private ItemListFragment<E> show(final View view) {
        ViewUtils.setGone(view, false);
        return this;
    }
    private ItemListFragment<E> hide(final View view) {
        ViewUtils.setGone(view, true);
        return this;
    }
    /**
     * Set list shown or progress bar show
     *
     * @param shown
     * @return this fragment
     */
    public ItemListFragment<E> setListShown(final boolean shown) {
        return setListShown(shown, true);
    }
    /**
     * Set list shown or progress bar show
     *
     * @param shown
     * @param animate
     * @return this fragment
     */
    public ItemListFragment<E> setListShown(final boolean shown,
            final boolean animate) {
        if (!isUsable())
            return this;
        if (shown == listShown) {
            if (shown)
                // List has already been shown so hide/show the empty view with
                // no fade effect
                if (items.isEmpty())
                    hide(listView).show(emptyView);
                else
                    hide(emptyView).show(listView);
            return this;
        }
        listShown = shown;
        if (shown)
            if (!items.isEmpty())
                hide(progressBar).hide(emptyView).fadeIn(listView, animate)
                        .show(listView);
            else
                hide(progressBar).hide(listView).fadeIn(emptyView, animate)
                        .show(emptyView);
        else
            hide(listView).hide(emptyView).fadeIn(progressBar, animate)
                    .show(progressBar);
        return this;
    }
    /**
     * Set empty text on list fragment
     *
     * @param message
     * @return this fragment
     */
    protected ItemListFragment<E> setEmptyText(final String message) {
        if (emptyView != null)
            emptyView.setText(message);
        return this;
    }
    /**
     * Set empty text on list fragment
     *
     * @param resId
     * @return this fragment
     */
    protected ItemListFragment<E> setEmptyText(final int resId) {
        if (emptyView != null)
            emptyView.setText(resId);
        return this;
    }
    /**
     * Callback when a list view item is clicked
     *
     * @param l
     * @param v
     * @param position
     * @param id
     */
    public void onListItemClick(ListView l, View v, int position, long id) {
    }
    /**
     * Callback when a list view item is long clicked
     *
     * @param parent
     * @param v
     * @param position
     * @param id
     */
    public void onListItemLongClick(AdapterView<?> parent, View v, int position, long id) {
    }
    /**
     * Is this fragment still part of an activity and usable from the UI-thread?
     *
     * @return true if usable on the UI-thread, false otherwise
     */
    protected boolean isUsable() {
        return getActivity() != null;
    }
}
| |
/*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.ui.repo;
import static android.content.Intent.FLAG_ACTIVITY_CLEAR_TOP;
import static android.content.Intent.FLAG_ACTIVITY_SINGLE_TOP;
import static com.github.mobile.Intents.EXTRA_POSITION;
import static com.github.mobile.Intents.EXTRA_REPOSITORY;
import static com.github.mobile.ResultCodes.RESOURCE_CHANGED;
import static com.github.mobile.ui.repo.RepositoryPagerAdapter.ITEM_CODE;
import android.app.SearchManager;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ProgressBar;
import com.github.kevinsawicki.wishlist.ViewUtils;
import com.github.mobile.Intents.Builder;
import com.github.mobile.R;
import com.github.mobile.core.repo.ForkRepositoryTask;
import com.github.mobile.core.repo.RefreshRepositoryTask;
import com.github.mobile.core.repo.RepositoryUtils;
import com.github.mobile.core.repo.StarRepositoryTask;
import com.github.mobile.core.repo.StarredRepositoryTask;
import com.github.mobile.core.repo.UnstarRepositoryTask;
import com.github.mobile.ui.TabPagerActivity;
import com.github.mobile.ui.UriLauncherActivity;
import com.github.mobile.ui.user.UserViewActivity;
import com.github.mobile.util.AvatarLoader;
import com.github.mobile.util.ShareUtils;
import com.github.mobile.util.ToastUtils;
import com.github.mobile.util.TypefaceUtils;
import com.google.inject.Inject;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.User;
/**
* Activity to view a repository
*/
public class RepositoryViewActivity extends TabPagerActivity<RepositoryPagerAdapter> {
/**
* Create intent for this activity
*
* @param repository
* @return intent
*/
public static Intent createIntent(Repository repository) {
return new Builder("repo.VIEW").repo(repository).toIntent();
}
/**
* Create intent for this activity and open the issues tab
*
* @param repository
* @return intent
*/
public static Intent createIntentForIssues(Repository repository) {
return new Builder("repo.VIEW").repo(repository).add(EXTRA_POSITION, 3).toIntent();
}
private Repository repository;
@Inject
private AvatarLoader avatars;
private ProgressBar loadingBar;
private boolean isStarred;
private boolean starredStatusChecked;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        repository = getSerializableExtra(EXTRA_REPOSITORY);
        loadingBar = finder.find(R.id.pb_loading);
        User owner = repository.getOwner();
        ActionBar actionBar = getSupportActionBar();
        actionBar.setTitle(repository.getName());
        actionBar.setSubtitle(owner.getLogin());
        actionBar.setDisplayHomeAsUpEnabled(true);
        // Only build the tab pager immediately when the repository object is
        // already complete; otherwise show the loading bar and refresh it first.
        if (owner.getAvatarUrl() != null && RepositoryUtils.isComplete(repository))
            configurePager();
        else {
            avatars.bind(getSupportActionBar(), owner);
            ViewUtils.setGone(loadingBar, false);
            setGone(true);
            new RefreshRepositoryTask(this, repository) {
                @Override
                protected void onSuccess(Repository fullRepository) throws Exception {
                    super.onSuccess(fullRepository);
                    // Replace the partial repository and write it back onto the
                    // intent — presumably so later reads see the full object; confirm.
                    repository = fullRepository;
                    getIntent().putExtra(EXTRA_REPOSITORY, repository);
                    configurePager();
                }
                @Override
                protected void onException(Exception e) throws RuntimeException {
                    super.onException(e);
                    // Refresh failed: tell the user and stop the loading indicator.
                    ToastUtils.show(RepositoryViewActivity.this, R.string.error_repo_load);
                    ViewUtils.setGone(loadingBar, true);
                }
            }.execute();
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu optionsMenu) {
        // Contribute the repository actions (star, fork, share, ...) to the menu.
        getMenuInflater().inflate(R.menu.repository, optionsMenu);
        return super.onCreateOptionsMenu(optionsMenu);
    }
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
MenuItem followItem = menu.findItem(R.id.m_star);
followItem.setVisible(starredStatusChecked);
followItem.setTitle(isStarred ? R.string.unstar : R.string.star);
return super.onPrepareOptionsMenu(menu);
}
@Override
public boolean onSearchRequested() {
if (pager.getCurrentItem() == 1) {
Bundle args = new Bundle();
args.putSerializable(EXTRA_REPOSITORY, repository);
startSearch(null, false, args, false);
return true;
} else
return false;
}
@Override
public void startActivity(Intent intent) {
// Inject extra information into search intents
// Search intents will probably come from the "Issues" fragment
if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
Bundle bundle = new Bundle();
bundle.putSerializable(EXTRA_REPOSITORY, repository);
intent.putExtra(SearchManager.APP_DATA, bundle);
}
super.startActivity(intent);
}
@Override
public void onBackPressed() {
if (adapter == null || pager.getCurrentItem() != ITEM_CODE || !adapter.onBackPressed())
super.onBackPressed();
}
private void configurePager() {
avatars.bind(getSupportActionBar(), repository.getOwner());
configureTabPager();
ViewUtils.setGone(loadingBar, true);
setGone(false);
checkStarredRepositoryStatus();
int initialPosition = getIntExtra(EXTRA_POSITION);
if (initialPosition != -1) {
pager.setItem(initialPosition);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.m_star:
starRepository();
return true;
case R.id.m_fork:
forkRepository();
return true;
case R.id.m_contributors:
startActivity(RepositoryContributorsActivity.createIntent(repository));
return true;
case R.id.m_share:
shareRepository();
return true;
case R.id.m_refresh:
checkStarredRepositoryStatus();
return super.onOptionsItemSelected(item);
case R.id.m_open_browser:
Uri repoUri = Uri.parse("https://github.com/" + repository.generateId());
UriLauncherActivity.launchUriInBrowser(this, repoUri);
return true;
case android.R.id.home:
finish();
Intent intent = UserViewActivity.createIntent(repository.getOwner());
intent.addFlags(FLAG_ACTIVITY_CLEAR_TOP | FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onDialogResult(int requestCode, int resultCode, Bundle arguments) {
adapter.onDialogResult(pager.getCurrentItem(), requestCode, resultCode, arguments);
}
@Override
protected RepositoryPagerAdapter createAdapter() {
return new RepositoryPagerAdapter(this, repository.isHasIssues());
}
@Override
protected int getContentView() {
return R.layout.tabbed_progress_pager;
}
@Override
protected String getIcon(int position) {
switch (position) {
case 0:
return TypefaceUtils.ICON_RSS;
case 1:
return TypefaceUtils.ICON_FILE_CODE;
case 2:
return TypefaceUtils.ICON_GIT_COMMIT;
case 3:
return TypefaceUtils.ICON_ISSUE_OPENED;
default:
return super.getIcon(position);
}
}
private void starRepository() {
if (isStarred)
new UnstarRepositoryTask(this, repository) {
@Override
protected void onSuccess(Void v) throws Exception {
super.onSuccess(v);
isStarred = !isStarred;
setResult(RESOURCE_CHANGED);
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
ToastUtils.show(RepositoryViewActivity.this, R.string.error_unstarring_repository);
}
}.start();
else
new StarRepositoryTask(this, repository) {
@Override
protected void onSuccess(Void v) throws Exception {
super.onSuccess(v);
isStarred = !isStarred;
setResult(RESOURCE_CHANGED);
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
ToastUtils.show(RepositoryViewActivity.this, R.string.error_starring_repository);
}
}.start();
}
private void checkStarredRepositoryStatus() {
starredStatusChecked = false;
new StarredRepositoryTask(this, repository) {
@Override
protected void onSuccess(Boolean watching) throws Exception {
super.onSuccess(watching);
isStarred = watching;
starredStatusChecked = true;
invalidateOptionsMenu();
}
}.execute();
}
private void shareRepository() {
String repoUrl = repository.getHtmlUrl();
if (TextUtils.isEmpty(repoUrl))
repoUrl = "https://github.com/" + repository.generateId();
Intent sharingIntent = ShareUtils.create(repository.generateId(), repoUrl);
startActivity(sharingIntent);
}
private void forkRepository() {
new ForkRepositoryTask(this, repository) {
@Override
protected void onSuccess(Repository e) throws Exception {
super.onSuccess(e);
if (e != null) {
UriLauncherActivity.launchUri(getContext(), Uri.parse(e.getHtmlUrl()));
} else {
ToastUtils.show(RepositoryViewActivity.this, R.string.error_forking_repository);
}
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
ToastUtils.show(RepositoryViewActivity.this, R.string.error_forking_repository);
}
}.start();
}
}
| |
/**
* Copyright (c) 2010-2016 Yahoo! Inc., 2017 YCSB contributors All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package site.ycsb.measurements;
import site.ycsb.measurements.exporter.MeasurementsExporter;
import org.HdrHistogram.Histogram;
import org.HdrHistogram.HistogramIterationValue;
import org.HdrHistogram.HistogramLogWriter;
import org.HdrHistogram.Recorder;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* Take measurements and maintain a HdrHistogram of a given metric, such as READ LATENCY.
*
*/
public class OneMeasurementHdrHistogram extends OneMeasurement {
  // we need one log per measurement histogram; null unless file output is enabled
  private final PrintStream log;
  private final HistogramLogWriter histogramLogWriter;
  // Recorder allows concurrent recording from client threads (see measure()).
  private final Recorder histogram;
  // Accumulated totals across all intervals; built lazily in
  // getIntervalHistogramAndAccumulate().
  private Histogram totalHistogram;
  /**
   * The name of the property for deciding what percentile values to output.
   */
  public static final String PERCENTILES_PROPERTY = "hdrhistogram.percentiles";
  /**
   * The default value for the hdrhistogram.percentiles property.
   */
  public static final String PERCENTILES_PROPERTY_DEFAULT = "95,99";
  /**
   * The name of the property for determining if we should print out the buckets.
   */
  public static final String VERBOSE_PROPERTY = "measurement.histogram.verbose";
  /**
   * Whether or not to emit the histogram buckets.
   */
  private final boolean verbose;
  private final List<Double> percentiles;

  /**
   * @param name measurement name, used in exported rows and as the hdr log file prefix
   * @param props workload properties controlling percentiles, verbosity and file output
   * @throws RuntimeException if file output is requested but the file cannot be opened
   */
  public OneMeasurementHdrHistogram(String name, Properties props) {
    super(name);
    percentiles = getPercentileValues(props.getProperty(PERCENTILES_PROPERTY, PERCENTILES_PROPERTY_DEFAULT));
    // parseBoolean avoids the pointless Boolean boxing of Boolean.valueOf(...)
    verbose = Boolean.parseBoolean(props.getProperty(VERBOSE_PROPERTY, "false"));
    boolean shouldLog = Boolean.parseBoolean(props.getProperty("hdrhistogram.fileoutput", "false"));
    if (!shouldLog) {
      log = null;
      histogramLogWriter = null;
    } else {
      try {
        final String hdrOutputFilename = props.getProperty("hdrhistogram.output.path", "") + name + ".hdr";
        log = new PrintStream(new FileOutputStream(hdrOutputFilename), false);
      } catch (FileNotFoundException e) {
        throw new RuntimeException("Failed to open hdr histogram output file", e);
      }
      histogramLogWriter = new HistogramLogWriter(log);
      histogramLogWriter.outputComment("[Logging for: " + name + "]");
      histogramLogWriter.outputLogFormatVersion();
      long now = System.currentTimeMillis();
      histogramLogWriter.outputStartTime(now);
      histogramLogWriter.setBaseTime(now);
      histogramLogWriter.outputLegend();
    }
    // 3 = number of significant value digits kept by the histogram
    histogram = new Recorder(3);
  }

  /**
   * It appears latency is reported in micros.
   * Using {@link Recorder} to support concurrent updates to histogram.
   */
  public void measure(int latencyInMicros) {
    histogram.recordValue(latencyInMicros);
  }

  /**
   * This is called from a main thread, on orderly termination.
   * Flushes the final interval, closes the hdr log (if any) and exports
   * operation counts, latency summary statistics, the configured percentiles
   * and (when verbose) every recorded bucket.
   */
  @Override
  public void exportMeasurements(MeasurementsExporter exporter) throws IOException {
    // accumulate the last interval which was not caught by status thread
    Histogram intervalHistogram = getIntervalHistogramAndAccumulate();
    if (histogramLogWriter != null) {
      histogramLogWriter.outputIntervalHistogram(intervalHistogram);
      // we can close now
      log.close();
    }
    exporter.write(getName(), "Operations", totalHistogram.getTotalCount());
    exporter.write(getName(), "AverageLatency(us)", totalHistogram.getMean());
    exporter.write(getName(), "MinLatency(us)", totalHistogram.getMinValue());
    exporter.write(getName(), "MaxLatency(us)", totalHistogram.getMaxValue());
    for (Double percentile : percentiles) {
      exporter.write(getName(), ordinal(percentile) + "PercentileLatency(us)",
          totalHistogram.getValueAtPercentile(percentile));
    }
    exportStatusCounts(exporter);
    // also export totalHistogram
    if (verbose) {
      for (HistogramIterationValue v : totalHistogram.recordedValues()) {
        int value;
        // clamp: exported bucket labels are ints, histogram values are longs
        if (v.getValueIteratedTo() > (long) Integer.MAX_VALUE) {
          value = Integer.MAX_VALUE;
        } else {
          value = (int) v.getValueIteratedTo();
        }
        exporter.write(getName(), Integer.toString(value), (double) v.getCountAtValueIteratedTo());
      }
    }
  }

  /**
   * This is called periodically from the StatusThread. There's a single
   * StatusThread per Client process. We optionally serialize the interval to
   * log on this opportunity.
   *
   * @see site.ycsb.measurements.OneMeasurement#getSummary()
   */
  @Override
  public String getSummary() {
    Histogram intervalHistogram = getIntervalHistogramAndAccumulate();
    // we use the summary interval as the histogram file interval.
    if (histogramLogWriter != null) {
      histogramLogWriter.outputIntervalHistogram(intervalHistogram);
    }
    DecimalFormat d = new DecimalFormat("#.##");
    return "[" + getName() + ": Count=" + intervalHistogram.getTotalCount() + ", Max="
        + intervalHistogram.getMaxValue() + ", Min=" + intervalHistogram.getMinValue() + ", Avg="
        + d.format(intervalHistogram.getMean()) + ", 90=" + d.format(intervalHistogram.getValueAtPercentile(90))
        + ", 99=" + d.format(intervalHistogram.getValueAtPercentile(99)) + ", 99.9="
        + d.format(intervalHistogram.getValueAtPercentile(99.9)) + ", 99.99="
        + d.format(intervalHistogram.getValueAtPercentile(99.99)) + "]";
  }

  /**
   * Swaps out the current recording interval and folds it into the running
   * total. Callers (export and status thread) run on a single main/status
   * thread, so the lazy init of totalHistogram is safe here.
   *
   * @return the histogram for the interval just ended
   */
  private Histogram getIntervalHistogramAndAccumulate() {
    Histogram intervalHistogram = histogram.getIntervalHistogram();
    // add this to the total time histogram.
    if (totalHistogram == null) {
      totalHistogram = intervalHistogram;
    } else {
      totalHistogram.add(intervalHistogram);
    }
    return intervalHistogram;
  }

  /**
   * Helper method to parse the given percentile value string.
   *
   * @param percentileString comma delimited string of percentile values
   * @return a Double list of percentile values; the defaults if the string is unreadable
   */
  private List<Double> getPercentileValues(String percentileString) {
    List<Double> percentileValues = new ArrayList<>();
    try {
      for (String rawPercentile : percentileString.split(",")) {
        percentileValues.add(Double.parseDouble(rawPercentile));
      }
    } catch (Exception e) {
      // If the given hdrhistogram.percentiles value is unreadable for whatever reason,
      // then calculate and return the default set.
      System.err.println("[WARN] Couldn't read " + PERCENTILES_PROPERTY + " value: '" + percentileString +
          "', the default of '" + PERCENTILES_PROPERTY_DEFAULT + "' will be used.");
      e.printStackTrace();
      return getPercentileValues(PERCENTILES_PROPERTY_DEFAULT);
    }
    return percentileValues;
  }

  /**
   * Helper method to find the ordinal of any number. eg 1 -> 1st
   * Non-integral values (e.g. 99.9) are returned as their plain decimal string.
   *
   * @param i number
   * @return ordinal string
   */
  private String ordinal(Double i) {
    String[] suffixes = new String[]{"th", "st", "nd", "rd", "th", "th", "th", "th", "th", "th"};
    // primitive avoids the needless Integer boxing of the original
    int j = i.intValue();
    if (i % 1 == 0) {
      switch (j % 100) {
      case 11:
      case 12:
      case 13:
        // 11th, 12th and 13th are irregular
        return j + "th";
      default:
        return j + suffixes[j % 10];
      }
    } else {
      return i.toString();
    }
  }
}
| |
package fi.nls.oskari.service;
import fi.nls.oskari.domain.GuestUser;
import fi.nls.oskari.domain.Role;
import fi.nls.oskari.domain.User;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.util.PropertyUtil;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Common interface for managing users.
 * TODO: this interface is still under development and new methods will probably be added when needed.
*/
public abstract class UserService {
    private static final Logger log = LogFactory.getLogger(UserService.class);
    // Lazily created singleton; guarded by the class lock in getInstance().
    private static UserService instance = null;
    /**
     * Returns a concrete implementation of UserService. Class to be returned is defined with property "oskari.user.service".
     * Synchronized so concurrent first calls cannot construct two instances, and the
     * instance is only published after init() succeeds so a failed init is retried
     * instead of handing out a half-initialized service.
     * @return the shared UserService instance
     * @throws ServiceException if the property is missing or the class cannot be instantiated/initialized
     */
    public static synchronized UserService getInstance() throws ServiceException {
        if(instance != null) {
            return instance;
        }
        final String className = PropertyUtil.getOptional("oskari.user.service");
        if(className == null) {
            throw new ServiceException("User service implementation not defined, add 'oskari.user.service' property with a value of fully qualified classname extending " + UserService.class.getCanonicalName());
        }
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which silently rethrows checked constructor exceptions.
            final UserService service = (UserService) Class.forName(className).getDeclaredConstructor().newInstance();
            service.init();
            instance = service;
            return instance;
        } catch (Exception e) {
            throw new ServiceException("Error initializing UserService for classname: " + className, e);
        }
    }
    /**
     * Returns a Guest user. This method should be overridden by implementations to add a Guest role to the user.
     * Permission mappings can't be done correctly for guests if they have no roles.
     * @return a new GuestUser instance
     */
    public User getGuestUser() {
        return new GuestUser();
    }
    /**
     * Optional initialize hook. Original init does nothing, so this is just a hook to do initialization on actual service implementations.
     * @throws ServiceException should be thrown if something goes wrong on init.
     */
    public void init() throws ServiceException {
    }
    /**
     * Optional destroy hook. Original teardown does nothing, so this is just a hook for cleaning up on actual service implementations.
     * @throws ServiceException should be thrown if something goes wrong on teardown.
     */
    public void teardown() throws ServiceException {
    }
    /**
     * Checks if the user exists in the system.
     * @param user username
     * @param pass password
     * @return logged in user if successful, null if user not found
     * @throws ServiceException if anything goes wrong internally.
     */
    public abstract User login(String user, String pass) throws ServiceException;
    /**
     * Inserts a new role. The default implementation always throws.
     * @param roleId role name/id to insert
     * @return the created role
     * @throws ServiceException if anything goes wrong internally.
     */
    public Role insertRole(String roleId) throws ServiceException {
        throw new ServiceException("Not Implemented Yet");
    }
    /**
     * Deletes a role. The default implementation always throws.
     * @param roleId id of the role to delete
     * @return implementation-specific status
     * @throws ServiceException if anything goes wrong internally.
     */
    public String deleteRole(int roleId) throws ServiceException {
        throw new ServiceException("Not Implemented Yet");
    }
    /**
     * Modifies a user's role. The default implementation always throws.
     * @param roleId role to assign
     * @param userID user whose role is modified
     * @return implementation-specific status
     * @throws ServiceException if anything goes wrong internally.
     */
    public String modifyRole(String roleId, String userID) throws ServiceException {
        throw new ServiceException("Not Implemented Yet");
    }
    /**
     * Returns all roles that exist in the system
     * @param platformSpecificParams optional platform specific parameters needed to get/filter roles. If implementation doesnt need any an empty map can be used.
     * @return all roles from the system
     * @throws ServiceException if anything goes wrong internally.
     */
    public abstract Role[] getRoles(Map<Object, Object> platformSpecificParams) throws ServiceException;
    /**
     * Generates UUID from unique user id
     * @param uid string that identifies user; a random UUID is returned if null
     * @return uuid
     */
    public String generateUuid(String uid) {
        if(uid == null) {
            return generateUuid();
        }
        return UUID.nameUUIDFromBytes(uid.getBytes()).toString();
    }
    /**
     * Generates random UUID
     * @return uuid
     */
    public String generateUuid() {
        return UUID.randomUUID().toString();
    }
    /**
     * Returns all roles that exist in the system. Convenience method for calling getRoles(Map) with empty map
     * @return all roles from the system
     * @throws ServiceException if anything goes wrong internally.
     */
    public Role[] getRoles() throws ServiceException {
        return getRoles(Collections.emptyMap());
    }
    /**
     * Returns the role with the given name, or null if it is not found or
     * fetching the roles fails (the error is logged, not rethrown).
     * @param name role name to look up
     * @return matching role or null
     */
    public Role getRoleByName(final String name) {
        try {
            // TODO: maybe some caching for roles?
            Role[] roles = getRoles();
            for(Role role : roles) {
                if(role.getName().equals(name)) {
                    return role;
                }
            }
        }
        catch (Exception ex) {
            // best-effort lookup: log and fall through to null
            log.error(ex, "Error getting roles from user service");
        }
        return null;
    }
    /**
     * Return the user by username. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param username username to look up
     * @return User user
     * @throws ServiceException always, in the default implementation
     */
    public User getUser(String username) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Return the user by id. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param id user id
     * @return User user
     * @throws ServiceException always, in the default implementation
     */
    public User getUser(long id) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Return all users. This method should be overridden in concrete implementation. The
     * default implementation returns an empty list.
     * @return List<User> users
     * @throws ServiceException if anything goes wrong internally.
     */
    public List<User> getUsers() throws ServiceException {
        return Collections.emptyList();
    }
    /**
     * Return all users with their roles populated. This method should be overridden in concrete
     * implementation. The default implementation returns an empty list.
     * @return List<User> users
     * @throws ServiceException if anything goes wrong internally.
     */
    public List<User> getUsersWithRoles() throws ServiceException {
        return Collections.emptyList();
    }
    /**
     * Create a new user. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param user User to be created
     * @return User created user
     * @throws ServiceException always, in the default implementation
     */
    public User createUser(User user) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Create a new user with the given roles. This method should be overridden in concrete
     * implementation. The default implementation always throws an exception.
     * @param user User to be created
     * @param roleIds ids of roles to assign to the new user
     * @return User created user
     * @throws ServiceException always, in the default implementation
     */
    public User createUser(User user, String[] roleIds) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Modify a user. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param user Modified user
     * @return Modified user
     * @throws ServiceException always, in the default implementation
     */
    public User modifyUser(User user) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Modify a user and the user's roles. This method should be overridden in concrete
     * implementation. The default implementation always throws an exception.
     * @param user Modified user
     * @param roleIds ids of roles to assign to the user
     * @return Modified user
     * @throws ServiceException always, in the default implementation
     */
    public User modifyUserwithRoles(User user, String[] roleIds) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Delete a user. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param id User id
     * @throws ServiceException always, in the default implementation
     */
    public void deleteUser(long id) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Set a user's password. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param screenname User name
     * @param password User password
     * @throws ServiceException always, in the default implementation
     */
    public void setUserPassword(String screenname, String password) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
    /**
     * Updates a user's password. This method should be overridden in concrete implementation. The
     * default implementation always throws an exception.
     * @param screenname User name
     * @param password User password
     * @throws ServiceException always, in the default implementation
     */
    public void updateUserPassword(String screenname, String password) throws ServiceException {
        throw new ServiceException("Not implemented");
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.controller;
import java.util.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.navercorp.pinpoint.common.bo.SpanBo;
import com.navercorp.pinpoint.common.util.DateUtils;
import com.navercorp.pinpoint.web.filter.Filter;
import com.navercorp.pinpoint.web.filter.FilterBuilder;
import com.navercorp.pinpoint.web.service.FilteredMapService;
import com.navercorp.pinpoint.web.service.ScatterChartService;
import com.navercorp.pinpoint.web.util.LimitUtils;
import com.navercorp.pinpoint.web.util.TimeUtils;
import com.navercorp.pinpoint.web.vo.*;
import com.navercorp.pinpoint.web.vo.scatter.Dot;
import com.navercorp.pinpoint.web.vo.scatter.ScatterIndex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.StopWatch;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
/**
* @author netspider
* @author emeroad
*/
@Controller
public class ScatterChartController {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private ScatterChartService scatter;
@Autowired
private FilteredMapService flow;
@Autowired
private FilterBuilder filterBuilder;
private static final String PREFIX_TRANSACTION_ID = "I";
private static final String PREFIX_TIME = "T";
private static final String PREFIX_RESPONSE_TIME = "R";
@Deprecated
@RequestMapping(value = "/scatterpopup", method = RequestMethod.GET)
public String scatterPopup(Model model,
@RequestParam("application") String applicationName,
@RequestParam("from") long from,
@RequestParam("to") long to,
@RequestParam("period") long period,
@RequestParam("usePeriod") boolean usePeriod,
@RequestParam(value = "filter", required = false) String filterText) {
model.addAttribute("applicationName", applicationName);
model.addAttribute("from", from);
model.addAttribute("to", to);
model.addAttribute("period", period);
model.addAttribute("usePeriod", usePeriod);
model.addAttribute("filter", filterText);
return "scatterPopup";
}
/**
* @param applicationName
* @param from
* @param to
* @param limit max number of data return. if the requested data exceed this limit, we need additional calls to
* fetch the rest of the data
* @return
*/
@RequestMapping(value = "/getScatterData", method = RequestMethod.GET)
public ModelAndView getScatterData(
@RequestParam("application") String applicationName,
@RequestParam("from") long from,
@RequestParam("to") long to,
@RequestParam("limit") int limit,
@RequestParam(value = "filter", required = false) String filterText,
@RequestParam(value = "_callback", required = false) String jsonpCallback,
@RequestParam(value = "v", required = false, defaultValue = "2") int version) {
limit = LimitUtils.checkRange(limit);
StopWatch watch = new StopWatch();
watch.start("selectScatterData");
// TODO range check verification exception occurs. "from" is bigger than "to"
final Range range = Range.createUncheckedRange(from, to);
logger.debug("fetch scatter data. {}, LIMIT={}, FILTER={}", range, limit, filterText);
ModelAndView mv;
if (filterText == null) {
mv = selectScatterData(applicationName, range, limit, jsonpCallback, version);
} else {
mv = selectFilterScatterDataData(applicationName, range, filterText, limit, jsonpCallback, version);
}
watch.stop();
logger.info("Fetch scatterData time : {}ms", watch.getLastTaskTimeMillis());
return mv;
}
private ModelAndView selectFilterScatterDataData(String applicationName, Range range, String filterText, int limit, String jsonpCallback, int version) {
final LimitedScanResult<List<TransactionId>> limitedScanResult = flow.selectTraceIdsFromApplicationTraceIndex(applicationName, range, limit);
final List<TransactionId> traceIdList = limitedScanResult.getScanData();
logger.trace("submitted transactionId count={}", traceIdList.size());
// TODO just need sorted? we need range check with tree-based structure.
SortedSet<TransactionId> traceIdSet = new TreeSet<TransactionId>(traceIdList);
logger.debug("unified traceIdSet size={}", traceIdSet.size());
Filter filter = filterBuilder.build(filterText);
List<Dot> scatterData = scatter.selectScatterData(traceIdSet, applicationName, filter);
if (logger.isDebugEnabled()) {
logger.debug("getScatterData range scan(limited:{}) from ~ to:{} ~ {}, limited:{}, filterDataSize:{}",
limit, DateUtils.longToDateStr(range.getFrom()), DateUtils.longToDateStr(range.getTo()), DateUtils.longToDateStr(limitedScanResult.getLimitedTime()), traceIdList.size());
}
Range resultRange;
if (traceIdList.isEmpty()) {
resultRange = new Range(-1, -1);
} else {
resultRange = new Range(limitedScanResult.getLimitedTime(), range.getTo());
}
return createModelAndView(resultRange, jsonpCallback, scatterData, version);
}
private ModelAndView selectScatterData(String applicationName, Range range, int limit, String jsonpCallback, int version) {
final List<Dot> scatterData = scatter.selectScatterData(applicationName, range, limit);
Range resultRange;
if (scatterData.isEmpty()) {
resultRange = new Range(-1, -1);
} else {
resultRange = new Range(scatterData.get(scatterData.size() - 1).getAcceptedTime(), range.getTo());
}
return createModelAndView(resultRange, jsonpCallback, scatterData, version);
}
private ModelAndView createModelAndView(Range range, String jsonpCallback, List<Dot> scatterData, int version) {
ModelAndView mv = new ModelAndView();
mv.addObject("resultFrom", range.getFrom());
mv.addObject("resultTo", range.getTo());
mv.addObject("scatterIndex", ScatterIndex.MATA_DATA);
if(version <= 2) {
mv.addObject("scatter", scatterData);
} else {
final Map<String, List<Dot>> scatterAgentData = new HashMap<String, List<Dot>>();
for(Dot dot : scatterData) {
List<Dot> list = scatterAgentData.get(dot.getAgentId());
if(list == null) {
list = new ArrayList<Dot>();
scatterAgentData.put(dot.getAgentId(), list);
}
list.add(dot);
}
mv.addObject("scatter", scatterAgentData);
}
if (jsonpCallback == null) {
mv.setViewName("jsonView");
} else {
mv.setViewName("jsonpView");
}
return mv;
}
/**
* scatter chart data query for "NOW" button
*
* @param applicationName
* @param limit
* @return
*/
@RequestMapping(value = "/getLastScatterData", method = RequestMethod.GET)
public ModelAndView getLastScatterData(
@RequestParam("application") String applicationName,
@RequestParam("period") long period,
@RequestParam("limit") int limit,
@RequestParam(value = "filter", required = false) String filterText,
@RequestParam(value = "_callback", required = false) String jsonpCallback,
@RequestParam(value = "v", required = false, defaultValue = "1") int version) {
limit = LimitUtils.checkRange(limit);
long to = TimeUtils.getDelayLastTime();
long from = to - period;
// TODO versioning is temporary. to sync template change and server dev
return getScatterData(applicationName, from, to, limit, filterText, jsonpCallback, version);
}
/**
* selected points from scatter chart data query
*
* @param model
* @param request
* @param response
* @return
*/
@RequestMapping(value = "/transactionmetadata", method = RequestMethod.POST)
public String transactionmetadata(Model model, HttpServletRequest request, HttpServletResponse response) {
TransactionMetadataQuery query = parseSelectTransaction(request);
if (query.size() > 0) {
List<SpanBo> metadata = scatter.selectTransactionMetadata(query);
model.addAttribute("metadata", metadata);
}
return "transactionmetadata";
}
private TransactionMetadataQuery parseSelectTransaction(HttpServletRequest request) {
final TransactionMetadataQuery query = new TransactionMetadataQuery();
int index = 0;
while (true) {
final String traceId = request.getParameter(PREFIX_TRANSACTION_ID + index);
final String time = request.getParameter(PREFIX_TIME + index);
final String responseTime = request.getParameter(PREFIX_RESPONSE_TIME + index);
if (traceId == null || time == null || responseTime == null) {
break;
}
query.addQueryCondition(traceId, Long.parseLong(time), Integer.parseInt(responseTime));
index++;
}
logger.debug("query:{}", query);
return query;
}
/**
* transaction list query for selected points in scatter chart
* <p>
* <pre>
* TEST URL = http://localhost:7080/transactionmetadata2.pinpoint?application=FRONT-WEB&from=1394432299032&to=1394433498269&responseFrom=100&responseTo=200&responseOffset=100&limit=10
* </pre>
*
* @param model
* @param request
* @param response
* @return
*/
@RequestMapping(value = "/transactionmetadata2", method = RequestMethod.GET)
public String getTransaction(Model model,
@RequestParam("application") String applicationName,
@RequestParam("from") long from,
@RequestParam("to") long to,
@RequestParam("responseFrom") int responseFrom,
@RequestParam("responseTo") int responseTo,
@RequestParam("limit") int limit,
@RequestParam(value = "offsetTime", required = false, defaultValue = "-1") long offsetTime,
@RequestParam(value = "offsetTransactionId", required = false) String offsetTransactionId,
@RequestParam(value = "offsetTransactionElapsed", required = false, defaultValue = "-1") int offsetTransactionElapsed,
@RequestParam(value = "filter", required = false) String filterText) {
limit = LimitUtils.checkRange(limit);
StopWatch watch = new StopWatch();
watch.start("selectScatterData");
final SelectedScatterArea area = SelectedScatterArea.createUncheckedArea(from, to, responseFrom, responseTo);
logger.debug("fetch scatter data. {}, LIMIT={}, FILTER={}", area, limit, filterText);
if (filterText == null) {
// query data above "limit" first
TransactionId offsetId = null;
List<SpanBo> extraMetadata = null;
if (offsetTransactionId != null) {
offsetId = new TransactionId(offsetTransactionId);
SelectedScatterArea extraArea = SelectedScatterArea.createUncheckedArea(offsetTime, offsetTime, responseFrom, responseTo);
List<Dot> extraAreaDotList = scatter.selectScatterData(applicationName, extraArea, offsetId, offsetTransactionElapsed, limit);
extraMetadata = scatter.selectTransactionMetadata(parseSelectTransaction(extraAreaDotList));
model.addAttribute("extraMetadata", extraMetadata);
}
// query data up to limit
if (extraMetadata == null || extraMetadata.size() < limit) {
int newlimit = limit - ((extraMetadata == null) ? 0 : extraMetadata.size());
List<Dot> selectedDotList = scatter.selectScatterData(applicationName, area, null, -1, newlimit);
List<SpanBo> metadata = scatter.selectTransactionMetadata(parseSelectTransaction(selectedDotList));
model.addAttribute("metadata", metadata);
}
} else {
final LimitedScanResult<List<TransactionId>> limitedScanResult = flow.selectTraceIdsFromApplicationTraceIndex(applicationName, area, limit);
final List<TransactionId> traceIdList = limitedScanResult.getScanData();
logger.trace("submitted transactionId count={}", traceIdList.size());
// TODO: just sorted? we need range check based on tree structure
SortedSet<TransactionId> traceIdSet = new TreeSet<TransactionId>(traceIdList);
logger.debug("unified traceIdSet size={}", traceIdSet.size());
List<Dot> dots = scatter.selectScatterData(traceIdSet, applicationName, filterBuilder.build(filterText));
}
watch.stop();
logger.info("Fetch scatterData time : {}ms", watch.getLastTaskTimeMillis());
return "transactionmetadata2";
}
/**
 * Builds a {@link TransactionMetadataQuery} from scatter dots: one query
 * condition (transaction id, accepted time, elapsed time) per dot.
 * A {@code null} dot list yields an empty query.
 */
private TransactionMetadataQuery parseSelectTransaction(List<Dot> dotList) {
    final TransactionMetadataQuery query = new TransactionMetadataQuery();
    if (dotList != null) {
        for (Dot each : dotList) {
            query.addQueryCondition(each.getTransactionId(), each.getAcceptedTime(), each.getElapsedTime());
        }
        logger.debug("query:{}", query);
    }
    return query;
}
}
| |
package io.lindstrom.mpd.data;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import io.lindstrom.mpd.data.descriptor.Descriptor;
import io.lindstrom.mpd.support.Utils;
import java.util.List;
import java.util.Objects;
/**
 * Attributes and child elements shared by the DASH MPD representation-like
 * elements (ISO/IEC 23009-1 {@code RepresentationBaseType}): frame packing,
 * audio channel configuration, content protection, essential/supplemental
 * properties and inband event streams, plus the common media attributes.
 *
 * <p>Instances are immutable; all state is assigned once in the constructor.
 * Subclass builders extend {@link AbstractBuilder} to get fluent setters for
 * the common fields.
 */
@JsonPropertyOrder({
        "framePackings",
        "audioChannelConfigurations",
        "contentProtections",
        "essentialProperties",
        "supplementalProperties",
        "inbandEventStreams"
})
public abstract class RepresentationBase {
    @JacksonXmlProperty(localName = "FramePacking", namespace = MPD.NAMESPACE)
    private final List<Descriptor> framePackings;

    @JacksonXmlProperty(localName = "AudioChannelConfiguration", namespace = MPD.NAMESPACE)
    private final List<Descriptor> audioChannelConfigurations;

    @JacksonXmlProperty(localName = "ContentProtection", namespace = MPD.NAMESPACE)
    private final List<Descriptor> contentProtections;

    @JacksonXmlProperty(localName = "EssentialProperty", namespace = MPD.NAMESPACE)
    private final List<Descriptor> essentialProperties;

    @JacksonXmlProperty(localName = "SupplementalProperty", namespace = MPD.NAMESPACE)
    private final List<Descriptor> supplementalProperties;

    @JacksonXmlProperty(localName = "InbandEventStream", namespace = MPD.NAMESPACE)
    private final List<EventStream> inbandEventStreams;

    @JacksonXmlProperty(isAttribute = true)
    private final String profiles;

    @JacksonXmlProperty(isAttribute = true)
    private final Long width;

    @JacksonXmlProperty(isAttribute = true)
    private final Long height;

    @JacksonXmlProperty(isAttribute = true)
    private final Ratio sar;

    @JacksonXmlProperty(isAttribute = true)
    private final FrameRate frameRate;

    @JacksonXmlProperty(isAttribute = true)
    private final String audioSamplingRate;

    @JacksonXmlProperty(isAttribute = true)
    private final String mimeType;

    @JacksonXmlProperty(isAttribute = true)
    private final String segmentProfiles;

    @JacksonXmlProperty(isAttribute = true)
    private final String codecs;

    @JacksonXmlProperty(isAttribute = true)
    private final Double maximumSAPPeriod;

    @JacksonXmlProperty(isAttribute = true)
    private final Long startWithSAP;

    @JacksonXmlProperty(isAttribute = true)
    private final Double maxPlayoutRate;

    @JacksonXmlProperty(isAttribute = true)
    private final Boolean codingDependency;

    @JacksonXmlProperty(isAttribute = true)
    private final VideoScanType scanType;

    /**
     * Full constructor used by subclass builders. Values are stored as given
     * (any argument may be {@code null}); list getters wrap on access.
     */
    RepresentationBase(List<Descriptor> framePackings, List<Descriptor> audioChannelConfigurations, List<Descriptor> contentProtections, List<Descriptor> essentialProperties, List<Descriptor> supplementalProperties, List<EventStream> inbandEventStreams, String profiles, Long width, Long height, Ratio sar, FrameRate frameRate, String audioSamplingRate, String mimeType, String segmentProfiles, String codecs, Double maximumSAPPeriod, Long startWithSAP, Double maxPlayoutRate, Boolean codingDependency, VideoScanType scanType) {
        this.framePackings = framePackings;
        this.audioChannelConfigurations = audioChannelConfigurations;
        this.contentProtections = contentProtections;
        this.essentialProperties = essentialProperties;
        this.supplementalProperties = supplementalProperties;
        this.inbandEventStreams = inbandEventStreams;
        this.profiles = profiles;
        this.width = width;
        this.height = height;
        this.sar = sar;
        this.frameRate = frameRate;
        this.audioSamplingRate = audioSamplingRate;
        this.mimeType = mimeType;
        this.segmentProfiles = segmentProfiles;
        this.codecs = codecs;
        this.maximumSAPPeriod = maximumSAPPeriod;
        this.startWithSAP = startWithSAP;
        this.maxPlayoutRate = maxPlayoutRate;
        this.codingDependency = codingDependency;
        this.scanType = scanType;
    }

    /** No-arg constructor for the XML deserializer; every field starts null. */
    RepresentationBase() {
        this.framePackings = null;
        this.audioChannelConfigurations = null;
        this.contentProtections = null;
        this.essentialProperties = null;
        this.supplementalProperties = null;
        this.inbandEventStreams = null;
        this.profiles = null;
        this.width = null;
        this.height = null;
        this.sar = null;
        this.frameRate = null;
        this.audioSamplingRate = null;
        this.mimeType = null;
        this.segmentProfiles = null;
        this.codecs = null;
        this.maximumSAPPeriod = null;
        this.startWithSAP = null;
        this.maxPlayoutRate = null;
        this.codingDependency = null;
        this.scanType = null;
    }

    public List<Descriptor> getFramePackings() {
        return Utils.unmodifiableList(framePackings);
    }

    public List<Descriptor> getAudioChannelConfigurations() {
        return Utils.unmodifiableList(audioChannelConfigurations);
    }

    public List<Descriptor> getContentProtections() {
        return Utils.unmodifiableList(contentProtections);
    }

    public List<Descriptor> getEssentialProperties() {
        return Utils.unmodifiableList(essentialProperties);
    }

    public List<Descriptor> getSupplementalProperties() {
        return Utils.unmodifiableList(supplementalProperties);
    }

    public List<EventStream> getInbandEventStreams() {
        return Utils.unmodifiableList(inbandEventStreams);
    }

    public String getProfiles() {
        return profiles;
    }

    public Long getWidth() {
        return width;
    }

    public Long getHeight() {
        return height;
    }

    public Ratio getSar() {
        return sar;
    }

    public FrameRate getFrameRate() {
        return frameRate;
    }

    public String getAudioSamplingRate() {
        return audioSamplingRate;
    }

    public String getMimeType() {
        return mimeType;
    }

    public String getSegmentProfiles() {
        return segmentProfiles;
    }

    public String getCodecs() {
        return codecs;
    }

    public Double getMaximumSAPPeriod() {
        return maximumSAPPeriod;
    }

    public Long getStartWithSAP() {
        return startWithSAP;
    }

    public Double getMaxPlayoutRate() {
        return maxPlayoutRate;
    }

    public Boolean getCodingDependency() {
        return codingDependency;
    }

    public VideoScanType getScanType() {
        return scanType;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof RepresentationBase)) return false;
        RepresentationBase that = (RepresentationBase) o;
        return Objects.equals(framePackings, that.framePackings) &&
                Objects.equals(audioChannelConfigurations, that.audioChannelConfigurations) &&
                Objects.equals(contentProtections, that.contentProtections) &&
                Objects.equals(essentialProperties, that.essentialProperties) &&
                Objects.equals(supplementalProperties, that.supplementalProperties) &&
                Objects.equals(inbandEventStreams, that.inbandEventStreams) &&
                Objects.equals(profiles, that.profiles) &&
                Objects.equals(width, that.width) &&
                Objects.equals(height, that.height) &&
                Objects.equals(sar, that.sar) &&
                Objects.equals(frameRate, that.frameRate) &&
                Objects.equals(audioSamplingRate, that.audioSamplingRate) &&
                Objects.equals(mimeType, that.mimeType) &&
                Objects.equals(segmentProfiles, that.segmentProfiles) &&
                Objects.equals(codecs, that.codecs) &&
                Objects.equals(maximumSAPPeriod, that.maximumSAPPeriod) &&
                Objects.equals(startWithSAP, that.startWithSAP) &&
                Objects.equals(maxPlayoutRate, that.maxPlayoutRate) &&
                Objects.equals(codingDependency, that.codingDependency) &&
                scanType == that.scanType;
    }

    @Override
    public int hashCode() {
        return Objects.hash(framePackings, audioChannelConfigurations, contentProtections, essentialProperties, supplementalProperties, inbandEventStreams, profiles, width, height, sar, frameRate, audioSamplingRate, mimeType, segmentProfiles, codecs, maximumSAPPeriod, startWithSAP, maxPlayoutRate, codingDependency, scanType);
    }

    @Override
    public String toString() {
        return "RepresentationBase{" +
                "framePackings=" + framePackings +
                ", audioChannelConfigurations=" + audioChannelConfigurations +
                ", contentProtections=" + contentProtections +
                ", essentialProperties=" + essentialProperties +
                ", supplementalProperties=" + supplementalProperties +
                ", inbandEventStreams=" + inbandEventStreams +
                ", profiles='" + profiles + '\'' +
                ", width=" + width +
                ", height=" + height +
                ", sar='" + sar + '\'' +
                ", frameRate='" + frameRate + '\'' +
                ", audioSamplingRate='" + audioSamplingRate + '\'' +
                ", mimeType='" + mimeType + '\'' +
                ", segmentProfiles='" + segmentProfiles + '\'' +
                ", codecs='" + codecs + '\'' +
                ", maximumSAPPeriod=" + maximumSAPPeriod +
                ", startWithSAP=" + startWithSAP +
                ", maxPlayoutRate=" + maxPlayoutRate +
                ", codingDependency=" + codingDependency +
                ", scanType=" + scanType +
                '}';
    }

    /**
     * Copies all common fields of this object into {@code builder}, so a
     * subclass can derive a modified copy. Returns the same builder.
     */
    <T extends AbstractBuilder<T>> T buildUpon(T builder) {
        return builder
                .withFramePackings(framePackings)
                .withAudioChannelConfigurations(audioChannelConfigurations)
                .withContentProtections(contentProtections)
                .withEssentialProperties(essentialProperties)
                .withSupplementalProperties(supplementalProperties)
                .withInbandEventStreams(inbandEventStreams)
                .withProfiles(profiles)
                .withWidth(width)
                .withHeight(height)
                .withSar(sar)
                .withFrameRate(frameRate)
                .withAudioSamplingRate(audioSamplingRate)
                .withMimeType(mimeType)
                .withSegmentProfiles(segmentProfiles)
                .withCodecs(codecs)
                .withMaximumSAPPeriod(maximumSAPPeriod)
                .withStartWithSAP(startWithSAP)
                .withMaxPlayoutRate(maxPlayoutRate)
                .withCodingDependency(codingDependency)
                .withScanType(scanType);
    }

    /**
     * Fluent builder base for the common representation fields. Subclasses
     * return their own type from {@link #getThis()} so chained calls keep the
     * concrete builder type. Each list-valued field has both a {@code List}
     * setter and a varargs convenience overload.
     */
    static abstract class AbstractBuilder<T> {
        List<Descriptor> framePackings;
        List<Descriptor> audioChannelConfigurations;
        List<Descriptor> contentProtections;
        List<Descriptor> essentialProperties;
        List<Descriptor> supplementalProperties;
        List<EventStream> inbandEventStreams;
        String profiles;
        Long width;
        Long height;
        Ratio sar;
        FrameRate frameRate;
        String audioSamplingRate;
        String mimeType;
        String segmentProfiles;
        String codecs;
        Double maximumSAPPeriod;
        Long startWithSAP;
        Double maxPlayoutRate;
        Boolean codingDependency;
        VideoScanType scanType;

        /** Returns {@code this} typed as the concrete builder subclass. */
        abstract T getThis();

        public T withFramePackings(List<Descriptor> framePackings) {
            this.framePackings = framePackings;
            return getThis();
        }

        public T withFramePackings(Descriptor framePacking, Descriptor ...moreFramePackings) {
            this.framePackings = Utils.varargsToList(framePacking, moreFramePackings);
            return getThis();
        }

        public T withAudioChannelConfigurations(List<Descriptor> audioChannelConfigurations) {
            this.audioChannelConfigurations = audioChannelConfigurations;
            return getThis();
        }

        public T withAudioChannelConfigurations(Descriptor audioChannelConfiguration, Descriptor ...moreAudioChannelConfigurations) {
            this.audioChannelConfigurations = Utils.varargsToList(audioChannelConfiguration, moreAudioChannelConfigurations);
            return getThis();
        }

        public T withContentProtections(List<Descriptor> contentProtections) {
            this.contentProtections = contentProtections;
            return getThis();
        }

        public T withContentProtections(Descriptor contentProtection, Descriptor ...moreContentProtections) {
            this.contentProtections = Utils.varargsToList(contentProtection, moreContentProtections);
            return getThis();
        }

        public T withEssentialProperties(List<Descriptor> essentialProperties) {
            this.essentialProperties = essentialProperties;
            return getThis();
        }

        public T withEssentialProperties(Descriptor essentialProperty, Descriptor ...moreEssentialProperties) {
            this.essentialProperties = Utils.varargsToList(essentialProperty, moreEssentialProperties);
            return getThis();
        }

        public T withSupplementalProperties(List<Descriptor> supplementalProperties) {
            this.supplementalProperties = supplementalProperties;
            return getThis();
        }

        public T withSupplementalProperties(Descriptor supplementalProperty, Descriptor ...moreSupplementalProperties) {
            this.supplementalProperties = Utils.varargsToList(supplementalProperty, moreSupplementalProperties);
            return getThis();
        }

        public T withInbandEventStreams(List<EventStream> inbandEventStreams) {
            this.inbandEventStreams = inbandEventStreams;
            return getThis();
        }

        public T withInbandEventStreams(EventStream inbandEventStream, EventStream ...moreInbandEventStreams) {
            this.inbandEventStreams = Utils.varargsToList(inbandEventStream, moreInbandEventStreams);
            return getThis();
        }

        public T withProfiles(String profiles) {
            this.profiles = profiles;
            return getThis();
        }

        public T withWidth(Long width) {
            this.width = width;
            return getThis();
        }

        public T withWidth(int width) {
            this.width = (long) width;
            return getThis();
        }

        public T withHeight(Long height) {
            this.height = height;
            return getThis();
        }

        public T withHeight(int height) {
            this.height = (long) height;
            return getThis();
        }

        public T withSar(Ratio sar) {
            this.sar = sar;
            return getThis();
        }

        public T withFrameRate(FrameRate frameRate) {
            this.frameRate = frameRate;
            return getThis();
        }

        public T withAudioSamplingRate(String audioSamplingRate) {
            this.audioSamplingRate = audioSamplingRate;
            return getThis();
        }

        public T withMimeType(String mimeType) {
            this.mimeType = mimeType;
            return getThis();
        }

        public T withSegmentProfiles(String segmentProfiles) {
            this.segmentProfiles = segmentProfiles;
            return getThis();
        }

        public T withCodecs(String codecs) {
            this.codecs = codecs;
            return getThis();
        }

        public T withMaximumSAPPeriod(Double maximumSAPPeriod) {
            this.maximumSAPPeriod = maximumSAPPeriod;
            return getThis();
        }

        public T withStartWithSAP(Long startWithSAP) {
            this.startWithSAP = startWithSAP;
            return getThis();
        }

        public T withMaxPlayoutRate(Double maxPlayoutRate) {
            this.maxPlayoutRate = maxPlayoutRate;
            return getThis();
        }

        public T withCodingDependency(Boolean codingDependency) {
            this.codingDependency = codingDependency;
            return getThis();
        }

        public T withScanType(VideoScanType scanType) {
            this.scanType = scanType;
            return getThis();
        }
    }
}
| |
/**
*
*/
package com.att.cloud.so.cloudapi.handlers.attnetwork;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.HttpMethod;
import com.att.cloud.so.cloudapi.messages.AttNetworkType;
import com.att.cloud.so.cloudapi.messages.CaasExtensionApiTypes;
import com.att.cloud.so.cloudapi.messages.LinkType;
import com.att.cloud.so.cloudapi.messages.ObjectFactory;
import com.att.cloud.so.cloudapi.messages.SeverityType;
import com.att.cloud.so.handlers.JAXPipelineHandler;
import com.att.cloud.so.interfaces.nos.processors.VDeviceTypes;
import com.att.cloud.so.interfaces.pipeline.PipelineEvents;
import com.att.cloud.so.interfaces.pipeline.PipelineException;
import com.att.cloud.so.interfaces.pipeline.PipelineMessage;
import com.att.cloud.so.interfaces.pipeline.objects.PipelineNetwork;
import com.att.cloud.so.interfaces.pipeline.objects.PipelineVdc;
import com.att.cloud.so.utils.DBAccess;
import com.att.cloud.so.utils.LogEngine;
import com.att.cloud.so.utils.Tools;
import com.att.cloud.so.utils.ipv6.NetworkTypes;
/**
* @author ks114y [Kva Savitha]
*
*/
/* GET /cloudapi/location/[id-value]/attorg/[id-value]/attnetwork/[id-value] ===> GetAttNetworkType */
public class GetAttNetworkHandler extends JAXPipelineHandler {

    /*
     * NOTE(review): per-request state lives in instance fields, so a single
     * handler instance is not safe for concurrent execute() calls — confirm
     * the pipeline creates one handler instance per request.
     */
    int cnId;
    String cId;
    String siteId, uri;

    /**
     * Handles the GET attnetwork request: loads the network, decorates it
     * with links to its load balancer, firewall, VDC and associated network,
     * and stores the result on the pipeline message as "attNetworkType".
     * On any failure an error message is added to the response, the pipeline
     * is aborted and a rollback is triggered.
     */
    public void execute(PipelineMessage pmsg) throws PipelineException {
        cnId = Integer.parseInt(pmsg.getAttributeAsStr("networkId"));
        cId = pmsg.getAttributeAsStr("orgId");
        siteId = pmsg.getAttributeAsStr("siteId");
        uri = pmsg.getUriAsStr();
        AttNetworkType attNetworkType = null;
        try {
            LogEngine.debug("PipelineMessage: " + pmsg.toString());
            attNetworkType = this.getNetworkType(cnId, cId, siteId);
            addNetworkLinks(uri, cnId, attNetworkType, cId, pmsg);
            attNetworkType.setHref(uri);
            attNetworkType.setId(String.valueOf(cnId));
            attNetworkType.setType(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_NETWORK_XML.value());
            pmsg.addAttribute("attNetworkType", attNetworkType);
        } catch (Exception e) {
            // e.getMessage() may be null (e.g. NullPointerException); guard
            // before comparing, otherwise error reporting itself would NPE.
            final String message = e.getMessage();
            if (message != null && message.equalsIgnoreCase("Cannot find the mentioned Id in the Customer Network")) {
                pmsg.getResponse().addErrmsg(message, HttpServletResponse.SC_NOT_FOUND, SeverityType.FATAL, e.getClass().getName());
            } else {
                pmsg.getResponse().addErrmsg(message, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, SeverityType.FATAL, e.getClass().getName());
            }
            LogEngine.logException(e);
            this.setAbortPipeline(true);
            triggerRollback();
        }
    }

    /**
     * Builds the AttNetworkType representation for the given network via the
     * pipeline network object.
     */
    public AttNetworkType getNetworkType(int networkId, String orgId, String siteId) throws Exception {
        PipelineNetwork pipelineNetwork = new PipelineNetwork(networkId, orgId, siteId);
        return pipelineNetwork.getAttnetwork();
    }

    private static final String GET_VDEVICE_TYPE = "Select * from VDEVICE_TYPES vt where vt.VDEVICECODE = ?";

    /**
     * Looks up the VDEVICETYPEID for a vdevice code.
     * NOTE: the method-name typo ("Tyep") is kept for API compatibility.
     *
     * @throws SQLException when no row matches the given code
     */
    public int getVdeviceTyepId(String vdeviceCode) throws Exception {
        ResultSet rs = null;
        DBAccess dba = null;
        try {
            dba = new DBAccess();
            dba.prepareStatement(GET_VDEVICE_TYPE);
            dba.setString(1, vdeviceCode);
            rs = dba.executeQuery();
            if (rs != null && rs.next()) {
                return rs.getInt("VDEVICETYPEID");
            } else {
                throw new SQLException("Unable to find VDEVICETYPEID for: " + vdeviceCode);
            }
        } catch (Exception e) {
            LogEngine.logException(e);
            throw e;
        } finally {
            if (Tools.isNotEmpty(dba)) {
                dba.releasePStmt();
                dba.close(rs);
            }
        }
    }

    private static final String GET_LOADBALANCER_AND_OPLPOLICIES = "SELECT DISTINCT lba.lb_identifier " +
            "FROM customer_network_appliance cna, lb_appliance lba, VDEVICE_TYPES vdt, lb_policies lp " +
            "WHERE cna.cnid = ? and cna.applianceid = lba.lbid and cna.VDEVICETYPEID = vdt.VDEVICETYPEID " +
            "and vdt.VDEVICECODE = ? and lp.lbid = lba.lbid AND " +
            "(lba.FORCEDLB = 'N' OR lba.FORCEDLB = 'Y' and lp.policy.policyName in ('YUM', 'KMS'))";

    private static final String GET_LOADBALANCER = "SELECT DISTINCT lba.lb_identifier " +
            "FROM customer_network_appliance cna, lb_appliance lba, VDEVICE_TYPES vdt " +
            "WHERE cna.cnid = ? and cna.applianceid = lba.lbid and cna.VDEVICETYPEID = vdt.VDEVICETYPEID " +
            "and vdt.VDEVICECODE = ? and lba.FORCEDLB = 'N'";

    /**
     * Returns the load-balancer identifier for the network, or "" when no
     * eligible load balancer exists. Which query is used depends on whether
     * the organization has OPL policies in this site.
     */
    private String getLbIdentifier(int cnId) throws Exception {
        ResultSet rs = null;
        String lb_Identifier = "";
        DBAccess dba = null;
        try {
            dba = new DBAccess();
            if (organizationHasOPLPoliciesInSite(cId, siteId, cnId)) {
                dba.prepareStatement(GET_LOADBALANCER_AND_OPLPOLICIES);
            } else {
                dba.prepareStatement(GET_LOADBALANCER);
            }
            int pos = 1;
            dba.setInt(pos++, cnId);
            dba.setString(pos++, VDeviceTypes.LOAD_BALANCER.getCode());
            rs = dba.executeQuery();
            if (rs != null && rs.next()) {
                lb_Identifier = rs.getString("lb_identifier");
            }
        } catch (Exception e) {
            LogEngine.logException(e);
            throw e;
        } finally {
            if (Tools.isNotEmpty(dba)) {
                dba.releasePStmt();
                dba.close(rs);
            }
        }
        return lb_Identifier;
    }

    private static final String GET_ORGANIZATION_OPL_POLICIES = "select lbp.* from lb_appliance lb, lb_policies lbp where lb.lbid in (select applianceid from customer_network_appliance where VDEVICETYPEID = 3 and cnid in (select cnid from customer_network where orgid = ? and siteid=? and cnid = ?)) and lb.lbid = lbp.lbid and lbp.POLICY.visible = 'N' and lbp.POLICY.policyName in (select bpt.POLICY.policyName from backend_policy_types bpt where bpt.beptype = 'OPL')";

    /**
     * True when the organization has hidden OPL backend policies on the load
     * balancer of the given network in the given site. Database errors are
     * logged and treated as "no policies" (best-effort check, deliberately
     * not propagated).
     */
    public boolean organizationHasOPLPoliciesInSite(String orgId, String siteId, int cnId) {
        DBAccess dba = null;
        ResultSet resultSet = null;
        try {
            dba = new DBAccess();
            dba.prepareStatement(GET_ORGANIZATION_OPL_POLICIES);
            int p = 1;
            dba.setString(p++, orgId);
            dba.setString(p++, siteId);
            dba.setInt(p++, cnId);
            resultSet = dba.executeQuery();
            if (resultSet != null && resultSet.next()) {
                return true;
            }
        } catch (Exception e) {
            LogEngine.logException(e);
        } finally {
            if (Tools.isNotEmpty(dba)) {
                dba.releasePStmt();
                dba.close(resultSet);
            }
        }
        return false;
    }

    private static final String GET_FIREWALL = "SELECT * from customer_network_appliance cna, fw_appliance fwa, VDEVICE_TYPES vdt where cna.cnid = ? and cna.applianceid = fwa.fwid and cna.VDEVICETYPEID = vdt.VDEVICETYPEID and vdt.VDEVICECODE = ?";

    /**
     * Returns the inside-firewall id for the network, or 0 when no firewall
     * is attached.
     */
    private int getFireWall(int cnId) throws Exception {
        ResultSet rs = null;
        int fwId = 0;
        DBAccess dba = null;
        try {
            dba = new DBAccess();
            dba.prepareStatement(GET_FIREWALL);
            int pos = 1;
            dba.setInt(pos++, cnId);
            dba.setString(pos++, VDeviceTypes.INSIDE_FIREWALL.getCode());
            rs = dba.executeQuery();
            if (rs != null && rs.next()) {
                fwId = rs.getInt("fwid");
            } else {
                LogEngine.debug("Firewall not found for Network Id: " + cnId);
            }
        } catch (Exception e) {
            LogEngine.logException(e);
            throw e;
        } finally {
            if (Tools.isNotEmpty(dba)) {
                dba.releasePStmt();
                dba.close(rs);
            }
        }
        return fwId;
    }

    private static final String GET_VDC = "select orgvdcid from customer_network cn, vdc_networks vn, vdc v where vn.vdcid = v.vdcid and vn.netid = cn.ippid and " +
            " vn.nettype = cn.nettype and cn.cnid = ? ";

    /**
     * Returns the organization VDC id the network belongs to, or "" when the
     * network is not attached to a VDC.
     */
    private String getVDC(int cnId) throws Exception {
        ResultSet rs = null;
        String orgVdcId = "";
        DBAccess dba = null;
        try {
            dba = new DBAccess();
            dba.prepareStatement(GET_VDC);
            int pos = 1;
            dba.setInt(pos++, cnId);
            rs = dba.executeQuery();
            if (rs != null && rs.next()) {
                orgVdcId = rs.getString("orgvdcid");
            } else {
                LogEngine.debug("VDC not found for Network Id: " + cnId);
            }
        } catch (Exception e) {
            LogEngine.logException(e);
            throw e;
        } finally {
            if (Tools.isNotEmpty(dba)) {
                dba.releasePStmt();
                dba.close(rs);
            }
        }
        return orgVdcId;
    }

    /**
     * Adds LinkTypes for the network's load balancer, firewall, VDC and
     * associated network to <code>attNetworkType</code>:
     * GET /cloudapi/location/[id-value]/attorg/[id-value]/attlb/[ID-VALUE]
     * GET /cloudapi/location/[id-value]/attorg/{attorg_id}/attvdc/[ID-VALUE]
     * GET /cloudapi/location/[id-value]/attorg/{attorg_id}/attfwp/[ID-VALUE]
     */
    public AttNetworkType addNetworkLinks(String uri, int networkId, AttNetworkType attNetworkType, String orgId, PipelineMessage pmsg) throws Exception {
        ObjectFactory factory = new ObjectFactory();

        // Load-balancer link: only when the network actually uses an LB.
        String lbIdentifier = getLbIdentifier(networkId);
        if (!Tools.isEmpty(lbIdentifier) && attNetworkType.isUseLb()) {
            String lbUri = uri.replaceAll("attnetwork.*$", "attlb/");
            LinkType lbLinkType = factory.createLinkType();
            lbLinkType.setAccept(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_LOAD_BALANCER_XML.value());
            lbLinkType.setAction(HttpMethod.GET);
            lbLinkType.setHref(lbUri + lbIdentifier);
            lbLinkType.setId("" + lbIdentifier);
            lbLinkType.setMethod(HttpMethod.GET);
            lbLinkType.setType(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_LOAD_BALANCER_XML.value());
            attNetworkType.getLinks().add(lbLinkType);
        }

        // Firewall link: fwId == 0 means no firewall attached.
        int fwId = getFireWall(networkId);
        if (fwId != 0) {
            String fwUri = uri.replaceAll("attnetwork.*$", "attfwp/");
            LinkType fwLinkType = factory.createLinkType();
            fwLinkType.setAccept(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_FIREWALL_POLICY_XML.value());
            fwLinkType.setAction(HttpMethod.GET);
            fwLinkType.setHref(fwUri + fwId);
            fwLinkType.setId("" + fwId);
            fwLinkType.setMethod(HttpMethod.GET);
            fwLinkType.setType(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_FIREWALL_POLICY_XML.value());
            attNetworkType.getLinks().add(fwLinkType);
        }

        // Associated network: any other network in the same VDC (the loop
        // keeps the last one found that is not this network).
        String orgVdcId = getVDC(networkId);
        int associatedNetwork = 0;
        PipelineVdc pipelineVdc = new PipelineVdc(orgVdcId, pmsg);
        LogEngine.debug("**GETNETWORKS**" + pipelineVdc.getNetworks());
        for (PipelineNetwork pN : pipelineVdc.getNetworks()) {
            if (pN.getCnId() != networkId) {
                associatedNetwork = pN.getCnId();
            }
        }
        if (!Tools.isEmpty(associatedNetwork) && associatedNetwork != 0) {
            LinkType associatedNwlinkType = factory.createLinkType();
            associatedNwlinkType.setAction(HttpMethod.GET);
            associatedNwlinkType.setId("" + associatedNetwork);
            associatedNwlinkType.setHref(uri.split("attnetwork")[0] + "attnetwork/" + associatedNetwork);
            associatedNwlinkType.setAccept(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_NETWORK_XML.value());
            associatedNwlinkType.setMethod(HttpMethod.GET);
            associatedNwlinkType.setType(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_NETWORK_XML.value());
            attNetworkType.getLinks().add(associatedNwlinkType);
        }

        // VDC link.
        if (!Tools.isEmpty(orgVdcId)) {
            String vdcUri = uri.replaceAll("attnetwork.*$", "attvdc/");
            LinkType vdcLinkType = factory.createLinkType();
            vdcLinkType.setAccept(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_VDC_XML.value());
            vdcLinkType.setAction(HttpMethod.GET);
            vdcLinkType.setHref(vdcUri + orgVdcId);
            vdcLinkType.setId("" + orgVdcId);
            vdcLinkType.setMethod(HttpMethod.GET);
            vdcLinkType.setType(CaasExtensionApiTypes.APPLICATION_VND_ATT_SYNAPTIC_CLOUDAPI_ATT_VDC_XML.value());
            attNetworkType.getLinks().add(vdcLinkType);
        }
        return attNetworkType;
    }

    public PipelineEvents handleEventType() {
        return PipelineEvents.SERVICE_ATTNETWORK_GET;
    }

    /** Ad-hoc manual test entry point. */
    public static void main(String[] args) {
        PipelineMessage pmsg = new PipelineMessage();
        pmsg.addAttribute("networkId", "2721");
        pmsg.addAttribute("orgId", "999999");
        pmsg.addAttribute("siteId", "M400004");
        GetAttNetworkHandler netHandler = new GetAttNetworkHandler();
        try {
            netHandler.execute(pmsg);
        } catch (PipelineException e) {
            LogEngine.logException(e);
        }
    }

    @Override
    public void rollback() {
        // Nothing to do here: this handler performs read-only work.
    }
}
| |
/*<license>
Copyright 2004, PeopleWare n.v.
NO RIGHTS ARE GRANTED FOR THE USE OF THIS SOFTWARE, EXCEPT, IN WRITING,
TO SELECTED PARTIES.
</license>*/
package be.peopleware.persistence_II.hibernate;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.QueryException;
import net.sf.hibernate.Session;
import net.sf.hibernate.Transaction;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import be.peopleware.bean_V.CompoundPropertyException;
import be.peopleware.bean_V.PropertyException;
import be.peopleware.exception_I.TechnicalException;
import be.peopleware.persistence_II.IdNotFoundException;
import be.peopleware.persistence_II.PersistentBean;
import be.peopleware.persistence_II.dao.AsyncCrudDao;
/**
* <p>Asynchronous CRUD functionality with Hibernate. There are no extra
* requirements for {@link PersistentBean}s to be used with Hibernate,
* apart from the definition of <kbd>hbm</kbd> files.</p>
*
* @author Jan Dockx
* @author PeopleWare n.v.
* @invar getRequest() != null;
* @invar getSession() != null;
*
* @todo Exceptions thrown here should be {@link PropertyException PropertyExceptions}
* instead of the more strict {@link CompoundPropertyException}, and should
* allow null origin.
*/
public class HibernateAsyncCrudDao extends AbstractHibernateDao implements AsyncCrudDao {
/* <section name="Meta Information"> */
//------------------------------------------------------------------
/** {@value} */
public static final String CVS_REVISION = "$Revision$";
/** {@value} */
public static final String CVS_DATE = "$Date$";
/** {@value} */
public static final String CVS_STATE = "$State$";
/** {@value} */
public static final String CVS_TAG = "$Name$";
/* </section> */
/* <construction> */
//------------------------------------------------------------------
// default constructor
/* </construction> */
private static final Log LOG = LogFactory.getLog(HibernateAsyncCrudDao.class);
private static final String NULL_SESSION = "Session is null";
private static final String NO_PENDING_TRANSACTION = "No transaction pending";
private static final String PENDING_TRANSACTION = "There is a transaction still pending";
private static final String NO_PERSISTENT_OBJECT = "No persistent object";
private static final String WRONG_SUBTYPE = " not a subtype of PersistentBean";
/*<property name="session">*/
//------------------------------------------------------------------
/**
 * Set the Hibernate session used for all database manipulations.
 * The session cannot be replaced while a transaction is still pending.
 *
 * @param session
 *        The hibernate session to use for database manipulations.
 * @post new.getSession() == session;
 * @throws TechnicalException
 *         isInTransaction();
 */
public final void setSession(final Session session) throws TechnicalException {
  if (isInTransaction()) {
    throw new TechnicalException("Cannot set session now, "
                                 + "transaction still in use", null);
  }
  super.setSession(session);
}
/*</property>*/
/**
 * The pending Hibernate transaction; non-null exactly while this DAO is in
 * a transaction.
 *
 * @invar isInTransaction() == (tx != null);
 */
private Transaction $tx;

/**
 * Start a new Hibernate transaction on the current session and mark this
 * DAO as being in a transaction.
 *
 * @throws TechnicalException
 *         isInTransaction()
 *         || getSession() == null;
 */
public final void startTransaction() throws TechnicalException {
  LOG.debug("Starting hibernate transaction ...");
  // Guard order is deliberate: a missing session is reported before a
  // pending transaction.
  if (getSession() == null) {
    throw new TechnicalException(NULL_SESSION, null);
  }
  if (isInTransaction()) {
    throw new TechnicalException(PENDING_TRANSACTION, null);
  }
  assert $tx == null;
  try {
    $tx = getSession().beginTransaction();
    // The flag is only flipped after beginTransaction() succeeded, so a
    // failure leaves the DAO out of transaction with $tx still null.
    setInTransaction(true);
  }
  catch (HibernateException hExc) {
    throw new TechnicalException("Could not create Hibernate transaction",
                                 hExc);
  }
  LOG.debug("Hibernate transaction started.");
}
/**
 * Commit the pending Hibernate transaction. On success, the ids of beans
 * deleted during the transaction are reset to null and the created/deleted
 * bookkeeping sets are replaced by fresh empty sets. On failure, the
 * HibernateException is translated by handleHibernateException.
 *
 * @param pb
 *        The persistent object that needs to be written to the db; used to
 *        build the exception on commit failure.
 * @throws TechnicalException
 *         !isInTransaction()
 *         || pb == null;
 * @throws CompoundPropertyException
 *         If there are some data consistencies in <param>pb</param> that
 *         are detected by the database, for example: unique constraints.
 */
public final void commitTransaction(final PersistentBean pb)
    throws CompoundPropertyException, TechnicalException {
  LOG.debug("Starting commit ...");
  if (!isInTransaction()) {
    throw new TechnicalException(NO_PENDING_TRANSACTION, null);
  }
  if (pb == null) {
    throw new TechnicalException(NO_PERSISTENT_OBJECT, null);
  }
  assert $tx != null;
  try {
    $tx.commit();
    $tx = null;
    // the deleted beans no longer exist in the DB: forget their ids
    resetId($deleted);
    $deleted = new HashSet();
    $created = new HashSet();
    setInTransaction(false);
    LOG.debug("Commit completed.");
  }
  catch (HibernateException hExc) {
    // NOTE: on failure the transaction state ($tx, the bookkeeping sets) is
    // deliberately left untouched; handleHibernateException decides what to
    // propagate.
    LOG.debug("Commit failed.", hExc);
    /* @idea (jand): it is stupid to have an argument pb for this method;
     * it is needed for the exceptions if it is a hibernate exception;
     * does the hibernate exception no contain the pb?
     */
    handleHibernateException(hExc, "Committing", pb);
  }
}
/**
 * Reset the id of each {@link PersistentBean} in
 * <code>persistentBeans</code> to <code>null</code>.
 *
 * @pre persistentBeans != null;
 * @pre cC:instanceof(persistentBeans, PersistentBean);
 */
private void resetId(Set persistentBeans) {
  assert persistentBeans != null;
  for (Iterator i = persistentBeans.iterator(); i.hasNext(); ) {
    ((PersistentBean)i.next()).setId(null);
  }
}
/**
 * Roll back the pending Hibernate transaction. For
 * {@link #isCreated(PersistentBean) created} persistent beans, the
 * {@link PersistentBean#getId()} is reset to <code>null</code> (part of rollback).
 *
 * @throws TechnicalException
 *         !isInTransaction();
 */
public final void cancelTransaction() throws TechnicalException {
  LOG.debug("Cancelling transaction.");
  if (!isInTransaction()) {
    throw new TechnicalException(NO_PENDING_TRANSACTION, null);
  }
  assert $tx != null;
  try {
    $tx.rollback();
    // beans created during the cancelled transaction never made it to the
    // DB, so their generated ids are invalid and must be cleared
    resetId($created);
    // $deleted objects get to keep there original id, as they are not really deleted
  }
  catch (HibernateException hExc) {
    throw new TechnicalException("could not rollback "
                                 + "Hibernate transaction. "
                                 + "this is serious.",
                                 hExc);
  }
  finally {
    // whether or not the rollback succeeded, leave transaction state and
    // start with fresh bookkeeping sets
    $tx = null;
    setInTransaction(false);
    $deleted = new HashSet();
    $created = new HashSet();
  }
}
/**
 * Schedule <code>pb</code> (and, via Hibernate cascade, all fresh beans
 * reachable from it) for creation in the database. After this method,
 * <code>pb</code> will have a fresh id. Only during commit will this
 * <code>pb</code> actually be created in the DB, so if that fails, we need
 * to call {@link #cancelTransaction()}. This will reset the id to
 * <code>null</code>.
 *
 * @post isCreated(pb);
 * @throws TechnicalException
 *         !isInTransaction()
 *         || getSession() == null
 *         || pb == null
 *         || pb.getId() != null;
 * @throws CompoundPropertyException
 *         when normalize()/checkCivility() of a bean to be created fails,
 *         or when the Hibernate save fails on a data-consistency problem.
 */
public final void createPersistentBean(final PersistentBean pb)
    throws CompoundPropertyException, TechnicalException {
  LOG.debug("Creating new record for bean \"" + pb + "\" ..."); //$NON-NLS-2$
  if (getSession() == null) {
    throw new TechnicalException(NULL_SESSION, null);
  }
  if (!isInTransaction()) {
    throw new TechnicalException(NO_PENDING_TRANSACTION, null);
  }
  if (pb == null) {
    throw new TechnicalException(NO_PERSISTENT_OBJECT, null);
  }
  if (pb.getId() != null) {
    throw new TechnicalException("pb cannot have an id",
                                 null);
  }
  try {
    LOG.trace("Gather all beans to be created, taking into account cascade");
    List allToBeCreated = relatedFreshPersistentBeans(pb);
    // we need to normalize and check all these beans before saving any
    Iterator iter = allToBeCreated.iterator();
    while (iter.hasNext()) {
      PersistentBean current = (PersistentBean)iter.next();
      LOG.trace("Normalizing \"" + current + "\" and checking civility ...");
      current.normalize();
      current.checkCivility(); // CompoundPropertyException
      // MUDO (jand) package all PropertyExceptions for all beans together; don't stop after one!!!
      LOG.trace("\"" + current + "\" checks out ok");
    }
    getSession().save(pb);
    // cascade done by Hibernate; all elements of allToBeCreated are created
    // IDEA (jand) by doing the cascade ourselfs, we might be able to get better exceptions
    $created.addAll(allToBeCreated);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Creating succesfull.");
      iter = allToBeCreated.iterator();
      while (iter.hasNext()) {
        PersistentBean current = (PersistentBean)iter.next();
        LOG.debug(" generated " + current.getId() + " as id for " + current);
      }
    }
  }
  catch (HibernateException hExc) {
    LOG.debug("Creation of new record failed.");
    handleHibernateException(hExc, "Creating", pb);
  }
  assert pb.getId() != null;
}
/**
* <code>pb</code> is part of the result
*
* @todo move method static as utility method
* @pre pb != null;
* @pre pb.getId() == null;
*/
private List relatedFreshPersistentBeans(PersistentBean pb) {
assert pb != null;
assert pb.getId() == null;
List result = new LinkedList();
result.add(pb);
int current = 0;
while (current < result.size()) {
PersistentBean currentPb = (PersistentBean)result.get(current);
current++;
PropertyDescriptor[] pds = PropertyUtils.getPropertyDescriptors(currentPb);
for (int i = 0; i < pds.length; i++) {
PersistentBean related = relatedPeristentBean(currentPb, pds[i]);
if ((related != null) && (related.getId() == null) && (! result.contains(related))) {
/* if it is a fresh bean and it is the first time that we encounter it,
* it is to be part of the result;
* we also need to process it further: remember it on the agenda */
result.add(related); // adds at the end of the list; size++
}
}
}
return Collections.unmodifiableList(result);
}
/**
* The value if the property <code>pd</code> of <code>pb</code>, if
* <ul>
* <li>it is readable</li>
* <li>it is a {@link PersistentBean}
* </ul>
* <code>null</code> otherwise 9also if there is an exception reading).
*
* @pre pb != null;
* @pre pd != null;
*/
private PersistentBean relatedPeristentBean(PersistentBean pb, PropertyDescriptor pd) {
assert pb != null;
assert pd != null;
PersistentBean result = null;
if (PersistentBean.class.isAssignableFrom(pd.getPropertyType())) {
Method rm = pd.getReadMethod();
if (rm != null) {
// found a property that returns a related bean; get it
try {
result = (PersistentBean)rm.invoke(pb, null);
}
catch (IllegalArgumentException iaExc) {
assert false : "Should not happen, since there are no " //$NON-NLS-1$
+ "arguments, and the implicit argument is " //$NON-NLS-1$
+ "not null and of the correct type"; //$NON-NLS-1$
}
catch (IllegalAccessException e) {
assert false : "IllegalAccessException should not happen: " + e;
}
catch (InvocationTargetException e) {
assert false : "InvocationTargetException should not happen: " + e;
}
catch (NullPointerException e) {
assert false : "NullPointerException should not happen: " + e;
}
/* ExceptionInInitializerError can occur with invoke, but we do not
take into account errors */
}
}
return result;
}
/**
* @param id
* The ID of the PersistentBean to retrieve
* @param persistentObjectType
* The type of PersistentBean (subclass) to retrieve.
* @throws IdNotFoundException
* No PersistentBean with <param>id</param> of type
* <param>persistentObjectType</param>was found.
* @throws TechnicalException
* getSession() == null
* || id == null
* || persistentObjectType == null
* || !PersistentBean.class
* .isAssignableFrom(persistentObjectType);
*/
public PersistentBean retrievePersistentBean(
final Long id,
final Class persistentObjectType)
throws IdNotFoundException, TechnicalException {
LOG.debug("Retrieving record with id = " + id + " ..."); //$NON-NLS-2$
if (getSession() == null) {
throw new TechnicalException(NULL_SESSION, null);
}
if (id == null) {
throw new IdNotFoundException(id, "ID_IS_NULL",
null, persistentObjectType);
}
if (persistentObjectType == null) {
throw new TechnicalException(NO_PERSISTENT_OBJECT, null);
}
if (!PersistentBean.class.isAssignableFrom(persistentObjectType)) {
throw new TechnicalException(persistentObjectType.toString()
+ WRONG_SUBTYPE,
null);
}
PersistentBean result = null;
try {
result = (PersistentBean)getSession().get(persistentObjectType, id);
if (result == null) {
LOG.debug("Record not found");
throw new IdNotFoundException(id, null, null, persistentObjectType);
}
// When hibernate caching is active they can give back a object with
// the correct ID but of the wrong type, so this extra check is
// introduced as a workaround for it. A posting was done to the hibernate
// forum to ask if it is a bug or if we are missing something.
//
// URL: http://forum.hibernate.org/viewtopic.php?t=938177
if (!persistentObjectType.isInstance(result)) {
LOG.debug("Incorrect record found (Wrong type");
throw new IdNotFoundException(id, null, null, persistentObjectType);
}
}
catch (ClassCastException ccExc) {
throw new TechnicalException("retrieved object was not a PersistentBean",
ccExc);
}
catch (HibernateException hExc) {
// this cannot be that we did not find an object with that id, since we
// use get
throw new TechnicalException("problem getting record from DB", hExc);
}
assert result != null;
assert result.getId().equals(id);
assert persistentObjectType.isInstance(result);
if (LOG.isDebugEnabled()) {
LOG.debug("Retrieval succeeded (" + result + ")"); //$NON-NLS-2$
}
return result;
}
/**
* @throws TechnicalException
* getSession() == null
* || persistentObjectType == null
* || ! PersistentBean.class.isAssignableFrom(persistentObjectType);
*/
public Set retrieveAllPersistentBeans(final Class persistentObjectType,
final boolean retrieveSubClasses)
throws TechnicalException {
LOG.debug("Retrieving all records of type \"" + persistentObjectType + "\" ..."); //$NON-NLS-2$
if (getSession() == null) {
throw new TechnicalException(NULL_SESSION, null);
}
if (persistentObjectType == null) {
throw new TechnicalException(
"persistentObjectType cannot be null", null);
}
if (!PersistentBean.class.isAssignableFrom(persistentObjectType)) {
throw new TechnicalException(persistentObjectType.toString()
+ WRONG_SUBTYPE,
null);
}
Set results = new HashSet();
try {
if (retrieveSubClasses) {
results.addAll(getSession().createCriteria(persistentObjectType).list());
}
else {
try {
results.addAll(getSession().createQuery("FROM "
+ persistentObjectType.getName()
+ " as persistentObject WHERE persistentObject.class = "
+ persistentObjectType.getName()).list());
}
catch (QueryException qExc) {
if (qExc.getMessage().matches(
"could not resolve property: class of: .*")) {
results.addAll(getSession().createCriteria(persistentObjectType).list());
}
}
}
}
catch (HibernateException hExc) {
throw new TechnicalException("problem getting all instances of "
+ persistentObjectType.getName(),
hExc);
}
assert results != null;
LOG.debug("Retrieval succeeded (" + results.size() + " objects retrieved)"); //$NON-NLS-2$
return results;
}
  /**
   * Take up the current state of <code>pb</code> into the DB. The actual
   * write is deferred to Hibernate, at the latest until commit.
   *
   * @throws CompoundPropertyException
   *         pb fails its civility check after normalization
   * @throws TechnicalException !
   *         isInTransaction() || pb == null ||
   *         pb.getId() == null || getSession() == null;
   */
  public final void updatePersistentBean(final PersistentBean pb)
      throws CompoundPropertyException, TechnicalException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Updating bean \"" + pb + "\" ..."); //$NON-NLS-2$
    }
    // precondition checks, same order as the other CRUD methods
    if (getSession() == null) {
      throw new TechnicalException(NULL_SESSION, null);
    }
    if (!isInTransaction()) {
      throw new TechnicalException(NO_PENDING_TRANSACTION, null);
    }
    if (pb == null) {
      throw new TechnicalException(NO_PERSISTENT_OBJECT, null);
    }
    if (pb.getId() == null) {
      throw new TechnicalException("pb has no id", null);
    }
    try {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Normalizing \"" + pb + "\" ..."); //$NON-NLS-2$
      }
      pb.normalize();
      pb.checkCivility(); // CompoundPropertyException
      // MUDO (jand) normalize and checkCivility off all reachable PB's (cascade)
      if (LOG.isTraceEnabled()) {
        LOG.trace("Normalization of \"" + pb + "\" done."); //$NON-NLS-2$
      }
      getSession().update(pb);
      /*
       * If there is a persistent instance with the same identifier, different
       * from this pb, an exception is thrown. This cannot happen since pb is
       * fresh from the DB: we got it with retrieve or created it ourself.
       */
      LOG.debug("Update succeeded.");
    }
    catch (HibernateException hExc) {
      LOG.debug("Update failed.");
      // translates to TechnicalException or CompoundPropertyException
      handleHibernateException(hExc, "updating", pb);
    }
  }
/**
* @throws TechnicalException !
* isInTransaction() || pb == null ||
* pb.getId() == null || getSession() == null;
*/
public void deletePersistentBean(final PersistentBean pb)
throws TechnicalException {
LOG.debug("Deleting persistent bean \"" + pb + "\" ..."); //$NON-NLS-2$
if (getSession() == null) {
throw new TechnicalException(NULL_SESSION, null);
}
if (!isInTransaction()) {
throw new TechnicalException(NO_PENDING_TRANSACTION, null);
}
if (pb == null) {
throw new TechnicalException(NO_PERSISTENT_OBJECT, null);
}
if (pb.getId() == null) {
throw new TechnicalException("pb has no id", null);
}
try {
getSession().delete(pb);
$deleted.add(pb);
// MUDO (jand) take into account cascade delete
}
catch (HibernateException hExc) {
LOG.debug("Deletion failed.");
try {
handleHibernateException(hExc, "Deleting", pb);
}
catch (CompoundPropertyException cpExc) {
assert false : "this should possibly become a non-modifiable exception";
}
}
LOG.debug("Deletion succeeded.");
}
  /*<property name="created">*/
  //------------------------------------------------------------------
  /**
   * Returns true when the given persistent bean has been created (i.e.,
   * has been used as a parameter in {@link #createPersistentBean(PersistentBean)});
   * returns false otherwise.
   *
   * @param pb
   * @basic
   */
  public boolean isCreated(final PersistentBean pb) {
    return $created.contains(pb);
  }
  /**
   * The beans created so far; cleared again on rollback (see
   * {@link #cancelTransaction()}).
   *
   * @invar $created != null;
   * @invar ! $created.contains(null);
   * @invar (forall Object o; $created.contains(o); o instanceof PersistentBean);
   */
  private Set $created = new HashSet();
  /*</property>*/
  /*<property name="deleted">*/
  //------------------------------------------------------------------
  /**
   * Returns true when the given persistent bean has been deleted (i.e.,
   * has been used as a parameter in {@link #deletePersistentBean(PersistentBean)});
   * returns false otherwise.
   *
   * @param pb
   * @basic
   */
  public boolean isDeleted(final PersistentBean pb) {
    return $deleted.contains(pb);
  }
  /**
   * The beans deleted so far; cleared again on rollback (see
   * {@link #cancelTransaction()}).
   *
   * @invar $deleted != null;
   * @invar ! $deleted.contains(null);
   * @invar (forall Object o; $deleted.contains(o); o instanceof PersistentBean);
   */
  private Set $deleted = new HashSet();
  /*</property>*/
  /*<property name="inTransaction">*/
  //------------------------------------------------------------------
  /**
   * Returns true when a transaction is open; returns false otherwise.
   *
   * @basic
   */
  public final boolean isInTransaction() {
    return $isInTransaction;
  }
  /**
   * Set the given boolean value, reflecting whether a transaction is open
   * or not.
   *
   * @param inTransaction
   * @post new.isInTransaction() == inTransaction;
   */
  protected final void setInTransaction(final boolean inTransaction) {
    $isInTransaction = inTransaction;
  }
  // backing field for the inTransaction property
  private boolean $isInTransaction;
  /*</property>*/
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.python;
import static org.junit.Assert.assertThat;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.cxx.CxxBinaryBuilder;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPlatformUtils;
import com.facebook.buck.io.AlwaysFoundExecutableFinder;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.AbstractNodeBuilder;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRules;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildableContext;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.coercer.PatternMatchedCollection;
import com.facebook.buck.rules.coercer.SourceList;
import com.facebook.buck.rules.coercer.VersionMatchedCollection;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.rules.keys.RuleKeyFieldLoader;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.shell.ShBinary;
import com.facebook.buck.shell.ShBinaryBuilder;
import com.facebook.buck.step.Step;
import com.facebook.buck.testutil.AllExistingProjectFilesystem;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.RichStream;
import com.facebook.buck.util.cache.DefaultFileHashCache;
import com.facebook.buck.util.cache.StackedFileHashCache;
import com.facebook.buck.versions.Version;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.regex.Pattern;
public class PythonTestDescriptionTest {
@Test
public void thatTestModulesAreInComponents() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PythonTestBuilder builder =
PythonTestBuilder.create(BuildTargetFactory.newInstance("//:bin"))
.setSrcs(
SourceList.ofUnnamedSources(
ImmutableSortedSet.of(new FakeSourcePath("blah.py"))));
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
PythonTest testRule = builder.build(resolver, filesystem, targetGraph);
PythonBinary binRule = testRule.getBinary();
PythonPackageComponents components = binRule.getComponents();
assertThat(
components.getModules().keySet(),
Matchers.hasItem(PythonTestDescription.getTestModulesListName()));
assertThat(
components.getModules().keySet(),
Matchers.hasItem(PythonTestDescription.getTestMainName()));
assertThat(
binRule.getMainModule(),
Matchers.equalTo(
PythonUtil.toModuleName(
testRule.getBuildTarget(),
PythonTestDescription.getTestMainName().toString())));
}
@Test
public void baseModule() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:test");
String sourceName = "main.py";
SourcePath source = new FakeSourcePath("foo/" + sourceName);
// Run without a base module set and verify it defaults to using the build target
// base name.
PythonTestBuilder normalBuilder =
PythonTestBuilder.create(target)
.setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(source)));
TargetGraph normalTargetGraph = TargetGraphFactory.newInstance(normalBuilder.build());
PythonTest normal =
normalBuilder.build(
new BuildRuleResolver(
normalTargetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
normalTargetGraph);
assertThat(
normal.getBinary().getComponents().getModules().keySet(),
Matchers.hasItem(target.getBasePath().resolve(sourceName)));
// Run *with* a base module set and verify it gets used to build the main module path.
String baseModule = "blah";
PythonTestBuilder withBaseModuleBuilder =
PythonTestBuilder.create(target)
.setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(source)))
.setBaseModule(baseModule);
TargetGraph withBaseModuleTargetGraph =
TargetGraphFactory.newInstance(withBaseModuleBuilder.build());
PythonTest withBaseModule =
withBaseModuleBuilder.build(
new BuildRuleResolver(
withBaseModuleTargetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
withBaseModuleTargetGraph);
assertThat(
withBaseModule.getBinary().getComponents().getModules().keySet(),
Matchers.hasItem(Paths.get(baseModule).resolve(sourceName)));
}
@Test
public void buildArgs() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:test");
ImmutableList<String> buildArgs = ImmutableList.of("--some", "--args");
PythonTestBuilder builder =
PythonTestBuilder.create(target)
.setBuildArgs(buildArgs);
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
PythonTest test = builder.build(resolver, filesystem, targetGraph);
PythonBinary binary = test.getBinary();
ImmutableList<Step> buildSteps = binary.getBuildSteps(
FakeBuildContext.withSourcePathResolver(pathResolver), new FakeBuildableContext());
PexStep pexStep =
RichStream.from(buildSteps)
.filter(PexStep.class)
.toImmutableList()
.get(0);
assertThat(
pexStep.getCommandPrefix(),
Matchers.hasItems(buildArgs.toArray(new String[buildArgs.size()])));
}
@Test
public void platformSrcs() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:test");
SourcePath matchedSource = new FakeSourcePath("foo/a.py");
SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
PythonTestBuilder builder =
PythonTestBuilder.create(target)
.setPlatformSrcs(
PatternMatchedCollection.<SourceList>builder()
.add(
Pattern.compile(PythonTestUtils.PYTHON_PLATFORM.getFlavor().toString()),
SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
.add(
Pattern.compile("won't match anything"),
SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
.build());
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
PythonTest test =
builder.build(
new BuildRuleResolver(
targetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
targetGraph);
assertThat(
test.getBinary().getComponents().getModules().values(),
Matchers.allOf(
Matchers.hasItem(matchedSource),
Matchers.not(Matchers.hasItem(unmatchedSource))));
}
@Test
public void platformResources() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:test");
SourcePath matchedSource = new FakeSourcePath("foo/a.dat");
SourcePath unmatchedSource = new FakeSourcePath("foo/b.dat");
PythonTestBuilder builder =
PythonTestBuilder.create(target)
.setPlatformResources(
PatternMatchedCollection.<SourceList>builder()
.add(
Pattern.compile(PythonTestUtils.PYTHON_PLATFORM.getFlavor().toString()),
SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
.add(
Pattern.compile("won't match anything"),
SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
.build());
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
PythonTest test =
builder.build(
new BuildRuleResolver(
targetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
targetGraph);
assertThat(
test.getBinary().getComponents().getResources().values(),
Matchers.allOf(
Matchers.hasItem(matchedSource),
Matchers.not(Matchers.hasItem(unmatchedSource))));
}
@Test
public void explicitPythonHome() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PythonPlatform platform1 =
PythonPlatform.of(
InternalFlavor.of("pyPlat1"),
new PythonEnvironment(Paths.get("python2.6"), PythonVersion.of("CPython", "2.6")),
Optional.empty());
PythonPlatform platform2 =
PythonPlatform.of(
InternalFlavor.of("pyPlat2"),
new PythonEnvironment(Paths.get("python2.7"), PythonVersion.of("CPython", "2.7")),
Optional.empty());
PythonTestBuilder builder =
PythonTestBuilder.create(
BuildTargetFactory.newInstance("//:bin"),
FlavorDomain.of("Python Platform", platform1, platform2));
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
PythonTest test1 =
builder
.setPlatform(platform1.getFlavor().toString())
.build(
new BuildRuleResolver(
targetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
targetGraph);
assertThat(test1.getBinary().getPythonPlatform(), Matchers.equalTo(platform1));
PythonTest test2 =
builder
.setPlatform(platform2.getFlavor().toString())
.build(
new BuildRuleResolver(
targetGraph,
new DefaultTargetNodeToBuildRuleTransformer()),
filesystem,
targetGraph);
assertThat(test2.getBinary().getPythonPlatform(), Matchers.equalTo(platform2));
}
@Test
public void runtimeDepOnDeps() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
for (PythonBuckConfig.PackageStyle packageStyle : PythonBuckConfig.PackageStyle.values()) {
CxxBinaryBuilder cxxBinaryBuilder =
new CxxBinaryBuilder(BuildTargetFactory.newInstance("//:dep"));
PythonLibraryBuilder pythonLibraryBuilder =
new PythonLibraryBuilder(BuildTargetFactory.newInstance("//:lib"))
.setDeps(ImmutableSortedSet.of(cxxBinaryBuilder.getTarget()));
PythonTestBuilder pythonTestBuilder =
PythonTestBuilder.create(BuildTargetFactory.newInstance("//:test"))
.setDeps(ImmutableSortedSet.of(pythonLibraryBuilder.getTarget()))
.setPackageStyle(packageStyle);
TargetGraph targetGraph =
TargetGraphFactory.newInstance(
cxxBinaryBuilder.build(),
pythonLibraryBuilder.build(),
pythonTestBuilder.build());
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule cxxBinary = cxxBinaryBuilder.build(resolver, filesystem, targetGraph);
pythonLibraryBuilder.build(resolver, filesystem, targetGraph);
PythonTest pythonTest =
pythonTestBuilder.build(resolver, filesystem, targetGraph);
assertThat(
String.format(
"Transitive runtime deps of %s [%s]",
pythonTest,
packageStyle.toString()),
BuildRules.getTransitiveRuntimeDeps(pythonTest, resolver),
Matchers.hasItem(cxxBinary.getBuildTarget()));
}
}
@Test
public void packageStyleParam() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PythonTestBuilder builder =
PythonTestBuilder.create(BuildTargetFactory.newInstance("//:bin"))
.setPackageStyle(PythonBuckConfig.PackageStyle.INPLACE);
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
PythonTest pythonTest = builder.build(resolver, filesystem, targetGraph);
assertThat(
pythonTest.getBinary(),
Matchers.instanceOf(PythonInPlaceBinary.class));
builder =
PythonTestBuilder.create(BuildTargetFactory.newInstance("//:bin"))
.setPackageStyle(PythonBuckConfig.PackageStyle.STANDALONE);
targetGraph = TargetGraphFactory.newInstance(builder.build());
resolver = new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
pythonTest = builder.build(resolver, filesystem, targetGraph);
assertThat(
pythonTest.getBinary(),
Matchers.instanceOf(PythonPackagedBinary.class));
}
  @Test
  public void pexExecutorIsAddedToTestRuntimeDeps() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    // A sh_binary rule that acts as the configured pex executor.
    ShBinaryBuilder pexExecutorBuilder =
        new ShBinaryBuilder(BuildTargetFactory.newInstance("//:pex_executor"))
            .setMain(new FakeSourcePath("run.sh"));
    // Point the python buck config at that rule.
    // NOTE(review): "path_to_pex_executer" is the real config key, typo included;
    // it must not be "corrected" here.
    PythonTestBuilder builder =
        new PythonTestBuilder(
            BuildTargetFactory.newInstance("//:bin"),
            new PythonBuckConfig(
                FakeBuckConfig.builder()
                    .setSections(
                        ImmutableMap.of(
                            "python",
                            ImmutableMap.of(
                                "path_to_pex_executer",
                                pexExecutorBuilder.getTarget().toString())))
                    .build(),
                new AlwaysFoundExecutableFinder()),
            PythonTestUtils.PYTHON_PLATFORMS,
            CxxPlatformUtils.DEFAULT_PLATFORM,
            CxxPlatformUtils.DEFAULT_PLATFORMS);
    // Only standalone packages use the executor.
    builder
        .setPackageStyle(PythonBuckConfig.PackageStyle.STANDALONE);
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            pexExecutorBuilder.build(),
            builder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            targetGraph,
            new DefaultTargetNodeToBuildRuleTransformer());
    ShBinary pexExecutor = pexExecutorBuilder.build(resolver);
    PythonTest binary = builder.build(resolver, filesystem, targetGraph);
    // The executor target must be part of the test's runtime deps so it is
    // built before the test runs.
    assertThat(
        binary.getRuntimeDeps().collect(MoreCollectors.toImmutableSet()),
        Matchers.hasItem(pexExecutor.getBuildTarget()));
  }
  @Test
  public void pexExecutorRuleIsAddedToParseTimeDeps() throws Exception {
    // A sh_binary rule that acts as the configured pex executor.
    ShBinaryBuilder pexExecutorBuilder =
        new ShBinaryBuilder(BuildTargetFactory.newInstance("//:pex_executor"))
            .setMain(new FakeSourcePath("run.sh"));
    // NOTE(review): "path_to_pex_executer" is the real config key, typo included.
    PythonTestBuilder builder =
        new PythonTestBuilder(
            BuildTargetFactory.newInstance("//:bin"),
            new PythonBuckConfig(
                FakeBuckConfig.builder()
                    .setSections(
                        ImmutableMap.of(
                            "python",
                            ImmutableMap.of(
                                "path_to_pex_executer",
                                pexExecutorBuilder.getTarget().toString())))
                    .build(),
                new AlwaysFoundExecutableFinder()),
            PythonTestUtils.PYTHON_PLATFORMS,
            CxxPlatformUtils.DEFAULT_PLATFORM,
            CxxPlatformUtils.DEFAULT_PLATFORMS);
    builder
        .setPackageStyle(PythonBuckConfig.PackageStyle.STANDALONE);
    // The configured executor must show up among the target node's extra
    // (parse-time) deps, without building anything.
    assertThat(
        builder.build().getExtraDeps(),
        Matchers.hasItem(pexExecutorBuilder.getTarget()));
  }
  @Test
  public void pexBuilderAddedToParseTimeDeps() {
    final BuildTarget pexBuilder = BuildTargetFactory.newInstance("//:pex_builder");
    // A config stub that reports an explicit pex executor target.
    PythonBuckConfig config =
        new PythonBuckConfig(FakeBuckConfig.builder().build(), new AlwaysFoundExecutableFinder()) {
          @Override
          public Optional<BuildTarget> getPexExecutorTarget() {
            return Optional.of(pexBuilder);
          }
        };
    // INPLACE binaries build no pex, so the builder is not an implicit dep.
    PythonTestBuilder inplaceBinary =
        new PythonTestBuilder(
            BuildTargetFactory.newInstance("//:bin"),
            config,
            PythonTestUtils.PYTHON_PLATFORMS,
            CxxPlatformUtils.DEFAULT_PLATFORM,
            CxxPlatformUtils.DEFAULT_PLATFORMS)
            .setPackageStyle(PythonBuckConfig.PackageStyle.INPLACE);
    assertThat(inplaceBinary.findImplicitDeps(), Matchers.not(Matchers.hasItem(pexBuilder)));
    // STANDALONE binaries do build a pex, so the builder must be one.
    PythonTestBuilder standaloneBinary =
        new PythonTestBuilder(
            BuildTargetFactory.newInstance("//:bin"),
            config,
            PythonTestUtils.PYTHON_PLATFORMS,
            CxxPlatformUtils.DEFAULT_PLATFORM,
            CxxPlatformUtils.DEFAULT_PLATFORMS)
            .setPackageStyle(PythonBuckConfig.PackageStyle.STANDALONE);
    assertThat(standaloneBinary.findImplicitDeps(), Matchers.hasItem(pexBuilder));
  }
  @Test
  public void versionedSrcs() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.py");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
    // A dep whose selected version decides which source list applies.
    GenruleBuilder depBuilder =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
            .setOut("out");
    // Version "1.0" is selected below, so only matchedSource should apply.
    AbstractNodeBuilder<?, ?, ?> builder =
        PythonTestBuilder.create(target)
            .setVersionedSrcs(
                VersionMatchedCollection.<SourceList>builder()
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("2.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build())
            .setSelectedVersions(ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(depBuilder.build(), builder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            targetGraph,
            new DefaultTargetNodeToBuildRuleTransformer());
    depBuilder.build(resolver, filesystem, targetGraph);
    PythonTest test = (PythonTest) builder.build(resolver, filesystem, targetGraph);
    // Only the source list matching the selected version ends up in the modules.
    assertThat(
        test.getBinary().getComponents().getModules().values(),
        Matchers.allOf(
            Matchers.hasItem(matchedSource),
            Matchers.not(Matchers.hasItem(unmatchedSource))));
  }
  @Test
  public void versionedResources() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.py");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
    // A dep whose selected version decides which resource list applies.
    GenruleBuilder depBuilder =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
            .setOut("out");
    // Version "1.0" is selected below, so only matchedSource should apply.
    AbstractNodeBuilder<?, ?, ?> builder =
        PythonTestBuilder.create(target)
            .setVersionedResources(
                VersionMatchedCollection.<SourceList>builder()
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("2.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build())
            .setSelectedVersions(ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(depBuilder.build(), builder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            targetGraph,
            new DefaultTargetNodeToBuildRuleTransformer());
    depBuilder.build(resolver, filesystem, targetGraph);
    PythonTest test = (PythonTest) builder.build(resolver, filesystem, targetGraph);
    // Only the resource list matching the selected version is packaged.
    assertThat(
        test.getBinary().getComponents().getResources().values(),
        Matchers.allOf(
            Matchers.hasItem(matchedSource),
            Matchers.not(Matchers.hasItem(unmatchedSource))));
  }
  @Test
  public void targetGraphOnlyDepsDoNotAffectRuleKey() throws Exception {
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    for (PythonBuckConfig.PackageStyle packageStyle : PythonBuckConfig.PackageStyle.values()) {
      // First, calculate the rule key of a python binary with no deps.
      PythonTestBuilder pythonTestBuilder =
          PythonTestBuilder.create(BuildTargetFactory.newInstance("//:bin"))
              .setPackageStyle(packageStyle);
      TargetGraph targetGraph =
          TargetGraphFactory.newInstance(
              pythonTestBuilder.build());
      BuildRuleResolver resolver =
          new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
      PythonTest pythonTestWithoutDep =
          pythonTestBuilder.build(resolver, filesystem, targetGraph);
      RuleKey ruleKeyWithoutDep = calculateRuleKey(resolver, pythonTestWithoutDep);
      // Next, calculate the rule key of a python binary with a deps on another binary.
      CxxBinaryBuilder cxxBinaryBuilder =
          new CxxBinaryBuilder(BuildTargetFactory.newInstance("//:dep"));
      pythonTestBuilder.setDeps(ImmutableSortedSet.of(cxxBinaryBuilder.getTarget()));
      targetGraph =
          TargetGraphFactory.newInstance(
              cxxBinaryBuilder.build(),
              pythonTestBuilder.build());
      resolver = new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
      cxxBinaryBuilder.build(resolver, filesystem, targetGraph);
      PythonTest pythonBinaryWithDep =
          pythonTestBuilder.build(resolver, filesystem, targetGraph);
      RuleKey ruleKeyWithDep = calculateRuleKey(resolver, pythonBinaryWithDep);
      // Verify that the rule keys are identical: the dep only shapes the
      // target graph and is not an input of the rule itself.
      assertThat(ruleKeyWithoutDep, Matchers.equalTo(ruleKeyWithDep));
    }
  }
  @Test
  public void platformDeps() throws Exception {
    SourcePath libASrc = new FakeSourcePath("libA.py");
    PythonLibraryBuilder libraryABuilder =
        PythonLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:libA"))
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(libASrc)));
    SourcePath libBSrc = new FakeSourcePath("libB.py");
    PythonLibraryBuilder libraryBBuilder =
        PythonLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:libB"))
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(libBSrc)));
    // libA is keyed on the default cxx platform's flavor; libB on a pattern
    // that cannot match. Only libA's sources should be pulled in.
    PythonTestBuilder binaryBuilder =
        PythonTestBuilder.create(BuildTargetFactory.newInstance("//:bin"))
            .setPlatformDeps(
                PatternMatchedCollection.<ImmutableSortedSet<BuildTarget>>builder()
                    .add(
                        Pattern.compile(
                            CxxPlatformUtils.DEFAULT_PLATFORM.getFlavor().toString(),
                            Pattern.LITERAL),
                        ImmutableSortedSet.of(libraryABuilder.getTarget()))
                    .add(
                        Pattern.compile("matches nothing", Pattern.LITERAL),
                        ImmutableSortedSet.of(libraryBBuilder.getTarget()))
                    .build());
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            libraryABuilder.build(),
            libraryBBuilder.build(),
            binaryBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PythonTest test = (PythonTest) resolver.requireRule(binaryBuilder.getTarget());
    assertThat(
        test.getBinary().getComponents().getModules().values(),
        Matchers.allOf(Matchers.hasItem(libASrc), Matchers.not(Matchers.hasItem(libBSrc))));
  }
  @Test
  public void cxxPlatform() throws Exception {
    // Two distinct C/C++ platform flavors; the binary is pinned to platA below.
    CxxPlatform platformA =
        CxxPlatformUtils.DEFAULT_PLATFORM.withFlavor(InternalFlavor.of("platA"));
    CxxPlatform platformB =
        CxxPlatformUtils.DEFAULT_PLATFORM.withFlavor(InternalFlavor.of("platB"));
    FlavorDomain<CxxPlatform> cxxPlatforms =
        FlavorDomain.from("C/C++ platform", ImmutableList.of(platformA, platformB));
    SourcePath libASrc = new FakeSourcePath("libA.py");
    PythonLibraryBuilder libraryABuilder =
        new PythonLibraryBuilder(
            BuildTargetFactory.newInstance("//:libA"),
            PythonTestUtils.PYTHON_PLATFORMS,
            cxxPlatforms)
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(libASrc)));
    SourcePath libBSrc = new FakeSourcePath("libB.py");
    PythonLibraryBuilder libraryBBuilder =
        new PythonLibraryBuilder(
            BuildTargetFactory.newInstance("//:libB"),
            PythonTestUtils.PYTHON_PLATFORMS,
            cxxPlatforms)
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(libBSrc)));
    // Platform deps are keyed by flavor name; since the binary selects platA,
    // only //:libA should be pulled in.
    PythonTestBuilder binaryBuilder =
        new PythonTestBuilder(
            BuildTargetFactory.newInstance("//:bin"),
            PythonTestUtils.PYTHON_CONFIG,
            PythonTestUtils.PYTHON_PLATFORMS,
            CxxPlatformUtils.DEFAULT_PLATFORM,
            cxxPlatforms)
        .setCxxPlatform(platformA.getFlavor())
        .setPlatformDeps(
            PatternMatchedCollection.<ImmutableSortedSet<BuildTarget>>builder()
                .add(
                    Pattern.compile(platformA.getFlavor().toString(), Pattern.LITERAL),
                    ImmutableSortedSet.of(libraryABuilder.getTarget()))
                .add(
                    Pattern.compile(platformB.getFlavor().toString(), Pattern.LITERAL),
                    ImmutableSortedSet.of(libraryBBuilder.getTarget()))
                .build());
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            libraryABuilder.build(),
            libraryBBuilder.build(),
            binaryBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PythonTest test = (PythonTest) resolver.requireRule(binaryBuilder.getTarget());
    // Only the platA-matched library's source should be packaged.
    assertThat(
        test.getBinary().getComponents().getModules().values(),
        Matchers.allOf(Matchers.hasItem(libASrc), Matchers.not(Matchers.hasItem(libBSrc))));
  }
private RuleKey calculateRuleKey(BuildRuleResolver ruleResolver, BuildRule rule) {
SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
DefaultRuleKeyFactory ruleKeyFactory =
new DefaultRuleKeyFactory(
new RuleKeyFieldLoader(0),
new StackedFileHashCache(
ImmutableList.of(
DefaultFileHashCache.createDefaultFileHashCache(rule.getProjectFilesystem()))),
new SourcePathResolver(ruleFinder),
ruleFinder);
return ruleKeyFactory.build(rule);
}
}
| |
/**
* Copyright 2010 Newcastle University
*
* http://research.ncl.ac.uk/smart/
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oltu.oauth2.as;
import org.apache.commons.codec.binary.Base64;
import org.apache.oltu.oauth2.as.request.OAuthAuthzRequest;
import org.apache.oltu.oauth2.as.request.OAuthRequest;
import org.apache.oltu.oauth2.as.request.OAuthTokenRequest;
import org.apache.oltu.oauth2.common.OAuth;
import org.apache.oltu.oauth2.common.error.OAuthError;
import org.apache.oltu.oauth2.common.exception.OAuthProblemException;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.apache.oltu.oauth2.common.message.types.GrantType;
import org.apache.oltu.oauth2.common.message.types.ResponseType;
import org.apache.oltu.oauth2.common.utils.OAuthUtils;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.*;
public class OAuthRequestTest {
    // Canned OAuth request values shared by every test case in this class.
    public static final String REDIRECT_URI = "http://www.example.com/callback";
    public static final String CLIENT_ID = "test_client";
    // Authorization code exchanged in the authorization_code grant tests.
    public static final String ACCESS_GRANT = "test_code";
    public static final String SECRET = "secret";
    public static final String USERNAME = "test_username";
    public static final String PASSWORD = "test_password";
    public static final String REFRESH_TOKEN = "refresh_token";
    @Test
    public void testWrongResponseGetRequestParam() throws Exception {
        // An unrecognized response_type value must be rejected.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectOauthResponseType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectParam("param", "someparam")
            .build();
        replay(request);
        assertInvalidOAuthRequest(request);
        verify(request);
        // A missing response_type must also be rejected.
        request = new OauthMockRequestBuilder()
            .expectOauthResponseType(null)
            .expectRedirectUri(REDIRECT_URI)
            .expectParam("param", "someparam")
            .build();
        replay(request);
        assertInvalidOAuthRequest(request);
        verify(request);
    }
    /**
     * Asserts that constructing an {@link OAuthAuthzRequest} from the given mock
     * fails with an {@code invalid_request} error. Unlike
     * {@code assertInvalidTokenRequest}, this helper does NOT verify the mock;
     * callers invoke {@code verify(request)} themselves.
     */
    private void assertInvalidOAuthRequest(HttpServletRequest request) throws OAuthSystemException {
        try {
            new OAuthAuthzRequest(request);
            fail("Exception expected");
        } catch (OAuthProblemException e) {
            assertEquals(OAuthError.TokenResponse.INVALID_REQUEST, e.getError());
        }
    }
@Test
public void testCodeRequestInvalidMethod() throws Exception {
HttpServletRequest request = new OauthMockRequestBuilder()
.expectContentType(OAuth.ContentType.URL_ENCODED)
.expectHttpMethod(OAuth.HttpMethod.PUT)
.expectOauthResponseType(ResponseType.CODE.toString())
.expectRedirectUri(REDIRECT_URI)
.expectClientId(CLIENT_ID)
.build();
replay(request);
assertInvalidOAuthRequest(request);
verify(request);
}
    @Test
    public void testCodeRequestMissingParameter() throws Exception {
        // A code request without the mandatory client_id must be rejected.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectHttpMethod(OAuth.HttpMethod.GET)
            .expectOauthResponseType(ResponseType.CODE.toString())
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(null)
            .build();
        replay(request);
        assertInvalidOAuthRequest(request);
        verify(request);
    }
@Test
public void testValidCodeRequest() throws Exception {
assertValidCodeRequest(OAuth.HttpMethod.GET);
assertValidCodeRequest(OAuth.HttpMethod.POST);
}
    /**
     * Asserts that a complete authorization code request sent via {@code httpMethod}
     * constructs an {@link OAuthAuthzRequest} without error, and verifies the mock.
     */
    private void assertValidCodeRequest(String httpMethod) throws OAuthSystemException {
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectHttpMethod(httpMethod)
            .expectOauthResponseType(ResponseType.CODE.toString())
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .build();
        replay(request);
        try {
            new OAuthAuthzRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        verify(request);
    }
    @Test
    public void testTokenWrongGrantType() throws Exception {
        // An unrecognized grant_type value must be rejected.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectGrantType(OAuth.ContentType.URL_ENCODED)
            .expectParam("param", "someparam")
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        // A missing grant_type must also be rejected.
        request = new OauthMockRequestBuilder()
            .expectRedirectUri(REDIRECT_URI)
            .expectGrantType(null)
            .expectParam("param", "someparam")
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
    }
    /**
     * Asserts that constructing an {@link OAuthTokenRequest} from the given mock
     * fails with an {@code invalid_request} error, then verifies the mock
     * (callers do not need to call {@code verify} again).
     */
    private void assertInvalidTokenRequest(HttpServletRequest request) throws OAuthSystemException {
        try {
            new OAuthTokenRequest(request);
            fail("Exception expected");
        } catch (OAuthProblemException e) {
            assertEquals(OAuthError.TokenResponse.INVALID_REQUEST, e.getError());
        }
        verify(request);
    }
@Test
public void testTokenRequestInvalidMethod() throws Exception {
HttpServletRequest request = mockTokenRequestInvalidMethod(GrantType.AUTHORIZATION_CODE.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidMethod(GrantType.PASSWORD.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidMethod(GrantType.REFRESH_TOKEN.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidMethod(null);
assertInvalidTokenRequest(request);
}
    /**
     * Builds and replays a token-request mock that uses the disallowed GET
     * method for the given grant type (may be {@code null}).
     */
    private HttpServletRequest mockTokenRequestInvalidMethod(String grantType) {
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectHttpMethod(OAuth.HttpMethod.GET)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectGrantType(grantType)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .build();
        replay(request);
        return request;
    }
@Test
public void testTokenRequestInvalidContentType() throws Exception {
HttpServletRequest request = mockTokenRequestInvalidContentType(GrantType.AUTHORIZATION_CODE.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidContentType(GrantType.PASSWORD.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidContentType(GrantType.REFRESH_TOKEN.toString());
assertInvalidTokenRequest(request);
request = mockTokenRequestInvalidContentType(null);
assertInvalidTokenRequest(request);
}
    /**
     * Builds and replays a token-request mock that uses the disallowed JSON
     * content type for the given grant type (may be {@code null}).
     */
    private HttpServletRequest mockTokenRequestInvalidContentType(String grantType) {
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectGrantType(grantType)
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.JSON)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .build();
        replay(request);
        return request;
    }
    @Test
    public void testTokenAuthCodeRequestMissingParameter() throws Exception {
        // Scenario 1: missing client_id.
        // NOTE(review): the grant type here is the OAuth.OAUTH_GRANT_TYPE
        // constant (presumably the parameter name, not a real grant value) —
        // the request is invalid either way; confirm intent.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectBasicAuthHeader(null)
            .expectGrantType(OAuth.OAUTH_GRANT_TYPE)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(null)
            .expectClientSecret(SECRET)
            .expectAccessGrant(ACCESS_GRANT)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        // Scenario 2: missing authorization code (redirect_uri expectation is
        // set twice; the later null presumably wins — TODO confirm builder
        // semantics).
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.AUTHORIZATION_CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectBasicAuthHeader(null)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectRedirectUri(null)
            .expectAccessGrant(null)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        // Scenario 3: missing authorization code with a valid redirect_uri.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.AUTHORIZATION_CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectBasicAuthHeader(null)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectAccessGrant(null)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
    }
    @Test
    public void testTokenAuthCodeRequestWithBasicAuthenticationMissingParameter() throws Exception {
        // Basic auth header with a missing secret, then a missing client id.
        HttpServletRequest request = mockOAuthTokenRequestBasicAuth(CLIENT_ID, null);
        assertInvalidTokenRequest(request);
        request = mockOAuthTokenRequestBasicAuth(null, SECRET);
        assertInvalidTokenRequest(request);
        // Don't allow to mix basic auth header and body params.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.AUTHORIZATION_CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(null)
            .expectRedirectUri(REDIRECT_URI)
            .expectAccessGrant(ACCESS_GRANT)
            .expectBasicAuthHeader(createBasicAuthHeader(null, SECRET))
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
    }
    /**
     * Builds and replays an authorization_code token-request mock whose client
     * credentials are supplied only via a Basic auth header (body client_id and
     * client_secret are null).
     */
    private HttpServletRequest mockOAuthTokenRequestBasicAuth(String clientId, String clientSecret) {
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.AUTHORIZATION_CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(null)
            .expectClientSecret(null)
            .expectRedirectUri(REDIRECT_URI)
            .expectAccessGrant(ACCESS_GRANT)
            .expectBasicAuthHeader(createBasicAuthHeader(clientId, clientSecret))
            .build();
        replay(request);
        return request;
    }
private String createBasicAuthHeader(String clientId, String clientSecret) {
clientSecret = OAuthUtils.isEmpty(clientSecret) ? "" : clientSecret;
clientId = OAuthUtils.isEmpty(clientId) ? "" : clientId;
final String authString = clientId + ":" + clientSecret;
return "basic " + Base64.encodeBase64String(authString.getBytes());
}
    @Test
    public void testTokenPasswordRequestMissingParameter() throws Exception {
        // Scenario 1: missing username.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.PASSWORD.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(createBasicAuthHeader(null, SECRET))
            .expectOauthUsername(null)
            .expectOauthPassword(SECRET)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
        // Scenario 2: empty password.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.PASSWORD.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .expectBasicAuthHeader(createBasicAuthHeader(null, SECRET))
            .expectOauthUsername(USERNAME)
            .expectOauthPassword("")
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
        // Scenario 3: missing client authentication entirely.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.PASSWORD.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(null)
            .expectClientSecret("")
            .expectBasicAuthHeader(null)
            .expectOauthUsername(USERNAME)
            .expectOauthPassword(PASSWORD)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
    }
    @Test
    public void testRefreshTokenRequestMissingParameter() throws Exception {
        // Scenario 1: missing refresh token.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.REFRESH_TOKEN.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(null)
            .expectOauthRefreshToken(null)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
        // Scenario 2: empty client id.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.REFRESH_TOKEN.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId("")
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(null)
            .expectOauthRefreshToken(REFRESH_TOKEN)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
        // Scenario 3: missing refresh token plus a malformed auth header
        // (the raw secret, not a "basic" credential pair).
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.REFRESH_TOKEN.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectRedirectUri(REDIRECT_URI)
            .expectClientId(null)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(SECRET)
            .expectOauthRefreshToken(null)
            .build();
        replay(request);
        assertInvalidTokenRequest(request);
        verify(request);
    }
    @Test
    public void testValidTokenRequest() throws Exception {
        // Scenario 1: authorization_code grant with body credentials.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.AUTHORIZATION_CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(null)
            .expectAccessGrant(ACCESS_GRANT)
            .expectRedirectUri(REDIRECT_URI)
            .build();
        replay(request);
        OAuthTokenRequest req = null;
        try {
            req = new OAuthTokenRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        assertEquals(GrantType.AUTHORIZATION_CODE.toString(), req.getGrantType());
        assertEquals(CLIENT_ID, req.getClientId());
        assertEquals(REDIRECT_URI, req.getRedirectURI());
        assertEquals(ACCESS_GRANT, req.getCode());
        verify(request);
        // Scenario 2: resource-owner password grant.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.PASSWORD.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(null)
            .expectOauthUsername(USERNAME)
            .expectOauthPassword(PASSWORD)
            .build();
        replay(request);
        try {
            req = new OAuthTokenRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        assertEquals(CLIENT_ID, req.getClientId());
        assertEquals(USERNAME, req.getUsername());
        assertEquals(PASSWORD, req.getPassword());
        verify(request);
        // Scenario 3: client_credentials grant authenticated solely via a
        // Basic auth header.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.CLIENT_CREDENTIALS.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectBasicAuthHeader(createBasicAuthHeader(CLIENT_ID, SECRET))
            .build();
        replay(request);
        try {
            req = new OAuthTokenRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        verify(request);
        // Scenario 4: refresh_token grant with body credentials.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.REFRESH_TOKEN.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(CLIENT_ID)
            .expectClientSecret(SECRET)
            .expectBasicAuthHeader(null)
            .expectOauthRefreshToken(REFRESH_TOKEN)
            .build();
        replay(request);
        try {
            req = new OAuthTokenRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        assertEquals(CLIENT_ID, req.getClientId());
        assertEquals(REFRESH_TOKEN, req.getRefreshToken());
        assertEquals(SECRET, req.getClientSecret());
        verify(request);
        // Scenario 5: refresh_token grant with credentials taken from the
        // Basic auth header (empty body credentials); the parsed client
        // id/secret must come from the header.
        request = new OauthMockRequestBuilder()
            .expectGrantType(GrantType.REFRESH_TOKEN.toString())
            .expectHttpMethod(OAuth.HttpMethod.POST)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId("")
            .expectClientSecret("")
            .expectBasicAuthHeader(createBasicAuthHeader(CLIENT_ID, SECRET))
            .expectOauthRefreshToken(REFRESH_TOKEN)
            .build();
        replay(request);
        try {
            req = new OAuthTokenRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        assertEquals(CLIENT_ID, req.getClientId());
        assertEquals(REFRESH_TOKEN, req.getRefreshToken());
        assertEquals(SECRET, req.getClientSecret());
        verify(request);
    }
    @Test
    public void testScopes() throws Exception {
        // A space-delimited scope parameter must be split into individual scopes.
        HttpServletRequest request = new OauthMockRequestBuilder()
            .expectOauthResponseType(ResponseType.CODE.toString())
            .expectHttpMethod(OAuth.HttpMethod.GET)
            .expectContentType(OAuth.ContentType.URL_ENCODED)
            .expectClientId(CLIENT_ID)
            .expectRedirectUri(REDIRECT_URI)
            .expectScopes("album photo")
            .build();
        replay(request);
        OAuthRequest req = null;
        try {
            req = new OAuthAuthzRequest(request);
        } catch (OAuthProblemException e) {
            fail("Exception not expected");
        }
        Set<String> scopes = req.getScopes();
        assertTrue(findScope(scopes, "album"));
        assertTrue(findScope(scopes, "photo"));
        verify(request);
    }
private boolean findScope(Set<String> scopes, String scope) {
for (String s : scopes) {
if (s.equals(scope)) {
return true;
}
}
return false;
}
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.multitenancy.suspensionstate;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
import java.util.Arrays;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.externaltask.ExternalTaskQuery;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.JobQuery;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;
import org.camunda.bpm.engine.task.TaskQuery;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.util.ProcessEngineTestRule;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.camunda.bpm.model.bpmn.Bpmn;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.RuleChain;
public class MultiTenancyProcessInstanceSuspensionStateTest {
  protected static final String TENANT_ONE = "tenant1";
  protected static final String TENANT_TWO = "tenant2";
  protected static final String PROCESS_DEFINITION_KEY = "testProcess";
  // Process model with a parallel fork into a user task and an external send
  // task ("test" topic) that carries a 1-minute timer boundary event — so every
  // instance produces a user task, an external task, and a timer job.
  protected static final BpmnModelInstance PROCESS = Bpmn.createExecutableProcess(PROCESS_DEFINITION_KEY)
      .startEvent()
      .parallelGateway("fork")
        .userTask()
      .moveToLastGateway()
        .sendTask()
          .camundaType("external")
          .camundaTopic("test")
        .boundaryEvent()
          .timerWithDuration("PT1M")
      .done();
  protected ProcessEngineRule engineRule = new ProvidedProcessEngineRule();
  protected ProcessEngineTestRule testRule = new ProcessEngineTestRule(engineRule);
  // The engine rule must wrap the test rule so deployments are cleaned up last.
  @Rule
  public RuleChain ruleChain = RuleChain.outerRule(engineRule).around(testRule);
  @Rule
  public ExpectedException thrown= ExpectedException.none();
  @Before
  public void setUp() throws Exception {
    // Deploy the same process for tenant1, tenant2, and without a tenant, and
    // start one instance of each deployment: every test begins with exactly
    // three active process instances.
    testRule.deployForTenant(TENANT_ONE, PROCESS);
    testRule.deployForTenant(TENANT_TWO, PROCESS);
    testRule.deploy(PROCESS);
    engineRule.getRuntimeService().createProcessInstanceByKey(PROCESS_DEFINITION_KEY).processDefinitionTenantId(TENANT_ONE).execute();
    engineRule.getRuntimeService().createProcessInstanceByKey(PROCESS_DEFINITION_KEY).processDefinitionTenantId(TENANT_TWO).execute();
    engineRule.getRuntimeService().createProcessInstanceByKey(PROCESS_DEFINITION_KEY).processDefinitionWithoutTenantId().execute();
  }
  @Test
  public void suspendAndActivateProcessInstancesForAllTenants() {
    // given activated process instances
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // first suspend — without a tenant restriction this affects all tenants
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // then activate
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .activate();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
  }
  @Test
  public void suspendProcessInstanceForTenant() {
    // given activated process instances
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // suspending restricted to tenant1 must leave the other two instances active
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionTenantId(TENANT_ONE)
      .suspend();
    assertThat(query.active().count(), is(2L));
    assertThat(query.suspended().count(), is(1L));
    assertThat(query.suspended().tenantIdIn(TENANT_ONE).count(), is(1L));
  }
  @Test
  public void suspendProcessInstanceForNonTenant() {
    // given activated process instances
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // suspending restricted to the tenant-less definition only
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionWithoutTenantId()
      .suspend();
    assertThat(query.active().count(), is(2L));
    assertThat(query.suspended().count(), is(1L));
    assertThat(query.suspended().withoutTenantId().count(), is(1L));
  }
  @Test
  public void activateProcessInstanceForTenant() {
    // given suspended process instances
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // activating restricted to tenant1 must leave the other two suspended
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionTenantId(TENANT_ONE)
      .activate();
    assertThat(query.suspended().count(), is(2L));
    assertThat(query.active().count(), is(1L));
    assertThat(query.active().tenantIdIn(TENANT_ONE).count(), is(1L));
  }
  @Test
  public void activateProcessInstanceForNonTenant() {
    // given suspended process instances
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // activating restricted to the tenant-less definition only
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionWithoutTenantId()
      .activate();
    assertThat(query.suspended().count(), is(2L));
    assertThat(query.active().count(), is(1L));
    assertThat(query.active().withoutTenantId().count(), is(1L));
  }
  @Test
  public void suspendAndActivateProcessInstancesIncludingUserTasksForAllTenants() {
    // given activated user tasks
    TaskQuery query = engineRule.getTaskService().createTaskQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // first suspend — instance suspension cascades to user tasks
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // then activate
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .activate();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
  }
  @Test
  public void suspendProcessInstanceIncludingUserTaskForTenant() {
    // given activated user tasks
    TaskQuery query = engineRule.getTaskService().createTaskQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // suspending tenant1's instance must suspend only its user task
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionTenantId(TENANT_ONE)
      .suspend();
    assertThat(query.active().count(), is(2L));
    assertThat(query.suspended().count(), is(1L));
    assertThat(query.suspended().tenantIdIn(TENANT_ONE).count(), is(1L));
  }
  @Test
  public void suspendProcessInstanceIncludingUserTaskForNonTenant() {
    // given activated user tasks
    TaskQuery query = engineRule.getTaskService().createTaskQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // suspending the tenant-less instance must suspend only its user task
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionWithoutTenantId()
      .suspend();
    assertThat(query.active().count(), is(2L));
    assertThat(query.suspended().count(), is(1L));
    assertThat(query.suspended().withoutTenantId().count(), is(1L));
  }
  @Test
  public void activateProcessInstanceIncludingUserTaskForTenant() {
    // given suspended user tasks
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    TaskQuery query = engineRule.getTaskService().createTaskQuery();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // activating tenant1's instance must re-activate only its user task
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionTenantId(TENANT_ONE)
      .activate();
    assertThat(query.suspended().count(), is(2L));
    assertThat(query.active().count(), is(1L));
    assertThat(query.active().tenantIdIn(TENANT_ONE).count(), is(1L));
  }
  @Test
  public void activateProcessInstanceIncludingUserTaskForNonTenant() {
    // given suspended user tasks
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    TaskQuery query = engineRule.getTaskService().createTaskQuery();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // activating the tenant-less instance must re-activate only its user task
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionWithoutTenantId()
      .activate();
    assertThat(query.suspended().count(), is(2L));
    assertThat(query.active().count(), is(1L));
    assertThat(query.active().withoutTenantId().count(), is(1L));
  }
  @Test
  public void suspendAndActivateProcessInstancesIncludingExternalTasksForAllTenants() {
    // given activated external tasks
    ExternalTaskQuery query = engineRule.getExternalTaskService().createExternalTaskQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // first suspend — instance suspension cascades to external tasks
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .suspend();
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    // then activate
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .activate();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
  }
  @Test
  public void suspendProcessInstanceIncludingExternalTaskForTenant() {
    // given activated external tasks
    ExternalTaskQuery query = engineRule.getExternalTaskService().createExternalTaskQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // suspending tenant1's instance must suspend only its external task
    engineRule.getRuntimeService()
      .updateProcessInstanceSuspensionState()
      .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
      .processDefinitionTenantId(TENANT_ONE)
      .suspend();
    assertThat(query.active().count(), is(2L));
    assertThat(query.suspended().count(), is(1L));
    assertThat(query.suspended().tenantIdIn(TENANT_ONE).count(), is(1L));
  }
@Test
public void suspendProcessInstanceIncludingExternalTaskForNonTenant() {
    // given: the external tasks of all three process instances are active
    ExternalTaskQuery externalTaskQuery = engineRule.getExternalTaskService().createExternalTaskQuery();
    assertThat(externalTaskQuery.active().count(), is(3L));
    assertThat(externalTaskQuery.suspended().count(), is(0L));
    // when suspending only the instances of the tenant-less definition
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionWithoutTenantId()
        .suspend();
    // then exactly the external task without a tenant id is suspended
    assertThat(externalTaskQuery.active().count(), is(2L));
    assertThat(externalTaskQuery.suspended().count(), is(1L));
    assertThat(externalTaskQuery.suspended().singleResult().getTenantId(), is(nullValue()));
}
@Test
public void activateProcessInstanceIncludingExternalTaskForTenant() {
    // given: all external tasks have been suspended
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).suspend();
    ExternalTaskQuery externalTaskQuery = engineRule.getExternalTaskService().createExternalTaskQuery();
    assertThat(externalTaskQuery.active().count(), is(0L));
    assertThat(externalTaskQuery.suspended().count(), is(3L));
    // when activating only for tenant one
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionTenantId(TENANT_ONE)
        .activate();
    // then only the external task of tenant one is active again
    assertThat(externalTaskQuery.suspended().count(), is(2L));
    assertThat(externalTaskQuery.active().count(), is(1L));
    assertThat(externalTaskQuery.active().tenantIdIn(TENANT_ONE).count(), is(1L));
}
@Test
public void activateProcessInstanceIncludingExternalTaskForNonTenant() {
    // given: all external tasks have been suspended
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).suspend();
    ExternalTaskQuery externalTaskQuery = engineRule.getExternalTaskService().createExternalTaskQuery();
    assertThat(externalTaskQuery.active().count(), is(0L));
    assertThat(externalTaskQuery.suspended().count(), is(3L));
    // when activating only for the tenant-less definition
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionWithoutTenantId()
        .activate();
    // then only the external task without a tenant id is active again
    assertThat(externalTaskQuery.suspended().count(), is(2L));
    assertThat(externalTaskQuery.active().count(), is(1L));
    assertThat(externalTaskQuery.active().singleResult().getTenantId(), is(nullValue()));
}
@Test
public void suspendAndActivateProcessInstancesIncludingJobsForAllTenants() {
    // given: the jobs of all three process instances are active
    JobQuery jobQuery = engineRule.getManagementService().createJobQuery();
    assertThat(jobQuery.active().count(), is(3L));
    assertThat(jobQuery.suspended().count(), is(0L));
    // when suspending across all tenants
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).suspend();
    // then every job is suspended
    assertThat(jobQuery.active().count(), is(0L));
    assertThat(jobQuery.suspended().count(), is(3L));
    // when activating again across all tenants
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).activate();
    // then every job is active again
    assertThat(jobQuery.active().count(), is(3L));
    assertThat(jobQuery.suspended().count(), is(0L));
}
@Test
public void suspendProcessInstanceIncludingJobForTenant() {
    // given: the jobs of all three process instances are active
    JobQuery jobQuery = engineRule.getManagementService().createJobQuery();
    assertThat(jobQuery.active().count(), is(3L));
    assertThat(jobQuery.suspended().count(), is(0L));
    // when suspending only the instances of tenant one's definition
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionTenantId(TENANT_ONE)
        .suspend();
    // then exactly the job of tenant one is suspended
    assertThat(jobQuery.active().count(), is(2L));
    assertThat(jobQuery.suspended().count(), is(1L));
    assertThat(jobQuery.suspended().tenantIdIn(TENANT_ONE).count(), is(1L));
}
@Test
public void suspendProcessInstanceIncludingJobForNonTenant() {
    // given: the jobs of all three process instances are active
    JobQuery jobQuery = engineRule.getManagementService().createJobQuery();
    assertThat(jobQuery.active().count(), is(3L));
    assertThat(jobQuery.suspended().count(), is(0L));
    // when suspending only the instances of the tenant-less definition
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionWithoutTenantId()
        .suspend();
    // then exactly the job without a tenant id is suspended
    assertThat(jobQuery.active().count(), is(2L));
    assertThat(jobQuery.suspended().count(), is(1L));
    assertThat(jobQuery.suspended().singleResult().getTenantId(), is(nullValue()));
}
@Test
public void activateProcessInstanceIncludingJobForTenant() {
    // given: all jobs have been suspended
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).suspend();
    JobQuery jobQuery = engineRule.getManagementService().createJobQuery();
    assertThat(jobQuery.active().count(), is(0L));
    assertThat(jobQuery.suspended().count(), is(3L));
    // when activating only for tenant one
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionTenantId(TENANT_ONE)
        .activate();
    // then only the job of tenant one is active again
    assertThat(jobQuery.suspended().count(), is(2L));
    assertThat(jobQuery.active().count(), is(1L));
    assertThat(jobQuery.active().tenantIdIn(TENANT_ONE).count(), is(1L));
}
@Test
public void activateProcessInstanceIncludingJobForNonTenant() {
    // given: all jobs have been suspended
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY).suspend();
    JobQuery jobQuery = engineRule.getManagementService().createJobQuery();
    assertThat(jobQuery.active().count(), is(0L));
    assertThat(jobQuery.suspended().count(), is(3L));
    // when activating only for the tenant-less definition
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .processDefinitionWithoutTenantId()
        .activate();
    // then only the job without a tenant id is active again
    assertThat(jobQuery.suspended().count(), is(2L));
    assertThat(jobQuery.active().count(), is(1L));
    assertThat(jobQuery.active().singleResult().getTenantId(), is(nullValue()));
}
@Test
public void suspendProcessInstanceNoAuthenticatedTenants() {
    // given activated process instances
    ProcessInstanceQuery processInstanceQuery = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(processInstanceQuery.active().count(), is(3L));
    assertThat(processInstanceQuery.suspended().count(), is(0L));
    // when suspending while authenticated without any tenant
    engineRule.getIdentityService().setAuthentication("user", null, null);
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .suspend();
    engineRule.getIdentityService().clearAuthentication();
    // then only the instance without a tenant id is suspended
    assertThat(processInstanceQuery.active().count(), is(2L));
    assertThat(processInstanceQuery.suspended().count(), is(1L));
    assertThat(processInstanceQuery.suspended().withoutTenantId().count(), is(1L));
}
@Test
public void failToSuspendProcessInstanceByProcessDefinitionIdNoAuthenticatedTenants() {
    // given a process definition that belongs to tenant one
    ProcessDefinition tenantOneDefinition = engineRule.getRepositoryService().createProcessDefinitionQuery()
        .processDefinitionKey(PROCESS_DEFINITION_KEY).tenantIdIn(TENANT_ONE).singleResult();
    // then expect the suspension to be rejected for the unauthenticated tenant
    thrown.expect(ProcessEngineException.class);
    String expectedMessage = "Cannot update the process definition '"
        + tenantOneDefinition.getId() + "' because it belongs to no authenticated tenant";
    thrown.expectMessage(expectedMessage);
    // when suspending by process definition id without any authenticated tenant
    engineRule.getIdentityService().setAuthentication("user", null, null);
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionId(tenantOneDefinition.getId())
        .suspend();
}
@Test
public void suspendProcessInstanceWithAuthenticatedTenant() {
    // given activated process instances
    ProcessInstanceQuery processInstanceQuery = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(processInstanceQuery.active().count(), is(3L));
    assertThat(processInstanceQuery.suspended().count(), is(0L));
    // when suspending while authenticated for tenant one only
    engineRule.getIdentityService().setAuthentication("user", null, Arrays.asList(TENANT_ONE));
    engineRule.getRuntimeService().updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .suspend();
    engineRule.getIdentityService().clearAuthentication();
    // then the tenant-one instance and the tenant-less instance are suspended,
    // while the tenant-two instance stays active
    assertThat(processInstanceQuery.active().count(), is(1L));
    assertThat(processInstanceQuery.suspended().count(), is(2L));
    assertThat(processInstanceQuery.active().tenantIdIn(TENANT_TWO).count(), is(1L));
    assertThat(processInstanceQuery.suspended().withoutTenantId().count(), is(1L));
    assertThat(processInstanceQuery.suspended().tenantIdIn(TENANT_ONE).count(), is(1L));
}
@Test
public void suspendProcessInstanceDisabledTenantCheck() {
    // given activated process instances
    ProcessInstanceQuery query = engineRule.getRuntimeService().createProcessInstanceQuery();
    assertThat(query.active().count(), is(3L));
    assertThat(query.suspended().count(), is(0L));
    // when suspending with the tenant check disabled and no authenticated tenant
    engineRule.getProcessEngineConfiguration().setTenantCheckEnabled(false);
    engineRule.getIdentityService().setAuthentication("user", null, null);
    engineRule.getRuntimeService()
        .updateProcessInstanceSuspensionState()
        .byProcessDefinitionKey(PROCESS_DEFINITION_KEY)
        .suspend();
    // Fix: clear the authentication again so this test does not leak identity
    // state into subsequent tests (the sibling tests above all clear it).
    engineRule.getIdentityService().clearAuthentication();
    // then the instances of all tenants are suspended despite the missing
    // tenant authorization
    assertThat(query.active().count(), is(0L));
    assertThat(query.suspended().count(), is(3L));
    assertThat(query.suspended().tenantIdIn(TENANT_ONE, TENANT_TWO).count(), is(2L));
    assertThat(query.suspended().withoutTenantId().count(), is(1L));
}
}
| |
/*
plugin : Synapse_.java
author : Max Larsson
e-mail : max.larsson@liu.se
This ImageJ plugin is for use in conjunction with Vesicle.py.
Copyright 2001-2018 Max Larsson <max.larsson@liu.se>
This software is released under the MIT license.
*/
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.Random;
import ij.*;
import ij.io.*;
import ij.gui.*;
import ij.plugin.frame.*;
import ij.measure.*;
// Version metadata for the Synapse plugin. Referenced by the "About..."
// dialog and written into the header of saved profile files.
// NOTE(review): a constant interface is a Java anti-pattern, but Synapse_
// and ProfileData reference these fields, so it is kept for compatibility.
interface VersionSyn {
// Plugin display name.
String title = "Synapse";
String author = "Max Larsson";
String version = "1.2.1";
// Date of last modification, shown in the About dialog.
String year = "2018";
String month = "May";
String day = "31";
String email = "max.larsson@liu.se";
String homepage = "http://www.hu.liu.se/forskning/larsson-max/software";
}
// Drawing colors for the different profile components. Implemented by
// Synapse_ and ProfileData so the constants are in scope unqualified.
// NOTE(review): constant interface kept for compatibility with implementers.
interface Options_Syn {
// Postsynaptic element outline.
Color poselCol = Color.cyan;
// Presynaptic element outline.
Color prselCol = Color.green;
// Postsynaptic density outline.
Color psdCol = Color.orange;
// Manually placed points.
Color pointCol = Color.yellow;
// Randomly placed points.
Color randomCol = Color.magenta;
// Hole outlines.
Color holeCol = Color.red;
// Profile ID text label.
Color textCol = Color.blue;
}
/**
 * ImageJ plugin frame for annotating synapse profiles on electron
 * micrographs: the user outlines pre-/postsynaptic elements, PSDs, holes and
 * points, places random points, and saves the coordinates to a ".syn" file
 * (see ProfileData). Companion to Vesicle.py.
 */
public class Synapse_ extends PlugInFrame implements Options_Syn, ActionListener {
    // Main control panel holding all command buttons.
    Panel panel;
    // Singleton plugin frame (only one instance may be open at a time).
    static Frame instance;
    static Choice postruct;
    static Choice prestruct;
    // Secondary read-only window showing the current profile's state.
    static Frame infoFrame;
    static Panel subPanelA, subPanelB;
    GridBagLayout infoPanel;
    GridBagConstraints c;
    // Value labels of the info window, refreshed after every action.
    Label profile_nLabel;
    Label preelnLabel;
    Label poselnLabel;
    Label psdnLabel;
    Label pnLabel;
    Label holenLabel;
    Label randomPlacedLabel;
    Label commentLabel;
    Label scaleLabel;
    // Current profile and the image it is being defined on.
    ProfileData profile;
    ImagePlus imp;

    public Synapse_() {
        super("Synapse");
        // Enforce a single open plugin frame.
        if (instance != null) {
            instance.toFront();
            return;
        }
        instance = this;
        profile = new ProfileData();
        IJ.register(Synapse_.class);
        setLayout(new FlowLayout());
        setBackground(SystemColor.control);
        panel = new Panel();
        panel.setLayout(new GridLayout(0, 1, 4, 1));
        panel.setBackground(SystemColor.control);
        addButton("Save profile");
        addButton("Clear profile");
        panel.add(new Label(""));
        panel.add(new Label("Define selection as:"));
        addButton("Presynaptic element");
        addButton("Postsynaptic element");
        addButton("Postsynaptic density");
        addButton("Hole");
        addButton("Points");
        panel.add(new Label(""));
        addButton("Place random points");
        panel.add(new Label(""));
        subPanelA = new Panel();
        panel.add(subPanelA);
        subPanelA.setLayout(new GridLayout(1, 2, 0, 0));
        subPanelA.add(new Label("Presyn. structure"));
        subPanelA.add(new Label("Postsyn. structure"));
        subPanelA.setVisible(true);
        subPanelB = new Panel();
        panel.add(subPanelB);
        subPanelB.setLayout(new GridLayout(1, 2, 0, 0));
        prestruct = new Choice();
        prestruct.add("Not specified");
        subPanelB.add(prestruct);
        postruct = new Choice();
        postruct.add("Not specified");
        subPanelB.add(postruct);
        subPanelB.setVisible(true);
        panel.add(new Label(""));
        panel.add(new Label("Delete profile components:"));
        addButton("Delete presynaptic element");
        addButton("Delete postsynaptic element");
        addButton("Delete postsynaptic density");
        addButton("Delete points");
        addButton("Delete random points");
        addButton("Delete selected component");
        panel.add(new Label(""));
        panel.add(new Label("Other:"));
        addButton("Add comment");
        panel.add(new Label(""));
        panel.add(new Label("Settings:"));
        addButton("Set profile n");
        addButton("Define presynaptic structure");
        addButton("Define postsynaptic structure");
        addButton("Options...");
        addButton("About...");
        add(panel);
        pack();
        setVisible(true);
        // Build the "Profile info" window, placed directly below this frame.
        infoFrame = new Frame("Profile info");
        infoPanel = new GridBagLayout();
        Font f = new Font("Helvetica", 0, 10);
        infoFrame.setFont(f);
        infoFrame.setBackground(SystemColor.control);
        // Fix: the vertical position must be based on the frame's y
        // coordinate; the original used getLocation().x here.
        infoFrame.setLocation(0, instance.getLocation().y + instance.getSize().height + 3);
        infoFrame.setIconImage(instance.getIconImage());
        infoFrame.setResizable(false);
        c = new GridBagConstraints();
        c.fill = GridBagConstraints.BOTH;
        c.weightx = 1.0;
        addStaticInfoLabel("Profile n:");
        profile_nLabel = new Label(IJ.d2s(profile.ntot, 0), Label.RIGHT);
        addVarInfoLabel(profile_nLabel);
        addStaticInfoLabel("Presynaptic element nodes:");
        preelnLabel = new Label("0", Label.RIGHT);
        addVarInfoLabel(preelnLabel);
        addStaticInfoLabel("Postsynaptic element nodes:");
        poselnLabel = new Label("0", Label.RIGHT);
        addVarInfoLabel(poselnLabel);
        addStaticInfoLabel("Postsynaptic densities:");
        psdnLabel = new Label("0", Label.RIGHT);
        addVarInfoLabel(psdnLabel);
        addStaticInfoLabel("Points:");
        pnLabel = new Label("0", Label.RIGHT);
        addVarInfoLabel(pnLabel);
        addStaticInfoLabel("Holes:");
        holenLabel = new Label("0", Label.RIGHT);
        addVarInfoLabel(holenLabel);
        addStaticInfoLabel("Random points:");
        randomPlacedLabel = new Label("no", Label.RIGHT);
        addVarInfoLabel(randomPlacedLabel);
        addStaticInfoLabel("Pixel width:");
        scaleLabel = new Label("N/D", Label.RIGHT);
        addVarInfoLabel(scaleLabel);
        addStaticInfoLabel("Comment:");
        commentLabel = new Label("", Label.RIGHT);
        addVarInfoLabel(commentLabel);
        infoFrame.setLayout(infoPanel);
        infoFrame.pack();
        infoFrame.setSize(instance.getSize().width, infoFrame.getSize().height);
        infoFrame.setVisible(true);
        instance.requestFocus();
    }

    /** Adds a command button wired to this frame's action listener. */
    void addButton(String label) {
        Button b = new Button(label);
        b.addActionListener(this);
        panel.add(b);
    }

    /** Adds a fixed description label to the info window (left column). */
    void addStaticInfoLabel(String label) {
        Label l = new Label(label, Label.LEFT);
        c.gridwidth = 1;
        infoPanel.setConstraints(l, c);
        infoFrame.add(l);
    }

    /** Adds a value label to the info window (right column, ends the row). */
    void addVarInfoLabel(Label l) {
        c.gridwidth = GridBagConstraints.REMAINDER;
        infoPanel.setConstraints(l, c);
        infoFrame.add(l);
    }

    /** Returns the current segmented-line selection, or null (with an error dialog). */
    PolygonRoi getPolylineRoi(ImagePlus imp) {
        Roi roi = imp.getRoi();
        if (roi == null || roi.getType() != Roi.POLYLINE) {
            IJ.error("Segmented line selection required.");
            return null;
        } else {
            return (PolygonRoi) roi;
        }
    }

    /** Returns the current polygon selection, or null (with an error dialog). */
    PolygonRoi getPolygonRoi(ImagePlus imp) {
        Roi roi = imp.getRoi();
        if (roi == null || roi.getType() != Roi.POLYGON) {
            IJ.error("Synapse", "Polygon selection required.");
            return null;
        } else {
            return (PolygonRoi) roi;
        }
    }

    /** Returns the current point selection, or null (with an error dialog). */
    PolygonRoi getPointRoi(ImagePlus imp) {
        Roi roi = imp.getRoi();
        if (roi == null || roi.getType() != Roi.POINT) {
            IJ.error("Point selection required.");
            return null;
        } else {
            return (PolygonRoi) roi;
        }
    }

    /** Refreshes the info window from the current profile and image calibration. */
    void updateInfoPanel() {
        double pixelwidth;
        String unit;
        profile_nLabel.setText(IJ.d2s(profile.ntot, 0));
        pnLabel.setText(IJ.d2s(profile.getNumPoints("points"), 0));
        preelnLabel.setText(IJ.d2s(profile.getNumPoints("presynaptic element"), 0));
        poselnLabel.setText(IJ.d2s(profile.getNumPoints("postsynaptic element"), 0));
        psdnLabel.setText(IJ.d2s(profile.getNum("postsynaptic density"), 0));
        holenLabel.setText(IJ.d2s(profile.getNum("hole"), 0));
        if (profile.overlay.getIndex("random points") != -1) {
            randomPlacedLabel.setText("yes");
        } else {
            randomPlacedLabel.setText("no");
        }
        Calibration c = imp.getCalibration();
        // Display micron-calibrated images in nm for convenience.
        if (c.getUnit().equals("micron")) {
            pixelwidth = c.pixelWidth * 1000;
            unit = "nm";
        } else {
            pixelwidth = c.pixelWidth;
            unit = c.getUnit();
        }
        scaleLabel.setText(IJ.d2s(pixelwidth, 2) + " " + unit);
        commentLabel.setText(profile.comment);
    }

    /** Returns true if an image is open; otherwise beeps and reports status. */
    public boolean isImage(ImagePlus imp) {
        if (imp == null) {
            IJ.beep();
            IJ.showStatus("No image");
            return false;
        }
        return true;
    }

    /** Dispatches all button commands of the plugin frame. */
    public void actionPerformed(ActionEvent e) {
        PolygonRoi p;
        Polygon randomPol;
        PointRoi randomRoi;
        int i, x, y;
        String s;
        String command = e.getActionCommand();
        if (command == null) {
            return;
        }
        imp = WindowManager.getCurrentImage();
        // Fix: the original dereferenced imp before its null check, so any
        // button press without an open image threw a NullPointerException.
        if (imp != null) {
            imp.setOverlay(profile.overlay);
            if (imp.getType() != ImagePlus.COLOR_RGB) {
                // Convert to RGB so colored components can be burned in on save.
                imp.setProcessor(imp.getTitle(), imp.getProcessor().convertToRGB());
            }
        }
        if (command.equals("Save profile")) {
            if (!isImage(imp)) {
                return;
            }
            if (!profile.dirty) {
                IJ.showMessage("Nothing to save.");
            } else {
                // Require explicit structure selections when choices exist.
                if (prestruct.getItemCount() > 1) {
                    profile.preProfile = prestruct.getSelectedItem();
                    if (profile.preProfile.equals("Not specified")) {
                        IJ.showMessage("Presynaptic structure not specified.");
                        return;
                    }
                }
                if (postruct.getItemCount() > 1) {
                    profile.poProfile = postruct.getSelectedItem();
                    if (profile.poProfile.equals("Not specified")) {
                        IJ.showMessage("Postsynaptic structure not specified.");
                        return;
                    }
                }
                boolean saved = profile.save(imp);
                if (saved) {
                    profile.clear();
                }
            }
        }
        if (command.equals("Clear profile")) {
            if (!isImage(imp)) {
                return;
            }
            if (profile.dirty) {
                // Offer to save unsaved work before discarding it.
                YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                        "Profile", "Profile has not been saved. Save to file?");
                if (d.yesPressed()) {
                    profile.dirty = !profile.save(imp);
                } else if (!d.cancelPressed()) {
                    profile.dirty = false;
                }
            }
            if (!profile.dirty) {
                profile.clear();
                IJ.showStatus("Profile cleared.");
            }
        }
        if (command.equals("Presynaptic element")) {
            if (!isImage(imp) || !profile.isSameImage(imp) ||
                    profile.isDefined("presynaptic element", "Presynaptic element")) {
                return;
            }
            if ((p = getPolylineRoi(imp)) != null) {
                p.setName("presynaptic element");
                p.setStrokeColor(prselCol);
                profile.overlay.add(p);
                profile.dirty = true;
            }
        }
        if (command.equals("Postsynaptic element")) {
            if (!isImage(imp) || !profile.isSameImage(imp) ||
                    profile.isDefined("postsynaptic element", "Postsynaptic element")) {
                return;
            }
            if ((p = getPolylineRoi(imp)) != null) {
                p.setName("postsynaptic element");
                p.setStrokeColor(poselCol);
                profile.overlay.add(p);
                profile.dirty = true;
            }
        }
        if (command.equals("Postsynaptic density")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            // Multiple PSDs are allowed; confirm before adding another.
            if (profile.getNum("postsynaptic density") != 0) {
                YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                        "Synapse", "PSD already contains coordinates. Add " +
                        "a new PSD? To overwrite, cancel and delete old instance first.");
                if (d.cancelPressed()) {
                    return;
                }
            }
            if ((p = getPolylineRoi(imp)) != null) {
                p.setName("postsynaptic density");
                p.setStrokeColor(psdCol);
                profile.overlay.add(p);
                profile.dirty = true;
            }
        }
        if (command.equals("Hole")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            if ((p = getPolygonRoi(imp)) != null) {
                p.setName("hole");
                p.setStrokeColor(holeCol);
                profile.overlay.add(p);
                profile.dirty = true;
            }
        }
        if (command.equals("Points")) {
            if (!isImage(imp) || !profile.isSameImage(imp) ||
                    profile.isDefined("points", "Points")) {
                return;
            }
            if ((p = getPointRoi(imp)) != null) {
                p.setName("points");
                p.setStrokeColor(pointCol);
                profile.overlay.add(p);
                profile.dirty = true;
            }
        }
        if (command.equals("Place random points")) {
            if (!isImage(imp) || !profile.isSameImage(imp) ||
                    profile.overlay.getIndex("random points") != -1) {
                return;
            }
            // Scatter profile.randompn uniform random points over the image.
            Random rnd = new Random();
            randomPol = new Polygon();
            for (i = 0; i < profile.randompn; i++) {
                x = rnd.nextInt(imp.getWidth() - 1) + 1;
                y = rnd.nextInt(imp.getHeight() - 1) + 1;
                randomPol.addPoint(x, y);
            }
            randomRoi = new PointRoi(randomPol);
            randomRoi.setHideLabels(true);
            randomRoi.setName("random points");
            randomRoi.setStrokeColor(randomCol);
            profile.overlay.add(randomRoi);
        }
        if (command.equals("Delete presynaptic element")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            profile.deleteNamedComponent(imp, "presynaptic element");
        }
        if (command.equals("Delete postsynaptic element")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            profile.deleteNamedComponent(imp, "postsynaptic element");
        }
        if (command.equals("Delete postsynaptic density")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            profile.deleteNamedComponent(imp, "postsynaptic density");
        }
        if (command.equals("Delete points")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            profile.deleteNamedComponent(imp, "points");
        }
        if (command.equals("Delete random points")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            profile.deleteNamedComponent(imp, "random points");
        }
        if (command.equals("Delete selected component")) {
            // Fix: guard against a missing image before reading the ROI.
            if (isImage(imp) && imp.getRoi() != null) {
                profile.deleteSelectedComponent(imp);
            }
        }
        if (command.equals("Set profile n")) {
            s = IJ.getString("Set profile n", IJ.d2s(profile.ntot, 0));
            // Fix: the original parseInt crashed on cancel or non-numeric input.
            try {
                if (!s.equals("")) {
                    profile.ntot = Integer.parseInt(s.trim());
                }
            } catch (NumberFormatException nfe) {
                IJ.error("Profile n must be an integer.");
            }
        }
        if (command.equals("Define postsynaptic structure")) {
            s = IJ.getString("Define postsynaptic structure", "");
            if (!s.equals("")) {
                // A name entered twice offers removal instead of duplication.
                for (i = 0; i < postruct.getItemCount(); i++) {
                    if (s.equals(postruct.getItem(i))) {
                        YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                                "Synapse", "Postsynaptic structure '" + s
                                + "' already defined. Remove?");
                        if (d.yesPressed()) {
                            postruct.remove(i);
                        }
                        return;
                    }
                }
                postruct.add(s);
            }
        }
        if (command.equals("Define presynaptic structure")) {
            s = IJ.getString("Define presynaptic structure", "");
            if (!s.equals("")) {
                for (i = 0; i < prestruct.getItemCount(); i++) {
                    if (s.equals(prestruct.getItem(i))) {
                        YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                                "Synapse", "Presynaptic structure '" + s
                                + "' already defined. Remove?");
                        if (d.yesPressed()) {
                            prestruct.remove(i);
                        }
                        return;
                    }
                }
                prestruct.add(s);
            }
        }
        if (command.equals("Add comment")) {
            if (!isImage(imp) || !profile.isSameImage(imp)) {
                return;
            }
            s = IJ.getString("Comment: ", profile.comment);
            if (!s.equals("")) {
                profile.comment = s;
                profile.dirty = true;
            }
        }
        if (command.equals("Options...")) {
            GenericDialog gd = new GenericDialog("Options");
            gd.setInsets(0, 0, 0);
            gd.addMessage("Random particles:");
            gd.addNumericField("Random particle n:", profile.randompn, 0);
            gd.showDialog();
            if (gd.wasCanceled())
                return;
            profile.randompn = (int) gd.getNextNumber();
            if (profile.randompn <= 0) {
                IJ.error("Random point n must be larger than 0. Reverting to default value (40).");
                profile.randompn = 40;
            }
        }
        if (command.equals("About...")) {
            String aboutHtml = String.format("<html><p><strong>%s" +
                    "</strong></p><br />" +
                    "<p>VersionSyn %s</p><br />" +
                    "<p>Last modified %s %s, %s.</p>" +
                    "<p> Copyright 2001 - %s %s.</p>" +
                    "<p> Released under the MIT license. " +
                    "</p><br />" +
                    "<p>E-mail: %s</p>" +
                    "<p>Web: %s</p><br /></html>",
                    VersionSyn.title,
                    VersionSyn.version,
                    VersionSyn.month,
                    VersionSyn.day,
                    VersionSyn.year,
                    VersionSyn.year,
                    VersionSyn.author,
                    VersionSyn.email,
                    VersionSyn.homepage);
            new HTMLDialog(VersionSyn.title, aboutHtml);
        }
        // Refresh the info window and redraw only when an image is open;
        // updateInfoPanel() reads imp's calibration and would NPE otherwise.
        if (imp != null) {
            updateInfoPanel();
            imp.updateAndDraw();
        }
        IJ.showStatus("");
    }

    /** Closes the companion info window when the main frame is closed. */
    public void processWindowEvent(WindowEvent e) {
        super.processWindowEvent(e);
        if (e.getID() == WindowEvent.WINDOW_CLOSING) {
            infoFrame.dispose();
            infoFrame = null;
            instance = null;
        }
    }
} // end of Synapse_
/**
 * Holds one synapse profile (a named set of ROIs in an Overlay) and knows how
 * to sanity-check it, save it as a ".syn" text file, and burn the annotated
 * components plus the profile ID into the image.
 */
class ProfileData implements Options_Syn {
    // True when the profile holds unsaved changes.
    boolean dirty;
    // All profile components (ROIs), looked up by ROI name.
    Overlay overlay;
    // n: per-image save counter; ntot: running profile number;
    // randompn: number of random points to place; i: legacy index field,
    // kept so the class layout is unchanged (methods below use local indices
    // instead — sharing a loop index through a field was fragile).
    int n, ntot, randompn, i;
    // ID of the image the current profile belongs to (0 = none yet).
    int imgID;
    String comment, preProfile, poProfile, prevImg, ID;

    ProfileData() {
        this.n = 0;
        this.ntot = 1;
        this.prevImg = "";
        this.imgID = 0;
        this.dirty = false;
        this.overlay = new Overlay();
        this.comment = "";
        this.randompn = 200;
        this.preProfile = "";
        this.poProfile = "";
        this.ID = "";
    }

    // Returns number of ROIs named 'name' in the overlay. Returns 0 if ROI not found in overlay.
    public int getNum(String name) {
        int count = 0;
        for (int idx = 0; idx < this.overlay.size(); idx++) {
            if (this.overlay.get(idx).getName().equals(name)) {
                count++;
            }
        }
        return count;
    }

    // Returns number of points in ROI named 'name' in the overlay. Returns 0 if ROI not found in overlay.
    public int getNumPoints(String name) {
        if (this.overlay.getIndex(name) == -1) {
            return 0;
        } else {
            return this.overlay.get(this.overlay.getIndex(name)).getPolygon().npoints;
        }
    }

    /** Removes the ROI matching the image's current selection, after confirmation. */
    public void deleteSelectedComponent(ImagePlus imp) {
        if (!this.overlay.contains(imp.getRoi())) {
            IJ.error("The current selection does not define a profile component.");
        } else {
            YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                    "Synapse", "Delete " + imp.getRoi().getName() + "?");
            if (d.yesPressed()) {
                this.overlay.remove(imp.getRoi());
                imp.deleteRoi();
            }
        }
    }

    /** Removes the first ROI named 'name', after confirmation. */
    public void deleteNamedComponent(ImagePlus imp, String name) {
        if (this.overlay.getIndex(name) == -1) {
            IJ.error("No " + name + " defined.");
        } else {
            YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                    "Synapse", "Delete " + name + "?");
            if (d.yesPressed()) {
                this.overlay.remove(this.overlay.getIndex(name));
            }
        }
    }

    /**
     * Binds the profile to the first image it is edited on and rejects
     * measurements on a different image until the profile is saved/cleared.
     */
    public boolean isSameImage(ImagePlus imp) {
        if (!this.dirty || this.imgID == 0) {
            this.imgID = imp.getID();
            return true;
        } else if (this.imgID == imp.getID()) {
            return true;
        } else {
            IJ.showMessage("All measurements must be performed on the same " +
                           "image.");
            return false;
        }
    }

    /** True (plus error dialog) when a component named 'name' already exists. */
    public boolean isDefined(String name, String errstr) {
        if (this.overlay.getIndex(name) != -1) {
            IJ.error(errstr + " already defined. Please delete old instance first.");
            return true;
        }
        return false;
    }

    /**
     * Validates the profile before saving: scale and both synaptic elements
     * are mandatory; a missing PSD or missing points only warn.
     */
    private boolean checkProfileData(ImagePlus imp) {
        String[] warnstr, errstr;
        int idx, nwarn = 0, nerr = 0;
        warnstr = new String[9];
        errstr = new String[9];
        Calibration c = imp.getCalibration();
        if (c.getUnit().equals(" ")) {
            errstr[nerr++] = "The scale has not been set.";
        }
        if (this.getNumPoints("presynaptic element") == 0) {
            errstr[nerr++] = "Presynaptic element not defined.";
        }
        if (this.getNumPoints("postsynaptic element") == 0) {
            errstr[nerr++] = "Postsynaptic element not defined.";
        }
        if (this.getNum("postsynaptic density") == 0) {
            warnstr[nwarn++] = "Postsynaptic density not defined.";
        }
        if (this.getNumPoints("points") == 0) {
            warnstr[nwarn++] = "No point coordinates defined.";
        }
        if (nerr > 0) {
            IJ.error("Synapse", "Error: " + errstr[0]);
            return false;
        }
        if (nwarn > 0) {
            for (idx = 0; idx < nwarn; idx++) {
                YesNoCancelDialog d = new YesNoCancelDialog(imp.getWindow(),
                        "Synapse", "Warning: " + warnstr[idx] + " Continue anyway?");
                if (!d.yesPressed()) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Writes the profile to a ".syn" text file chosen by the user, burns the
     * components and ID into the image and offers to save the annotated TIFF.
     * Returns true on success, false on validation failure, cancel or I/O error.
     */
    public boolean save(ImagePlus imp) {
        int i, j;
        double pixelwidth;
        String s, unit;
        Polygon pol;
        IJ.showStatus("Saving profile...");
        if (!checkProfileData(imp)) {
            return false;
        }
        Calibration c = imp.getCalibration();
        if (c.pixelWidth != c.pixelHeight) {
            IJ.showMessage("Warning: pixel aspect ratio is not 1. " +
                           "Only pixel WIDTH is used.");
        }
        // Fix: close the writer in a finally block so an I/O error mid-write
        // does not leak the file handle; also report instead of silently
        // swallowing exceptions.
        PrintWriter outf = null;
        try {
            // Restart the per-image counter when the image changes.
            if (!imp.getTitle().equals(this.prevImg)) {
                this.n = 0;
                this.prevImg = imp.getTitle();
            }
            this.n++;
            s = IJ.getString("Profile ID: ", IJ.d2s(this.ntot, 0));
            if (!s.equals("")) {
                this.ID = s;
            }
            SaveDialog sd = new SaveDialog("Save profile",
                    imp.getTitle() + "." + IJ.d2s(this.n, 0), ".syn");
            if (sd.getFileName() == null) {
                this.n--; // cancelled: roll back the per-image counter
                return false;
            }
            outf = new PrintWriter(new BufferedWriter(
                    new FileWriter(sd.getDirectory() + sd.getFileName())));
            String versionInfo = String.format("# %s version %s (%s %s, %s)",
                    VersionSyn.title,
                    VersionSyn.version,
                    VersionSyn.month,
                    VersionSyn.day,
                    VersionSyn.year);
            outf.println(versionInfo);
            outf.println("IMAGE " + imp.getTitle());
            outf.println("PROFILE_ID " + this.ID);
            outf.println("COMMENT " + this.comment);
            // Micron-calibrated images are written in nm.
            if (c.getUnit().equals("micron")) {
                pixelwidth = c.pixelWidth * 1000;
                unit = "nm";
            } else {
                pixelwidth = c.pixelWidth;
                unit = c.getUnit();
            }
            outf.println("PIXELWIDTH " + IJ.d2s(pixelwidth) + " " + unit);
            outf.println("PRESYNAPTIC_PROFILE " + this.preProfile);
            outf.println("POSTSYNAPTIC_PROFILE " + this.poProfile);
            outf.println("POSTSYNAPTIC_ELEMENT");
            pol = this.overlay.get(this.overlay.getIndex("postsynaptic element")).getPolygon();
            for (i = 0; i < pol.npoints; i++) {
                outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
            }
            outf.println("END");
            outf.println("PRESYNAPTIC_ELEMENT");
            pol = this.overlay.get(this.overlay.getIndex("presynaptic element")).getPolygon();
            for (i = 0; i < pol.npoints; i++) {
                outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
            }
            outf.println("END");
            // There may be several PSDs and holes; write each as its own block.
            for (j = 0; j < this.overlay.size(); j++) {
                if (this.overlay.get(j).getName().equals("postsynaptic density")) {
                    outf.println("POSTSYNAPTIC_DENSITY");
                    pol = this.overlay.get(j).getPolygon();
                    for (i = 0; i < pol.npoints; i++)
                        outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
                    outf.println("END");
                }
            }
            for (j = 0; j < this.overlay.size(); j++) {
                if (this.overlay.get(j).getName().equals("hole")) {
                    outf.println("HOLE");
                    pol = this.overlay.get(j).getPolygon();
                    for (i = 0; i < pol.npoints; i++)
                        outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
                    outf.println("END");
                }
            }
            if (this.overlay.getIndex("points") != -1) {
                outf.println("POINTS");
                pol = this.overlay.get(this.overlay.getIndex("points")).getPolygon();
                for (i = 0; i < pol.npoints; i++) {
                    outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
                }
                outf.println("END");
            }
            if (this.overlay.getIndex("random points") != -1) {
                outf.println("RANDOM_POINTS");
                pol = this.overlay.get(this.overlay.getIndex("random points")).getPolygon();
                for (i = 0; i < pol.npoints; i++) {
                    outf.println(" " + IJ.d2s(pol.xpoints[i], 0) + ", " + IJ.d2s(pol.ypoints[i], 0));
                }
                outf.println("END");
            }
        } catch (Exception e) {
            IJ.log("Synapse: error while saving profile: " + e);
            return false;
        } finally {
            if (outf != null) {
                outf.close();
            }
        }
        // Burn the annotations into the image and offer to save it as TIFF.
        writeIDtext(imp);
        drawComponents(imp);
        this.ntot++;
        SaveDialog sd = new SaveDialog("Save analyzed image",
                imp.getShortTitle(),
                ".a.tif");
        if (sd.getFileName() != null) {
            FileSaver saveTiff = new FileSaver(imp);
            saveTiff.saveAsTiff(sd.getDirectory() + sd.getFileName());
        }
        return true;
    }

    /** Burns every overlay component into the image; points become small crosses. */
    private void drawComponents(ImagePlus imp) {
        Polygon pol;
        int n, x, y;
        for (n = 0; n < this.overlay.size(); n++) {
            imp.setColor(this.overlay.get(n).getStrokeColor());
            if (this.overlay.get(n).getName().equals("points")) {
                pol = this.overlay.get(n).getPolygon();
                // Fix: use a local index instead of the shared instance field 'i'.
                for (int k = 0; k < pol.npoints; k++) {
                    x = pol.xpoints[k];
                    y = pol.ypoints[k];
                    imp.getProcessor().drawLine(x - 3, y, x + 3, y);
                    imp.getProcessor().drawLine(x, y - 3, x, y + 3);
                }
            } else {
                this.overlay.get(n).drawPixels(imp.getProcessor());
            }
        }
    }

    /** Returns the topmost polygon vertex; on a tie in y, the leftmost one. */
    private Point findxy(Polygon pol, ImagePlus imp) {
        int miny = imp.getHeight();
        int x = imp.getWidth();
        // Fix: the original tie-breaking test was nested inside the '<' branch
        // right after the assignments, making it unreachable dead code.
        for (int k = 0; k < pol.npoints; k++) {
            if (pol.ypoints[k] < miny
                    || (pol.ypoints[k] == miny && pol.xpoints[k] < x)) {
                miny = pol.ypoints[k];
                x = pol.xpoints[k];
            }
        }
        return new Point(x, miny);
    }

    /** Draws the profile ID just above the postsynaptic element's top vertex. */
    private void writeIDtext(ImagePlus imp) {
        TextRoi profileLabel;
        Polygon pol;
        Point p;
        int locx, locy, size;
        // Fix: the ID was copied into 'label' character by character for no reason.
        String label = this.ID;
        size = imp.getHeight() / 42; // adjust font size for image size (by an arbitrary factor)
        TextRoi.setFont(TextRoi.getFont(), size, Font.BOLD);
        profileLabel = new TextRoi(0, 0, label);
        profileLabel.setAntialiased(true);
        pol = this.overlay.get(this.overlay.getIndex("postsynaptic element")).getPolygon();
        p = findxy(pol, imp);
        locy = p.y - profileLabel.getBounds().height;
        locx = p.x - profileLabel.getBounds().width;
        if (locx < 0) locx = 3;
        if (locy < 0) locy = 3;
        profileLabel.setLocation(locx, locy);
        imp.setColor(textCol);
        profileLabel.drawPixels(imp.getProcessor());
        imp.setColor(Color.black);
    }

    /** Resets the profile to an empty, clean state. */
    public void clear() {
        this.dirty = false;
        this.overlay.clear();
        this.comment = "";
        this.preProfile = "";
        this.poProfile = "";
    }

    /**
     * Main method for debugging.
     *
     * For debugging, it is convenient to have a method that starts ImageJ, loads an
     * image and calls the plugin, e.g. after setting breakpoints.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // set the plugins.dir property to make the plugin appear in the Plugins menu
        Class<?> cl = Synapse_.class;
        String url = cl.getResource("/" + cl.getName().replace('.', '/') + ".class").toString();
        String pluginsDir = url.substring(5, url.length() - cl.getName().length() - 6);
        System.setProperty("plugins.dir", pluginsDir);
        // start ImageJ
        new ImageJ();
        // Fix: IJ.openImage("") returns null for the placeholder path; guard
        // before show() so the debug entry point does not NPE.
        ImagePlus image = IJ.openImage("");
        if (image != null) {
            image.show();
        }
        // run the plugin
        IJ.runPlugIn(cl.getName(), "");
    }
} // end of ProfileData
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@InterfaceAudience.Private
public class SyncTable extends Configured implements Tool {
private static final Logger LOG = LoggerFactory.getLogger(SyncTable.class);
// Configuration keys used to hand the parsed command-line options to the mappers.
static final String SOURCE_HASH_DIR_CONF_KEY = "sync.table.source.hash.dir";
static final String SOURCE_TABLE_CONF_KEY = "sync.table.source.table.name";
static final String TARGET_TABLE_CONF_KEY = "sync.table.target.table.name";
static final String SOURCE_ZK_CLUSTER_CONF_KEY = "sync.table.source.zk.cluster";
static final String TARGET_ZK_CLUSTER_CONF_KEY = "sync.table.target.zk.cluster";
static final String DRY_RUN_CONF_KEY = "sync.table.dry.run";
static final String DO_DELETES_CONF_KEY = "sync.table.do.deletes";
static final String DO_PUTS_CONF_KEY = "sync.table.do.puts";
static final String IGNORE_TIMESTAMPS = "sync.table.ignore.timestamps";
// Command-line state, populated by doCommandLine().
Path sourceHashDir;          // output dir of a prior HashTable run on the source table
String sourceTableName;
String targetTableName;
String sourceZkCluster;      // ZK cluster key of the source cluster, or null for default
String targetZkCluster;      // ZK cluster key of the target cluster, or null for default
boolean dryRun;              // when true, count differences but write nothing
boolean doDeletes = true;    // when false, never emit Deletes to the target
boolean doPuts = true;       // when false, never emit Puts to the target
boolean ignoreTimestamps;    // when true, compare cells ignoring their timestamps
Counters counters;           // counters of the finished job, set by run() on success
/**
 * Creates the tool with the given Hadoop configuration.
 *
 * @param conf base configuration used for both the job and cluster connections
 */
public SyncTable(Configuration conf) {
  super(conf);
}
/**
 * Obtains HBase delegation tokens for the given peer cluster when that
 * cluster is secured with Kerberos; a no-op otherwise.
 *
 * @param zookeeper ZK cluster key identifying the peer cluster
 * @param job       job whose credentials receive the tokens
 * @throws IOException if token acquisition fails
 */
private void initCredentialsForHBase(String zookeeper, Job job) throws IOException {
  Configuration peerConf =
      HBaseConfiguration.createClusterConf(job.getConfiguration(), zookeeper);
  boolean kerberosEnabled =
      "kerberos".equalsIgnoreCase(peerConf.get("hbase.security.authentication"));
  if (kerberosEnabled) {
    TableMapReduceUtil.initCredentialsForCluster(job, peerConf);
  }
}
/**
 * Builds the SyncTable MapReduce job: validates the HashTable manifest,
 * propagates the parsed options into the job configuration, wires up the
 * mapper over the target table, and sets up output (or no output on dry run).
 * Note: the {@code args} parameter is currently unused; options are read from
 * the fields populated by {@code doCommandLine}.
 *
 * @param args unused
 * @return the configured, ready-to-submit job
 * @throws IOException if the hash dir is missing or the manifest is corrupt
 */
public Job createSubmittableJob(String[] args) throws IOException {
  FileSystem fs = sourceHashDir.getFileSystem(getConf());
  if (!fs.exists(sourceHashDir)) {
    throw new IOException("Source hash dir not found: " + sourceHashDir);
  }
  Job job = Job.getInstance(getConf(),getConf().get("mapreduce.job.name",
      "syncTable_" + sourceTableName + "-" + targetTableName));
  Configuration jobConf = job.getConfiguration();
  // On secure HDFS, obtain tokens for the namenode(s) holding the hash data.
  if ("kerberos".equalsIgnoreCase(jobConf.get("hadoop.security.authentication"))) {
    TokenCache.obtainTokensForNamenodes(job.getCredentials(), new
        Path[] { sourceHashDir }, getConf());
  }
  HashTable.TableHash tableHash = HashTable.TableHash.read(getConf(), sourceHashDir);
  LOG.info("Read source hash manifest: " + tableHash);
  LOG.info("Read " + tableHash.partitions.size() + " partition keys");
  // Sanity check 1: the manifest should have been produced from the table we read.
  if (!tableHash.tableName.equals(sourceTableName)) {
    LOG.warn("Table name mismatch - manifest indicates hash was taken from: "
        + tableHash.tableName + " but job is reading from: " + sourceTableName);
  }
  // Sanity check 2: one hash file per partition, plus one for the trailing range.
  if (tableHash.numHashFiles != tableHash.partitions.size() + 1) {
    throw new RuntimeException("Hash data appears corrupt. The number of of hash files created"
        + " should be 1 more than the number of partition keys. However, the manifest file "
        + " says numHashFiles=" + tableHash.numHashFiles + " but the number of partition keys"
        + " found in the partitions file is " + tableHash.partitions.size());
  }
  // Sanity check 3: the data dirs on disk must match the manifest's hash file count.
  Path dataDir = new Path(sourceHashDir, HashTable.HASH_DATA_DIR);
  int dataSubdirCount = 0;
  for (FileStatus file : fs.listStatus(dataDir)) {
    if (file.getPath().getName().startsWith(HashTable.OUTPUT_DATA_FILE_PREFIX)) {
      dataSubdirCount++;
    }
  }
  if (dataSubdirCount != tableHash.numHashFiles) {
    throw new RuntimeException("Hash data appears corrupt. The number of of hash files created"
        + " should be 1 more than the number of partition keys. However, the number of data dirs"
        + " found is " + dataSubdirCount + " but the number of partition keys"
        + " found in the partitions file is " + tableHash.partitions.size());
  }
  job.setJarByClass(HashTable.class);
  // Pass all parsed options through the job configuration to the mappers.
  jobConf.set(SOURCE_HASH_DIR_CONF_KEY, sourceHashDir.toString());
  jobConf.set(SOURCE_TABLE_CONF_KEY, sourceTableName);
  jobConf.set(TARGET_TABLE_CONF_KEY, targetTableName);
  if (sourceZkCluster != null) {
    jobConf.set(SOURCE_ZK_CLUSTER_CONF_KEY, sourceZkCluster);
    initCredentialsForHBase(sourceZkCluster, job);
  }
  if (targetZkCluster != null) {
    jobConf.set(TARGET_ZK_CLUSTER_CONF_KEY, targetZkCluster);
    initCredentialsForHBase(targetZkCluster, job);
  }
  jobConf.setBoolean(DRY_RUN_CONF_KEY, dryRun);
  jobConf.setBoolean(DO_DELETES_CONF_KEY, doDeletes);
  jobConf.setBoolean(DO_PUTS_CONF_KEY, doPuts);
  jobConf.setBoolean(IGNORE_TIMESTAMPS, ignoreTimestamps);
  // The mapper scans the TARGET table and re-hashes it against the source hashes.
  TableMapReduceUtil.initTableMapperJob(targetTableName, tableHash.initScan(),
      SyncMapper.class, null, null, job);
  job.setNumReduceTasks(0);
  if (dryRun) {
    job.setOutputFormatClass(NullOutputFormat.class);
  } else {
    // No reducers. Just write straight to table. Call initTableReducerJob
    // because it sets up the TableOutputFormat.
    TableMapReduceUtil.initTableReducerJob(targetTableName, null, job, null,
        targetZkCluster, null, null);
    // would be nice to add an option for bulk load instead
  }
  // Obtain an authentication token, for the specified cluster, on behalf of the current user
  if (sourceZkCluster != null) {
    Configuration peerConf =
        HBaseConfiguration.createClusterConf(job.getConfiguration(), sourceZkCluster);
    TableMapReduceUtil.initCredentialsForCluster(job, peerConf);
  }
  return job;
}
public static class SyncMapper extends TableMapper<ImmutableBytesWritable, Mutation> {
// Per-task state, populated in setup() from the job configuration.
Path sourceHashDir;
Connection sourceConnection;
Connection targetConnection;
Table sourceTable;
Table targetTable;
boolean dryRun;
boolean doDeletes = true;
boolean doPuts = true;
boolean ignoreTimestamp;
HashTable.TableHash sourceTableHash;       // manifest of the source-side hashes
HashTable.TableHash.Reader sourceHashReader;
ImmutableBytesWritable currentSourceHash;  // hash of the batch currently being rebuilt
ImmutableBytesWritable nextSourceKey;      // start key of the next batch; null after the last
HashTable.ResultHasher targetHasher;       // re-hashes the target rows for comparison
Throwable mapperException;                 // first error seen in map(); rethrown in cleanup()
// Counters reported by this job; names describe differences found between clusters.
public static enum Counter { BATCHES, HASHES_MATCHED, HASHES_NOT_MATCHED, SOURCEMISSINGROWS,
  SOURCEMISSINGCELLS, TARGETMISSINGROWS, TARGETMISSINGCELLS, ROWSWITHDIFFS, DIFFERENTCELLVALUES,
  MATCHINGROWS, MATCHINGCELLS, EMPTY_BATCHES, RANGESMATCHED, RANGESNOTMATCHED
}
@Override
protected void setup(Context context) throws IOException {
  // Read back the options that createSubmittableJob placed in the configuration,
  // open connections/tables for both clusters, and position the hash reader at
  // this split's start row.
  Configuration conf = context.getConfiguration();
  sourceHashDir = new Path(conf.get(SOURCE_HASH_DIR_CONF_KEY));
  sourceConnection = openConnection(conf, SOURCE_ZK_CLUSTER_CONF_KEY, null);
  targetConnection = openConnection(conf, TARGET_ZK_CLUSTER_CONF_KEY,
      TableOutputFormat.OUTPUT_CONF_PREFIX);
  sourceTable = openTable(sourceConnection, conf, SOURCE_TABLE_CONF_KEY);
  targetTable = openTable(targetConnection, conf, TARGET_TABLE_CONF_KEY);
  dryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
  doDeletes = conf.getBoolean(DO_DELETES_CONF_KEY, true);
  doPuts = conf.getBoolean(DO_PUTS_CONF_KEY, true);
  ignoreTimestamp = conf.getBoolean(IGNORE_TIMESTAMPS, false);
  sourceTableHash = HashTable.TableHash.read(conf, sourceHashDir);
  LOG.info("Read source hash manifest: " + sourceTableHash);
  LOG.info("Read " + sourceTableHash.partitions.size() + " partition keys");
  TableSplit split = (TableSplit) context.getInputSplit();
  ImmutableBytesWritable splitStartKey = new ImmutableBytesWritable(split.getStartRow());
  sourceHashReader = sourceTableHash.newReader(conf, splitStartKey);
  findNextKeyHashPair();
  // create a hasher, but don't start it right away
  // instead, find the first hash batch at or after the start row
  // and skip any rows that come before. they will be caught by the previous task
  targetHasher = new HashTable.ResultHasher();
  targetHasher.ignoreTimestamps = ignoreTimestamp;
}
/**
 * Opens a connection to the cluster named by the given configuration key.
 * When the key is unset, the connection targets the cluster described by
 * {@code conf} itself.
 *
 * @param conf             job configuration
 * @param zkClusterConfKey configuration key holding the ZK cluster key
 * @param configPrefix     optional prefix for cluster-specific overrides
 * @return an open connection; caller is responsible for closing it
 * @throws IOException if the connection cannot be established
 */
private static Connection openConnection(Configuration conf, String zkClusterConfKey,
    String configPrefix)
    throws IOException {
  String clusterKey = conf.get(zkClusterConfKey);
  Configuration clusterConf =
      HBaseConfiguration.createClusterConf(conf, clusterKey, configPrefix);
  return ConnectionFactory.createConnection(clusterConf);
}
/**
 * Opens the table whose name is stored under the given configuration key.
 *
 * @param connection       connection to obtain the table from
 * @param conf             configuration holding the table name
 * @param tableNameConfKey configuration key of the table name
 * @return the opened table; caller is responsible for closing it
 * @throws IOException if the table cannot be opened
 */
private static Table openTable(Connection connection, Configuration conf,
    String tableNameConfKey) throws IOException {
  TableName tableName = TableName.valueOf(conf.get(tableNameConfKey));
  return connection.getTable(tableName);
}
/**
 * Advances the source hash reader to the next key/hash pair.
 * When the reader is exhausted, {@code nextSourceKey} becomes null, meaning
 * the final hash batch extends to the end of the table.
 */
private void findNextKeyHashPair() throws IOException {
  if (sourceHashReader.next()) {
    nextSourceKey = sourceHashReader.getCurrentKey();
  } else {
    // no more keys - last hash goes to the end
    nextSourceKey = null;
  }
}
@Override
protected void map(ImmutableBytesWritable key, Result value, Context context)
    throws IOException, InterruptedException {
  try {
    // first, finish any hash batches that end before the scanned row
    while (nextSourceKey != null && key.compareTo(nextSourceKey) >= 0) {
      moveToNextBatch(context);
    }
    // next, add the scanned row (as long as we've reached the first batch)
    if (targetHasher.isBatchStarted()) {
      targetHasher.hashResult(value);
    }
  } catch (Throwable t) {
    // Remember the first failure so cleanup() can still close resources and
    // then rethrow it; propagate immediately for the framework as well.
    mapperException = t;
    Throwables.propagateIfInstanceOf(t, IOException.class);
    Throwables.propagateIfInstanceOf(t, InterruptedException.class);
    Throwables.propagate(t);
  }
}
/**
 * If there is an open hash batch, complete it and sync if there are diffs.
 * Then start a new batch at {@code nextSourceKey}, record that batch's
 * expected source hash, and advance the reader to the following key/hash pair.
 */
private void moveToNextBatch(Context context) throws IOException, InterruptedException {
  if (targetHasher.isBatchStarted()) {
    finishBatchAndCompareHashes(context);
  }
  targetHasher.startBatch(nextSourceKey);
  currentSourceHash = sourceHashReader.getCurrentHash();
  findNextKeyHashPair();
}
/**
 * Finish the currently open hash batch.
 * Compare the target hash to the given source hash.
 * If they do not match, then sync the covered key range.
 */
private void finishBatchAndCompareHashes(Context context)
    throws IOException, InterruptedException {
  targetHasher.finishBatch();
  context.getCounter(Counter.BATCHES).increment(1);
  if (targetHasher.getBatchSize() == 0) {
    context.getCounter(Counter.EMPTY_BATCHES).increment(1);
  }
  ImmutableBytesWritable targetHash = targetHasher.getBatchHash();
  if (targetHash.equals(currentSourceHash)) {
    context.getCounter(Counter.HASHES_MATCHED).increment(1);
  } else {
    context.getCounter(Counter.HASHES_NOT_MATCHED).increment(1);
    // The batch after the last source key runs to the manifest's stop row.
    ImmutableBytesWritable stopRow = nextSourceKey == null
        ? new ImmutableBytesWritable(sourceTableHash.stopRow)
        : nextSourceKey;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Hash mismatch. Key range: " + toHex(targetHasher.getBatchStartKey())
          + " to " + toHex(stopRow)
          + " sourceHash: " + toHex(currentSourceHash)
          + " targetHash: " + toHex(targetHash));
    }
    // Mismatch: rescan this key range cell-by-cell and emit fix-up mutations.
    syncRange(context, targetHasher.getBatchStartKey(), stopRow);
  }
}
/** Renders the wrapped byte slice (honouring offset and length) as hex for logging. */
private static String toHex(ImmutableBytesWritable bytes) {
  byte[] raw = bytes.get();
  return Bytes.toHex(raw, bytes.getOffset(), bytes.getLength());
}
// Shared empty scanner used when one side has no data for a row at all.
private static final CellScanner EMPTY_CELL_SCANNER
    = new CellScanner(Collections.<Result>emptyIterator());
/**
 * Rescan the given range directly from the source and target tables.
 * Count and log differences, and if this is not a dry run, output Puts and Deletes
 * to make the target table match the source table for this range.
 * Rows from both sides are merged in key order; a missing row on either side is
 * synced against the shared empty scanner.
 */
private void syncRange(Context context, ImmutableBytesWritable startRow,
    ImmutableBytesWritable stopRow) throws IOException, InterruptedException {
  Scan scan = sourceTableHash.initScan();
  scan.withStartRow(startRow.copyBytes());
  scan.withStopRow(stopRow.copyBytes());
  ResultScanner sourceScanner = sourceTable.getScanner(scan);
  CellScanner sourceCells = new CellScanner(sourceScanner.iterator());
  ResultScanner targetScanner = targetTable.getScanner(new Scan(scan));
  CellScanner targetCells = new CellScanner(targetScanner.iterator());
  boolean rangeMatched = true;
  byte[] nextSourceRow = sourceCells.nextRow();
  byte[] nextTargetRow = targetCells.nextRow();
  // Classic sorted-merge over the two row streams: equal keys are compared
  // cell-by-cell; a smaller key on one side means the other side lacks that row.
  while(nextSourceRow != null || nextTargetRow != null) {
    boolean rowMatched;
    int rowComparison = compareRowKeys(nextSourceRow, nextTargetRow);
    if (rowComparison < 0) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Target missing row: " + Bytes.toString(nextSourceRow));
      }
      context.getCounter(Counter.TARGETMISSINGROWS).increment(1);
      rowMatched = syncRowCells(context, nextSourceRow, sourceCells, EMPTY_CELL_SCANNER);
      nextSourceRow = sourceCells.nextRow(); // advance only source to next row
    } else if (rowComparison > 0) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Source missing row: " + Bytes.toString(nextTargetRow));
      }
      context.getCounter(Counter.SOURCEMISSINGROWS).increment(1);
      rowMatched = syncRowCells(context, nextTargetRow, EMPTY_CELL_SCANNER, targetCells);
      nextTargetRow = targetCells.nextRow(); // advance only target to next row
    } else {
      // current row is the same on both sides, compare cell by cell
      rowMatched = syncRowCells(context, nextSourceRow, sourceCells, targetCells);
      nextSourceRow = sourceCells.nextRow();
      nextTargetRow = targetCells.nextRow();
    }
    if (!rowMatched) {
      rangeMatched = false;
    }
  }
  sourceScanner.close();
  targetScanner.close();
  context.getCounter(rangeMatched ? Counter.RANGESMATCHED : Counter.RANGESNOTMATCHED)
      .increment(1);
}
/**
 * Iterates the cells of a {@link ResultScanner} grouped by row, coalescing
 * consecutive {@link Result}s that belong to the same row (which happens when
 * the scan uses batching). Maintains a one-Result look-ahead so a row boundary
 * can be detected without losing data.
 */
private static class CellScanner {
  private final Iterator<Result> results;
  private byte[] currentRow;           // row key of the row being iterated, or null
  private Result currentRowResult;     // Result currently served by nextCellInRow()
  private int nextCellInRow;           // index of the next cell within currentRowResult
  private Result nextRowResult;        // cached Result that starts the NEXT row
  public CellScanner(Iterator<Result> results) {
    this.results = results;
  }
  /**
   * Advance to the next row and return its row key.
   * Returns null iff there are no more rows.
   */
  public byte[] nextRow() {
    if (nextRowResult == null) {
      // no cached row - check scanner for more
      while (results.hasNext()) {
        nextRowResult = results.next();
        Cell nextCell = nextRowResult.rawCells()[0];
        if (currentRow == null
            || !Bytes.equals(currentRow, 0, currentRow.length, nextCell.getRowArray(),
            nextCell.getRowOffset(), nextCell.getRowLength())) {
          // found next row
          break;
        } else {
          // found another result from current row, keep scanning
          nextRowResult = null;
        }
      }
      if (nextRowResult == null) {
        // end of data, no more rows
        currentRowResult = null;
        currentRow = null;
        return null;
      }
    }
    // advance to cached result for next row
    currentRowResult = nextRowResult;
    nextCellInRow = 0;
    currentRow = currentRowResult.getRow();
    nextRowResult = null;
    return currentRow;
  }
  /**
   * Returns the next Cell in the current row or null iff none remain.
   */
  public Cell nextCellInRow() {
    if (currentRowResult == null) {
      // nothing left in current row
      return null;
    }
    Cell nextCell = currentRowResult.rawCells()[nextCellInRow];
    nextCellInRow++;
    // When this Result is exhausted, peek at the next Result: it may continue
    // the same row (batched scan) or begin the next row (cache it for nextRow()).
    if (nextCellInRow == currentRowResult.size()) {
      if (results.hasNext()) {
        Result result = results.next();
        Cell cell = result.rawCells()[0];
        if (Bytes.equals(currentRow, 0, currentRow.length, cell.getRowArray(),
            cell.getRowOffset(), cell.getRowLength())) {
          // result is part of current row
          currentRowResult = result;
          nextCellInRow = 0;
        } else {
          // result is part of next row, cache it
          nextRowResult = result;
          // current row is complete
          currentRowResult = null;
        }
      } else {
        // end of data
        currentRowResult = null;
      }
    }
    return nextCell;
  }
}
/**
 * When timestamps are being ignored, returns a copy of the source cell whose
 * timestamp is replaced with the current time (so the write to the target is
 * the newest version); otherwise returns the cell unchanged.
 */
private Cell checkAndResetTimestamp(Cell sourceCell){
  if (ignoreTimestamp) {
    sourceCell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setType(sourceCell.getType())
        .setRow(sourceCell.getRowArray(),
            sourceCell.getRowOffset(), sourceCell.getRowLength())
        .setFamily(sourceCell.getFamilyArray(),
            sourceCell.getFamilyOffset(), sourceCell.getFamilyLength())
        .setQualifier(sourceCell.getQualifierArray(),
            sourceCell.getQualifierOffset(), sourceCell.getQualifierLength())
        .setTimestamp(EnvironmentEdgeManager.currentTime())
        .setValue(sourceCell.getValueArray(),
            sourceCell.getValueOffset(), sourceCell.getValueLength()).build();
  }
  return sourceCell;
}
/**
 * Compare the cells for the given row from the source and target tables.
 * Count and log any differences.
 * If not a dry run, output a Put and/or Delete needed to sync the target table
 * to match the source table.
 *
 * @return true when the row matched exactly, false when any difference was found
 */
private boolean syncRowCells(Context context, byte[] rowKey, CellScanner sourceCells,
    CellScanner targetCells) throws IOException, InterruptedException {
  Put put = null;
  Delete delete = null;
  long matchingCells = 0;
  boolean matchingRow = true;
  Cell sourceCell = sourceCells.nextCellInRow();
  Cell targetCell = targetCells.nextCellInRow();
  // Sorted-merge over the two cell streams of this row.
  while (sourceCell != null || targetCell != null) {
    int cellKeyComparison = compareCellKeysWithinRow(sourceCell, targetCell);
    if (cellKeyComparison < 0) {
      // Cell exists only on the source: Put it to the target.
      if (LOG.isDebugEnabled()) {
        LOG.debug("Target missing cell: " + sourceCell);
      }
      context.getCounter(Counter.TARGETMISSINGCELLS).increment(1);
      matchingRow = false;
      if (!dryRun && doPuts) {
        if (put == null) {
          put = new Put(rowKey);
        }
        sourceCell = checkAndResetTimestamp(sourceCell);
        put.add(sourceCell);
      }
      sourceCell = sourceCells.nextCellInRow();
    } else if (cellKeyComparison > 0) {
      // Cell exists only on the target: Delete it from the target.
      if (LOG.isDebugEnabled()) {
        LOG.debug("Source missing cell: " + targetCell);
      }
      context.getCounter(Counter.SOURCEMISSINGCELLS).increment(1);
      matchingRow = false;
      if (!dryRun && doDeletes) {
        if (delete == null) {
          delete = new Delete(rowKey);
        }
        // add a tombstone to exactly match the target cell that is missing on the source
        delete.addColumn(CellUtil.cloneFamily(targetCell),
            CellUtil.cloneQualifier(targetCell), targetCell.getTimestamp());
      }
      targetCell = targetCells.nextCellInRow();
    } else {
      // the cell keys are equal, now check values
      if (CellUtil.matchingValue(sourceCell, targetCell)) {
        matchingCells++;
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Different values: ");
          LOG.debug("  source cell: " + sourceCell
              + " value: " + Bytes.toString(sourceCell.getValueArray(),
              sourceCell.getValueOffset(), sourceCell.getValueLength()));
          LOG.debug("  target cell: " + targetCell
              + " value: " + Bytes.toString(targetCell.getValueArray(),
              targetCell.getValueOffset(), targetCell.getValueLength()));
        }
        context.getCounter(Counter.DIFFERENTCELLVALUES).increment(1);
        matchingRow = false;
        if (!dryRun && doPuts) {
          // overwrite target cell
          if (put == null) {
            put = new Put(rowKey);
          }
          sourceCell = checkAndResetTimestamp(sourceCell);
          put.add(sourceCell);
        }
      }
      sourceCell = sourceCells.nextCellInRow();
      targetCell = targetCells.nextCellInRow();
    }
    // Flush partial mutations once they reach the scan batch size, so a very
    // wide row does not accumulate one enormous Put/Delete in memory.
    if (!dryRun && sourceTableHash.scanBatch > 0) {
      if (put != null && put.size() >= sourceTableHash.scanBatch) {
        context.write(new ImmutableBytesWritable(rowKey), put);
        put = null;
      }
      if (delete != null && delete.size() >= sourceTableHash.scanBatch) {
        context.write(new ImmutableBytesWritable(rowKey), delete);
        delete = null;
      }
    }
  }
  // Emit whatever mutations remain for this row.
  if (!dryRun) {
    if (put != null) {
      context.write(new ImmutableBytesWritable(rowKey), put);
    }
    if (delete != null) {
      context.write(new ImmutableBytesWritable(rowKey), delete);
    }
  }
  if (matchingCells > 0) {
    context.getCounter(Counter.MATCHINGCELLS).increment(matchingCells);
  }
  if (matchingRow) {
    context.getCounter(Counter.MATCHINGROWS).increment(1);
    return true;
  } else {
    context.getCounter(Counter.ROWSWITHDIFFS).increment(1);
    return false;
  }
}
/**
 * Compares two row keys lexicographically as unsigned bytes.
 * A null key sorts after any non-null key: null means that side has no more rows.
 */
private static int compareRowKeys(byte[] r1, byte[] r2) {
  if (r1 == null) {
    return 1; // source missing row
  }
  if (r2 == null) {
    return -1; // target missing row
  }
  // Sync on no META tables only. We can directly do what CellComparator is doing inside.
  // Never the call going to MetaCellComparator.
  return Bytes.compareTo(r1, 0, r1.length, r2, 0, r2.length);
}
/**
 * Compares families, qualifiers, and timestamps of the given cells, which are
 * assumed to belong to the same row. A null cell sorts after any non-null cell.
 * When {@code ignoreTimestamp} is set, cells with equal family and qualifier
 * compare as equal regardless of timestamp.
 */
private int compareCellKeysWithinRow(Cell c1, Cell c2) {
  if (c1 == null) {
    return 1; // source missing cell
  }
  if (c2 == null) {
    return -1; // target missing cell
  }
  CellComparator comparator = CellComparator.getInstance();
  int cmp = comparator.compareFamilies(c1, c2);
  if (cmp == 0) {
    cmp = comparator.compareQualifiers(c1, c2);
  }
  if (cmp != 0) {
    return cmp;
  }
  if (this.ignoreTimestamp) {
    return 0;
  }
  // note timestamp comparison is inverted - more recent cells first
  return comparator.compareTimestamps(c1, c2);
}
@Override
protected void cleanup(Context context)
    throws IOException, InterruptedException {
  // Only finish the remaining hash ranges if map() did not already fail;
  // otherwise we would mask the original error with follow-on failures.
  if (mapperException == null) {
    try {
      finishRemainingHashRanges(context);
    } catch (Throwable t) {
      mapperException = t;
    }
  }
  // Always attempt to close tables and connections, keeping the first error.
  try {
    sourceTable.close();
    targetTable.close();
    sourceConnection.close();
    targetConnection.close();
  } catch (Throwable t) {
    if (mapperException == null) {
      mapperException = t;
    } else {
      LOG.error("Suppressing exception from closing tables", t);
    }
  }
  // propagate first exception
  if (mapperException != null) {
    Throwables.propagateIfInstanceOf(mapperException, IOException.class);
    Throwables.propagateIfInstanceOf(mapperException, InterruptedException.class);
    Throwables.propagate(mapperException);
  }
}
/**
 * Completes the hash batches that remain open at the end of this map task's
 * split. When the final open batch extends past the split's end row, the
 * remainder of the range is scanned directly (from the target table) so the
 * batch hash covers the whole range before comparison.
 */
private void finishRemainingHashRanges(Context context) throws IOException,
    InterruptedException {
  TableSplit split = (TableSplit) context.getInputSplit();
  byte[] splitEndRow = split.getEndRow();
  boolean reachedEndOfTable = HashTable.isTableEndRow(splitEndRow);
  // if there are more hash batches that begin before the end of this split move to them
  while (nextSourceKey != null
      && (nextSourceKey.compareTo(splitEndRow) < 0 || reachedEndOfTable)) {
    moveToNextBatch(context);
  }
  if (targetHasher.isBatchStarted()) {
    // need to complete the final open hash batch
    if ((nextSourceKey != null && nextSourceKey.compareTo(splitEndRow) > 0)
        || (nextSourceKey == null && !Bytes.equals(splitEndRow, sourceTableHash.stopRow))) {
      // the open hash range continues past the end of this region
      // add a scan to complete the current hash range
      Scan scan = sourceTableHash.initScan();
      scan.withStartRow(splitEndRow);
      if (nextSourceKey == null) {
        scan.withStopRow(sourceTableHash.stopRow);
      } else {
        scan.withStopRow(nextSourceKey.copyBytes());
      }
      ResultScanner targetScanner = null;
      try {
        targetScanner = targetTable.getScanner(scan);
        for (Result row : targetScanner) {
          targetHasher.hashResult(row);
        }
      } finally {
        if (targetScanner != null) {
          targetScanner.close();
        }
      }
    } // else current batch ends exactly at split end row
    finishBatchAndCompareHashes(context);
  }
}
}
// Number of required positional arguments: <sourcehashdir> <sourcetable> <targettable>.
private static final int NUM_ARGS = 3;
/**
 * Prints usage help to stderr, preceded by the given error message when non-empty.
 *
 * @param errorMsg error text to show first, or null/empty for plain usage output
 */
private static void printUsage(final String errorMsg) {
  if (errorMsg != null && errorMsg.length() > 0) {
    System.err.println("ERROR: " + errorMsg);
    System.err.println();
  }
  System.err.println("Usage: SyncTable [options] <sourcehashdir> <sourcetable> <targettable>");
  System.err.println();
  System.err.println("Options:");
  System.err.println(" sourcezkcluster  ZK cluster key of the source table");
  System.err.println("                  (defaults to cluster in classpath's config)");
  System.err.println(" targetzkcluster  ZK cluster key of the target table");
  System.err.println("                  (defaults to cluster in classpath's config)");
  System.err.println(" dryrun           if true, output counters but no writes");
  System.err.println("                  (defaults to false)");
  System.err.println(" doDeletes        if false, does not perform deletes");
  System.err.println("                  (defaults to true)");
  System.err.println(" doPuts           if false, does not perform puts");
  System.err.println("                  (defaults to true)");
  System.err.println(" ignoreTimestamps if true, ignores cells timestamps while comparing ");
  System.err.println("                  cell values. Any missing cell on target then gets");
  System.err.println("                  added with current time as timestamp ");
  System.err.println("                  (defaults to false)");
  System.err.println();
  System.err.println("Args:");
  System.err.println(" sourcehashdir    path to HashTable output dir for source table");
  System.err.println("                  (see org.apache.hadoop.hbase.mapreduce.HashTable)");
  System.err.println(" sourcetable      Name of the source table to sync from");
  System.err.println(" targettable      Name of the target table to sync to");
  System.err.println();
  System.err.println("Examples:");
  System.err.println(" For a dry run SyncTable of tableA from a remote source cluster");
  System.err.println(" to a local target cluster:");
  System.err.println(" $ hbase " +
      "org.apache.hadoop.hbase.mapreduce.SyncTable --dryrun=true"
      + " --sourcezkcluster=zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase"
      + " hdfs://nn:9000/hashes/tableA tableA tableA");
}
/**
 * Parses the command line into this tool's fields. The last three arguments
 * are positional (hash dir, source table, target table); everything before
 * them must be a recognized {@code --key=value} option.
 *
 * @return true when parsing succeeded; false when usage was printed instead
 */
private boolean doCommandLine(final String[] args) {
  if (args.length < NUM_ARGS) {
    printUsage(null);
    return false;
  }
  try {
    // Positional arguments are taken from the END of the argument list.
    sourceHashDir = new Path(args[args.length - 3]);
    sourceTableName = args[args.length - 2];
    targetTableName = args[args.length - 1];
    for (int i = 0; i < args.length - NUM_ARGS; i++) {
      String cmd = args[i];
      if (cmd.equals("-h") || cmd.startsWith("--h")) {
        printUsage(null);
        return false;
      }
      final String sourceZkClusterKey = "--sourcezkcluster=";
      if (cmd.startsWith(sourceZkClusterKey)) {
        sourceZkCluster = cmd.substring(sourceZkClusterKey.length());
        continue;
      }
      final String targetZkClusterKey = "--targetzkcluster=";
      if (cmd.startsWith(targetZkClusterKey)) {
        targetZkCluster = cmd.substring(targetZkClusterKey.length());
        continue;
      }
      final String dryRunKey = "--dryrun=";
      if (cmd.startsWith(dryRunKey)) {
        dryRun = Boolean.parseBoolean(cmd.substring(dryRunKey.length()));
        continue;
      }
      final String doDeletesKey = "--doDeletes=";
      if (cmd.startsWith(doDeletesKey)) {
        doDeletes = Boolean.parseBoolean(cmd.substring(doDeletesKey.length()));
        continue;
      }
      final String doPutsKey = "--doPuts=";
      if (cmd.startsWith(doPutsKey)) {
        doPuts = Boolean.parseBoolean(cmd.substring(doPutsKey.length()));
        continue;
      }
      final String ignoreTimestampsKey = "--ignoreTimestamps=";
      if (cmd.startsWith(ignoreTimestampsKey)) {
        ignoreTimestamps = Boolean.parseBoolean(cmd.substring(ignoreTimestampsKey.length()));
        continue;
      }
      printUsage("Invalid argument '" + cmd + "'");
      return false;
    }
  } catch (Exception e) {
    LOG.error("Failed to parse commandLine arguments", e);
    printUsage("Can't start because " + e.getMessage());
    return false;
  }
  return true;
}
/**
 * Main entry point. Runs the tool via {@link ToolRunner} and exits with the
 * tool's return code.
 */
public static void main(String[] args) throws Exception {
  int ret = ToolRunner.run(new SyncTable(HBaseConfiguration.create()), args);
  System.exit(ret);
}
@Override
public int run(String[] args) throws Exception {
  // Let Hadoop consume generic options (-D, -conf, ...) before our own parsing.
  String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  if (!doCommandLine(otherArgs)) {
    return 1;
  }
  Job job = createSubmittableJob(otherArgs);
  if (!job.waitForCompletion(true)) {
    LOG.info("Map-reduce job failed!");
    return 1;
  }
  // Expose the job's counters to callers (e.g. tests) after success.
  counters = job.getCounters();
  return 0;
}
}
| |
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* -----------------------------
* DefaultPolarItemRenderer.java
* -----------------------------
* (C) Copyright 2004, 2006, 2007, by Solution Engineering, Inc. and
* Contributors.
*
* Original Author: Daniel Bridenbecker, Solution Engineering, Inc.;
* Contributor(s): David Gilbert (for Object Refinery Limited);
*
* $Id: DefaultPolarItemRenderer.java,v 1.7.2.6 2007/02/02 15:52:24 mungady Exp $
*
* Changes
* -------
* 19-Jan-2004 : Version 1, contributed by DB with minor changes by DG (DG);
* 15-Jul-2004 : Switched getX() with getXValue() and getY() with
* getYValue() (DG);
* 04-Oct-2004 : Renamed BooleanUtils --> BooleanUtilities (DG);
* 20-Apr-2005 : Update for change to LegendItem class (DG);
* ------------- JFREECHART 1.0.x ---------------------------------------------
* 04-Aug-2006 : Implemented equals() and clone() (DG);
* 02-Feb-2007 : Removed author tags from all over JFreeChart sources (DG);
*
*/
package org.jfree.chart.renderer;
import java.awt.AlphaComposite;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Rectangle2D;
import java.util.Iterator;
import java.util.List;
import org.jfree.chart.LegendItem;
import org.jfree.chart.axis.NumberTick;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.plot.DrawingSupplier;
import org.jfree.chart.plot.PlotRenderingInfo;
import org.jfree.chart.plot.PolarPlot;
import org.jfree.data.xy.XYDataset;
import org.jfree.text.TextUtilities;
import org.jfree.ui.TextAnchor;
import org.jfree.util.BooleanList;
import org.jfree.util.BooleanUtilities;
/**
* A renderer that can be used with the {@link PolarPlot} class.
*/
public class DefaultPolarItemRenderer extends AbstractRenderer
implements PolarItemRenderer {
/** The plot that the renderer is assigned to. */
private PolarPlot plot;
/** Flags that control whether the renderer fills each series or not. */
private BooleanList seriesFilled;
/**
 * Creates a new instance of DefaultPolarItemRenderer with no plot assigned
 * and no per-series fill flags set.
 */
public DefaultPolarItemRenderer() {
  this.seriesFilled = new BooleanList();
}
/**
 * Returns the drawing supplier from the plot that this renderer is
 * assigned to, or <code>null</code> if no plot has been set.
 *
 * @return The drawing supplier (possibly <code>null</code>).
 */
public DrawingSupplier getDrawingSupplier() {
    PolarPlot p = getPlot();
    return (p != null) ? p.getDrawingSupplier() : null;
}
/**
 * Set the plot associated with this renderer. Called by {@link PolarPlot}
 * when the renderer is assigned.
 *
 * @param plot  the plot.
 */
public void setPlot(PolarPlot plot) {
    this.plot = plot;
}
/**
 * Return the plot associated with this renderer, or <code>null</code> if
 * none has been set.
 *
 * @return The plot.
 */
public PolarPlot getPlot() {
    return this.plot;
}
/**
 * Plots the data for a given series: each (theta, radius) item is translated
 * to Java2D coordinates and collected into one polygon, which is then either
 * filled (semi-transparently) or stroked, depending on the series fill flag.
 *
 * @param g2  the drawing surface.
 * @param dataArea  the data area.
 * @param info  collects plot rendering info.
 * @param plot  the plot.
 * @param dataset  the dataset.
 * @param seriesIndex  the series index.
 */
public void drawSeries(Graphics2D g2,
                       Rectangle2D dataArea,
                       PlotRenderingInfo info,
                       PolarPlot plot,
                       XYDataset dataset,
                       int seriesIndex) {
    Polygon polygon = new Polygon();
    int itemCount = dataset.getItemCount(seriesIndex);
    for (int item = 0; item < itemCount; item++) {
        double theta = dataset.getXValue(seriesIndex, item);
        double radius = dataset.getYValue(seriesIndex, item);
        Point pt = plot.translateValueThetaRadiusToJava2D(theta, radius,
                dataArea);
        polygon.addPoint(pt.x, pt.y);
    }
    g2.setPaint(getSeriesPaint(seriesIndex));
    g2.setStroke(getSeriesStroke(seriesIndex));
    if (!isSeriesFilled(seriesIndex)) {
        g2.draw(polygon);
    }
    else {
        // fill with 50% alpha so overlapping series remain visible
        Composite savedComposite = g2.getComposite();
        g2.setComposite(AlphaComposite.getInstance(
                AlphaComposite.SRC_OVER, 0.5f));
        g2.fill(polygon);
        g2.setComposite(savedComposite);
    }
}
/**
* Returns <code>true</code> if the renderer should fill the specified
* series, and <code>false</code> otherwise.
*
* @param series the series index (zero-based).
*
* @return A boolean.
*/
public boolean isSeriesFilled(int series) {
boolean result = false;
Boolean b = this.seriesFilled.getBoolean(series);
if (b != null) {
result = b.booleanValue();
}
return result;
}
/**
* Sets a flag that controls whether or not a series is filled.
*
* @param series the series index.
* @param filled the flag.
*/
public void setSeriesFilled(int series, boolean filled) {
this.seriesFilled.setBoolean(series, BooleanUtilities.valueOf(filled));
}
/**
* Draw the angular gridlines - the spokes.
*
* @param g2 the drawing surface.
* @param plot the plot.
* @param ticks the ticks.
* @param dataArea the data area.
*/
public void drawAngularGridLines(Graphics2D g2,
PolarPlot plot,
List ticks,
Rectangle2D dataArea) {
g2.setFont(plot.getAngleLabelFont());
g2.setStroke(plot.getAngleGridlineStroke());
g2.setPaint(plot.getAngleGridlinePaint());
double axisMin = plot.getAxis().getLowerBound();
double maxRadius = plot.getMaxRadius();
Point center = plot.translateValueThetaRadiusToJava2D(axisMin, axisMin,
dataArea);
Iterator iterator = ticks.iterator();
while (iterator.hasNext()) {
NumberTick tick = (NumberTick) iterator.next();
Point p = plot.translateValueThetaRadiusToJava2D(
tick.getNumber().doubleValue(), maxRadius, dataArea);
g2.setPaint(plot.getAngleGridlinePaint());
g2.drawLine(center.x, center.y, p.x, p.y);
if (plot.isAngleLabelsVisible()) {
int x = p.x;
int y = p.y;
g2.setPaint(plot.getAngleLabelPaint());
TextUtilities.drawAlignedString(tick.getText(), g2, x, y,
TextAnchor.CENTER);
}
}
}
/**
* Draw the radial gridlines - the rings.
*
* @param g2 the drawing surface.
* @param plot the plot.
* @param radialAxis the radial axis.
* @param ticks the ticks.
* @param dataArea the data area.
*/
public void drawRadialGridLines(Graphics2D g2,
PolarPlot plot,
ValueAxis radialAxis,
List ticks,
Rectangle2D dataArea) {
g2.setFont(radialAxis.getTickLabelFont());
g2.setPaint(plot.getRadiusGridlinePaint());
g2.setStroke(plot.getRadiusGridlineStroke());
double axisMin = radialAxis.getLowerBound();
Point center = plot.translateValueThetaRadiusToJava2D(axisMin, axisMin,
dataArea);
Iterator iterator = ticks.iterator();
while (iterator.hasNext()) {
NumberTick tick = (NumberTick) iterator.next();
Point p = plot.translateValueThetaRadiusToJava2D(90.0,
tick.getNumber().doubleValue(), dataArea);
int r = p.x - center.x;
int upperLeftX = center.x - r;
int upperLeftY = center.y - r;
int d = 2 * r;
Ellipse2D ring = new Ellipse2D.Double(upperLeftX, upperLeftY, d, d);
g2.setPaint(plot.getRadiusGridlinePaint());
g2.draw(ring);
}
}
/**
* Return the legend for the given series.
*
* @param series the series index.
*
* @return The legend item.
*/
public LegendItem getLegendItem(int series) {
LegendItem result = null;
PolarPlot polarPlot = getPlot();
if (polarPlot != null) {
XYDataset dataset;
dataset = polarPlot.getDataset();
if (dataset != null) {
String label = dataset.getSeriesKey(series).toString();
String description = label;
Shape shape = getSeriesShape(series);
Paint paint = getSeriesPaint(series);
Paint outlinePaint = getSeriesOutlinePaint(series);
Stroke outlineStroke = getSeriesOutlineStroke(series);
result = new LegendItem(label, description, null, null,
shape, paint, outlineStroke, outlinePaint);
}
}
return result;
}
/**
* Tests this renderer for equality with an arbitrary object.
*
* @param obj the object (<code>null</code> not permitted).
*
* @return <code>true</code> if this renderer is equal to <code>obj</code>,
* and <code>false</code> otherwise.
*/
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof DefaultPolarItemRenderer)) {
return false;
}
DefaultPolarItemRenderer that = (DefaultPolarItemRenderer) obj;
if (!this.seriesFilled.equals(that.seriesFilled)) {
return false;
}
return super.equals(obj);
}
/**
* Returns a clone of the renderer.
*
* @return A clone.
*
* @throws CloneNotSupportedException if the renderer cannot be cloned.
*/
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.servlets.post.impl;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.IdentityHashMap;
import java.util.Map;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.servlets.HtmlResponse;
import org.apache.sling.servlets.post.PostOperation;
import org.apache.sling.servlets.post.PostResponse;
import org.apache.sling.servlets.post.SlingPostOperation;
import org.apache.sling.servlets.post.SlingPostProcessor;
import org.apache.sling.servlets.post.impl.helper.HtmlResponseProxy;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The <code>PostOperationProxyProvider</code> listens for legacy
* {@link SlingPostOperation} services being registered and wraps them with a
 * proxy for the new {@link PostOperation} API and registers the proxies.
*/
@Component(service = {})
public class PostOperationProxyProvider implements ServiceListener {

    private final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * The service listener filter to listen for SlingPostOperation services.
     */
    private static final String REFERENCE_FILTER = "(" + Constants.OBJECTCLASS
        + "=" + SlingPostOperation.SERVICE_NAME + ")";

    // maps references to the SlingPostOperation services to the registrations
    // of the PostOperation proxies for unregistration purposes
    private final Map<ServiceReference, ServiceRegistration> proxies = new IdentityHashMap<>();

    // The DS component context to access the services to proxy
    private BundleContext bundleContext;

    // DS activation/deactivation

    /**
     * Activates the proxy provider component:
     * <ol>
     * <li>Keep BundleContext reference</li>
     * <li>Start listening for SlingPostOperation services</li>
     * <li>Register proxies for all existing SlingPostOperation services</li>
     * </ol>
     */
    @SuppressWarnings("unused")
    @Activate
    private void activate(final BundleContext bundleContext) {
        this.bundleContext = bundleContext;
        try {
            bundleContext.addServiceListener(this, REFERENCE_FILTER);
            final ServiceReference[] serviceReferences = bundleContext.getServiceReferences(
                SlingPostOperation.SERVICE_NAME, null);
            if (serviceReferences != null) {
                for (ServiceReference serviceReference : serviceReferences) {
                    register(serviceReference);
                }
            }
        } catch (InvalidSyntaxException ise) {
            // not expected for the statically tested REFERENCE_FILTER; log
            // instead of silently swallowing in case the filter is ever broken
            log.error("activate: Unexpected InvalidSyntaxException registering listener with filter "
                + REFERENCE_FILTER, ise);
        }
    }

    /**
     * Deactivates the proxy provider component:
     * <ol>
     * <li>Unregister as a service listener</li>
     * <li>Unregister all proxies</li>
     * <li>Drop BundleContext reference</li>
     * </ol>
     */
    @SuppressWarnings("unused")
    @Deactivate
    private void deactivate() {
        this.bundleContext.removeServiceListener(this);
        final ServiceReference[] serviceReferences;
        synchronized (this.proxies) {
            serviceReferences = this.proxies.keySet().toArray(
                new ServiceReference[this.proxies.size()]);
        }
        for (ServiceReference serviceReference : serviceReferences) {
            unregister(serviceReference);
        }
        this.bundleContext = null;
    }

    // ServiceEvent handling

    @Override
    public void serviceChanged(ServiceEvent event) {
        /*
         * There is a slight chance for a race condition on deactivation where
         * the component may be deactivating and the bundle context reference
         * has been removed but the framework is still sending service events.
         * In this situation we don't want to handle the event any way and so we
         * can safely ignore it
         */
        if (this.bundleContext == null) {
            return;
        }
        switch (event.getType()) {
            case ServiceEvent.REGISTERED:
                register(event.getServiceReference());
                break;
            case ServiceEvent.MODIFIED:
                update(event.getServiceReference());
                break;
            case ServiceEvent.UNREGISTERING:
                unregister(event.getServiceReference());
                break;
        }
    }

    /**
     * Access SlingPostOperation service and register proxy.
     * <p>
     * Called by serviceChanged
     */
    private void register(final ServiceReference serviceReference) {
        final SlingPostOperation service = (SlingPostOperation) this.bundleContext.getService(serviceReference);
        final PostOperationProxy proxy = new PostOperationProxy(service);
        // register the proxy on behalf of the bundle providing the delegate
        final BundleContext bundleContext = serviceReference.getBundle().getBundleContext();
        final Dictionary<String, Object> props = copyServiceProperties(serviceReference);
        final ServiceRegistration reg = bundleContext.registerService(
            PostOperation.SERVICE_NAME, proxy, props);
        log.debug("Registering {}", proxy);
        synchronized (this.proxies) {
            this.proxies.put(serviceReference, reg);
        }
    }

    /**
     * Update proxy service registration properties.
     * <p>
     * Called by serviceChanged
     */
    private void update(final ServiceReference serviceReference) {
        final ServiceRegistration proxyRegistration;
        synchronized (this.proxies) {
            proxyRegistration = this.proxies.get(serviceReference);
        }
        if (proxyRegistration != null) {
            log.debug("Updating {}", proxyRegistration);
            proxyRegistration.setProperties(copyServiceProperties(serviceReference));
        }
    }

    /**
     * Unregister proxy and unget SlingPostOperation service.
     * <p>
     * Called by serviceChanged
     */
    private void unregister(final ServiceReference serviceReference) {
        final ServiceRegistration proxyRegistration;
        synchronized (this.proxies) {
            proxyRegistration = this.proxies.remove(serviceReference);
        }
        if (proxyRegistration != null) {
            log.debug("Unregistering {}", proxyRegistration);
            // take the proxy out of service first so no consumer can reach
            // the delegate after it has been released with ungetService
            proxyRegistration.unregister();
            this.bundleContext.ungetService(serviceReference);
        }
    }

    // Helpers

    /**
     * Creates a Dictionary for use as the service registration properties of
     * the PostOperation proxy: all properties of the delegate service plus
     * the operation name and a descriptive service description.
     */
    private Dictionary<String, Object> copyServiceProperties(
            final ServiceReference serviceReference) {
        final Dictionary<String, Object> props = new Hashtable<>();
        for (String key : serviceReference.getPropertyKeys()) {
            props.put(key, serviceReference.getProperty(key));
        }
        props.put(PostOperation.PROP_OPERATION_NAME,
            serviceReference.getProperty(SlingPostOperation.PROP_OPERATION_NAME));
        props.put(Constants.SERVICE_DESCRIPTION, "Proxy for "
            + serviceReference);
        return props;
    }

    /**
     * The <code>PostOperationProxy</code> is the proxy implementing the
     * {@link PostOperation} service interface by calling the
     * {@link SlingPostOperation} service.
     */
    private class PostOperationProxy implements PostOperation {

        private final SlingPostOperation delegatee;

        PostOperationProxy(final SlingPostOperation delegatee) {
            this.delegatee = delegatee;
        }

        @Override
        public String toString() {
            return getClass().getSimpleName() + " for " + delegatee.getClass().getName();
        }

        @Override
        public void run(SlingHttpServletRequest request, PostResponse response,
                SlingPostProcessor[] processors) {
            // adapt the new PostResponse to the legacy HtmlResponse API
            HtmlResponse apiResponse = new HtmlResponseProxy(response);
            delegatee.run(request, apiResponse, processors);
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.MethodSignatureUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.util.RefactoringChangeUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Utilities for preserving reference semantics when PSI elements are copied or
 * moved to a different context. {@link #encodeContextInfo} records the resolve
 * targets of references (and the classes behind {@code this} expressions) in
 * copyable user data; after relocation, {@link #decodeContextInfo} re-binds or
 * re-qualifies references so they still point at the same declarations.
 */
public class ChangeContextUtil {
  private static final Logger LOG = Logger.getInstance(ChangeContextUtil.class);
  // Hard reference to the element's AST while it is "encoded", preventing GC
  // from collecting the AST together with the copyable user data (see below).
  private static final Key<ASTNode> HARD_REF_TO_AST = Key.create("HARD_REF_TO_AST");
  // Marker key: its mere presence flags an element as carrying encoded info.
  private static final Key<String> ENCODED_KEY = Key.create("ENCODED_KEY");
  // Class that an unqualified or topLevelScope-qualified `this` referred to.
  private static final Key<PsiClass> THIS_QUALIFIER_CLASS_KEY = Key.create("THIS_QUALIFIER_CLASS_KEY");
  // Member that an unqualified reference expression resolved to.
  public static final Key<PsiMember> REF_MEMBER_KEY = Key.create("REF_MEMBER_KEY");
  // Whether a qualified reference's qualifier could be removed at encode time.
  public static final Key<Boolean> CAN_REMOVE_QUALIFIER_KEY = Key.create("CAN_REMOVE_QUALIFIER_KEY");
  // Class that a reference (expression or other) resolved to.
  public static final Key<PsiClass> REF_CLASS_KEY = Key.create("REF_CLASS_KEY");
  // The class through which a non-static member reference was resolved.
  private static final Key<PsiClass> REF_MEMBER_THIS_CLASS_KEY = Key.create("REF_MEMBER_THIS_CLASS_KEY");

  // Utility class; not instantiable.
  private ChangeContextUtil() {}

  /**
   * Encodes context info for {@code scope} and all of its descendants,
   * allowing qualifier changes.
   *
   * @param scope             root of the subtree to encode
   * @param includeRefClasses whether to also record classes that references resolve to
   */
  public static void encodeContextInfo(PsiElement scope, boolean includeRefClasses) {
    encodeContextInfo(scope, scope, includeRefClasses, true);
  }

  /**
   * Encodes context info for {@code scope} and all of its descendants.
   *
   * @param scope              root of the subtree to encode
   * @param includeRefClasses  whether to also record classes that references resolve to
   * @param canChangeQualifier whether qualifier removability should be recorded
   */
  public static void encodeContextInfo(PsiElement scope, boolean includeRefClasses, boolean canChangeQualifier) {
    encodeContextInfo(scope, scope, includeRefClasses, canChangeQualifier);
  }

  /**
   * Recursive worker: stores resolve targets of {@code this} expressions and
   * reference expressions under {@code scope} as copyable user data, so the
   * data survives a PSI copy. Only targets declared OUTSIDE
   * {@code topLevelScope} are recorded (in-tree references stay valid by
   * themselves).
   */
  private static void encodeContextInfo(PsiElement scope,
                                        PsiElement topLevelScope,
                                        boolean includeRefClasses,
                                        boolean canChangeQualifier) {
    if (scope instanceof StubBasedPsiElement) {
      // as long as "scope" is reachable, don't let GC collect AST together with all the copyable user data
      scope.putUserData(HARD_REF_TO_AST, scope.getNode());
    }
    if (scope instanceof PsiThisExpression){
      // empty string is only a marker; presence of ENCODED_KEY matters
      scope.putCopyableUserData(ENCODED_KEY, "");
      PsiThisExpression thisExpr = (PsiThisExpression)scope;
      final PsiJavaCodeReferenceElement qualifier = thisExpr.getQualifier();
      if (qualifier == null){
        PsiClass thisClass = RefactoringChangeUtil.getThisClass(thisExpr);
        if (thisClass != null && !(thisClass instanceof PsiAnonymousClass)){
          thisExpr.putCopyableUserData(THIS_QUALIFIER_CLASS_KEY, thisClass);
        }
      }
      else {
        final PsiElement resolved = qualifier.resolve();
        if (resolved instanceof PsiClass && resolved == topLevelScope) {
          thisExpr.putCopyableUserData(THIS_QUALIFIER_CLASS_KEY, (PsiClass)topLevelScope);
        }
      }
    }
    else if (scope instanceof PsiReferenceExpression){
      scope.putCopyableUserData(ENCODED_KEY, "");
      PsiReferenceExpression refExpr = (PsiReferenceExpression)scope;
      PsiExpression qualifier = refExpr.getQualifierExpression();
      if (qualifier == null){
        final JavaResolveResult resolveResult = refExpr.advancedResolve(false);
        final PsiElement refElement = resolveResult.getElement();
        if (refElement != null && !PsiTreeUtil.isAncestor(topLevelScope, refElement, false)){
          if (refElement instanceof PsiClass){
            if (includeRefClasses){
              refExpr.putCopyableUserData(REF_CLASS_KEY, (PsiClass)refElement);
            }
          }
          else if (refElement instanceof PsiMember){
            refExpr.putCopyableUserData(REF_MEMBER_KEY, ( (PsiMember)refElement));
            final PsiElement resolveScope = resolveResult.getCurrentFileResolveScope();
            if (resolveScope instanceof PsiClass && !PsiTreeUtil.isAncestor(topLevelScope, resolveScope, false)) {
              refExpr.putCopyableUserData(REF_MEMBER_THIS_CLASS_KEY, (PsiClass)resolveScope);
            }
          }
        }
      }
      else if (canChangeQualifier) {
        refExpr.putCopyableUserData(CAN_REMOVE_QUALIFIER_KEY, canRemoveQualifier(refExpr));
      }
    }
    else if (includeRefClasses) {
      PsiReference ref = scope.getReference();
      if (ref != null){
        scope.putCopyableUserData(ENCODED_KEY, "");
        PsiElement refElement = ref.resolve();
        if (refElement instanceof PsiClass && !PsiTreeUtil.isAncestor(topLevelScope, refElement, false)){
          scope.putCopyableUserData(REF_CLASS_KEY, (PsiClass)refElement);
        }
      }
    }
    for(PsiElement child = scope.getFirstChild(); child != null; child = child.getNextSibling()){
      encodeContextInfo(child, topLevelScope, includeRefClasses, canChangeQualifier);
    }
  }

  /**
   * Restores references encoded by {@link #encodeContextInfo} in the (possibly
   * relocated) subtree rooted at {@code scope}, clearing the encoded data as
   * it goes.
   *
   * @param scope          root of the subtree to decode
   * @param thisClass      class the relocated code now belongs to (may be null)
   * @param thisAccessExpr expression to use when a former implicit/explicit
   *                       {@code this} must be materialized (may be null)
   * @return the element replacing {@code scope} (may differ when a reference
   *         was re-bound or replaced)
   * @throws IncorrectOperationException on PSI modification failure
   */
  @NotNull
  public static PsiElement decodeContextInfo(@NotNull PsiElement scope,
                                             @Nullable PsiClass thisClass,
                                             @Nullable PsiExpression thisAccessExpr) throws IncorrectOperationException {
    if (scope instanceof StubBasedPsiElement) {
      // release the hard AST reference taken during encoding
      scope.putUserData(HARD_REF_TO_AST, null);
    }
    if (scope.getCopyableUserData(ENCODED_KEY) != null) {
      scope.putCopyableUserData(ENCODED_KEY, null);
      if (scope instanceof PsiThisExpression) {
        PsiThisExpression thisExpr = (PsiThisExpression)scope;
        scope = decodeThisExpression(thisExpr, thisClass, thisAccessExpr);
      }
      else if (scope instanceof PsiReferenceExpression) {
        scope = decodeReferenceExpression((PsiReferenceExpression)scope, thisAccessExpr, thisClass);
      }
      else {
        PsiClass refClass = scope.getCopyableUserData(REF_CLASS_KEY);
        scope.putCopyableUserData(REF_CLASS_KEY, null);
        if (refClass != null && refClass.isValid()) {
          PsiReference ref = scope.getReference();
          if (ref != null) {
            final String qualifiedName = refClass.getQualifiedName();
            if (qualifiedName != null) {
              // only re-bind if the class is actually reachable from the new location
              if (JavaPsiFacade.getInstance(refClass.getProject()).findClass(qualifiedName, scope.getResolveScope()) != null) {
                scope = ref.bindToElement(refClass);
              }
            }
          }
        }
      }
    }
    if (scope instanceof PsiClass) {
      // inside a nested class a plain `this` would mean the wrong instance:
      // qualify the access expression before descending
      if (thisAccessExpr != null) {
        thisAccessExpr = (PsiExpression)qualifyThis(thisAccessExpr, thisClass);
      }
    }
    PsiElement child = scope.getFirstChild();
    while (child != null) {
      // decode may replace the child; continue from the replacement's sibling
      child = decodeContextInfo(child, thisClass, thisAccessExpr).getNextSibling();
    }
    return scope;
  }

  /**
   * Decodes a single {@code this} expression: replaces it with
   * {@code thisAccessExpr} or re-binds its qualifier, depending on what class
   * it referred to at encode time.
   */
  private static PsiElement decodeThisExpression(PsiThisExpression thisExpr,
                                                 PsiClass thisClass,
                                                 PsiExpression thisAccessExpr) throws IncorrectOperationException {
    final PsiJavaCodeReferenceElement qualifier = thisExpr.getQualifier();
    PsiClass encodedQualifierClass = thisExpr.getCopyableUserData(THIS_QUALIFIER_CLASS_KEY);
    thisExpr.putCopyableUserData(THIS_QUALIFIER_CLASS_KEY, null);
    if (qualifier == null){
      if (encodedQualifierClass != null && encodedQualifierClass.isValid()){
        if (encodedQualifierClass.equals(thisClass) && thisAccessExpr != null && thisAccessExpr.isValid()){
          if (thisAccessExpr instanceof PsiThisExpression) {
            PsiJavaCodeReferenceElement thisAccessQualifier = ((PsiThisExpression)thisAccessExpr).getQualifier();
            PsiElement resolve = thisAccessQualifier != null ? thisAccessQualifier.resolve() : null;
            // the replacement would resolve to the same class anyway: keep as is
            if (PsiTreeUtil.getParentOfType(thisExpr, PsiClass.class) == resolve) {
              return thisExpr;
            }
          }
          return thisExpr.replace(thisAccessExpr);
        }
      }
    }
    else {
      PsiClass qualifierClass = (PsiClass)qualifier.resolve();
      if (encodedQualifierClass == qualifierClass && thisClass != null) {
        qualifier.bindToElement(thisClass);
      }
      else {
        if (qualifierClass != null) {
          if (qualifierClass.equals(thisClass) && thisAccessExpr != null && thisAccessExpr.isValid()) {
            return thisExpr.replace(thisAccessExpr);
          }
        }
      }
    }
    return thisExpr;
  }

  /**
   * Decodes a single reference expression: re-qualifies unqualified member
   * references that no longer resolve to the encoded target, re-binds class
   * references, and strips qualifiers that became redundant.
   */
  private static PsiReferenceExpression decodeReferenceExpression(@NotNull PsiReferenceExpression refExpr,
                                                                  PsiExpression thisAccessExpr,
                                                                  PsiClass thisClass) throws IncorrectOperationException {
    PsiManager manager = refExpr.getManager();
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(manager.getProject());
    PsiExpression qualifier = refExpr.getQualifierExpression();
    if (qualifier == null){
      PsiMember refMember = refExpr.getCopyableUserData(REF_MEMBER_KEY);
      refExpr.putCopyableUserData(REF_MEMBER_KEY, null);
      if (refMember != null && refMember.isValid()){
        PsiClass containingClass = refMember.getContainingClass();
        if (refMember.hasModifierProperty(PsiModifier.STATIC)){
          PsiElement refElement = refExpr.resolve();
          if (!manager.areElementsEquivalent(refMember, refElement)){
            // static member now resolves elsewhere: qualify with its class,
            // unless the new context inherits it anyway
            final PsiClass currentClass = PsiTreeUtil.getParentOfType(refExpr, PsiClass.class);
            if (currentClass == null || !InheritanceUtil.isInheritorOrSelf(currentClass, containingClass, true)) {
              refExpr.setQualifierExpression(factory.createReferenceExpression(containingClass));
            }
          }
        }
        else {
          final PsiClass realParentClass = refExpr.getCopyableUserData(REF_MEMBER_THIS_CLASS_KEY);
          refExpr.putCopyableUserData(REF_MEMBER_THIS_CLASS_KEY, null);
          if (thisAccessExpr != null && thisClass != null && realParentClass != null &&
              InheritanceUtil.isInheritorOrSelf(thisClass, realParentClass, true)) {
            boolean needQualifier = true;
            PsiElement refElement = refExpr.resolve();
            if (refMember.equals(refElement) ||
                (refElement instanceof PsiMethod && refMember instanceof PsiMethod &&
                 MethodSignatureUtil.isSuperMethod((PsiMethod)refMember, (PsiMethod)refElement))) {
              if (thisAccessExpr instanceof PsiThisExpression && ((PsiThisExpression)thisAccessExpr).getQualifier() == null) {
                //Trivial qualifier
                needQualifier = false;
              }
              else {
                final PsiClass currentClass = findThisClass(refExpr, refMember);
                if (thisAccessExpr instanceof PsiThisExpression){
                  PsiJavaCodeReferenceElement thisQualifier = ((PsiThisExpression)thisAccessExpr).getQualifier();
                  PsiClass thisExprClass = thisQualifier != null
                                           ? (PsiClass)thisQualifier.resolve()
                                           : RefactoringChangeUtil.getThisClass(refExpr);
                  if (thisExprClass != null && (thisExprClass.equals(currentClass) || thisExprClass.isInheritor(realParentClass, true))){ // qualifier is not necessary
                    needQualifier = false;
                  }
                }
              }
            }
            if (needQualifier){
              refExpr.setQualifierExpression(thisAccessExpr);
            }
          }
          else if (thisClass != null && realParentClass != null && PsiTreeUtil.isAncestor(realParentClass, thisClass, true)) {
            PsiElement refElement = refExpr.resolve();
            if (refElement != null && !manager.areElementsEquivalent(refMember, refElement)) {
              refExpr = RefactoringChangeUtil.qualifyReference(refExpr, refMember, null);
            }
          }
        }
      }
      else {
        PsiClass refClass = refExpr.getCopyableUserData(REF_CLASS_KEY);
        refExpr.putCopyableUserData(REF_CLASS_KEY, null);
        if (refClass != null && refClass.isValid()){
          refExpr = (PsiReferenceExpression)refExpr.bindToElement(refClass);
        }
      }
    }
    else{
      Boolean couldRemove = refExpr.getCopyableUserData(CAN_REMOVE_QUALIFIER_KEY);
      refExpr.putCopyableUserData(CAN_REMOVE_QUALIFIER_KEY, null);
      // the qualifier was NOT removable before the move but is now: drop it
      if (couldRemove == Boolean.FALSE && canRemoveQualifier(refExpr)){
        PsiReferenceExpression newRefExpr = (PsiReferenceExpression)factory.createExpressionFromText(
          refExpr.getReferenceName(), null);
        refExpr = (PsiReferenceExpression)refExpr.replace(newRefExpr);
      }
    }
    return refExpr;
  }

  /**
   * Walks up the context chain from {@code refExpr} looking for the innermost
   * class that is {@code refMember}'s containing class or an inheritor of it;
   * falls back to the containing class itself.
   */
  private static PsiClass findThisClass(PsiReferenceExpression refExpr, PsiMember refMember) {
    LOG.assertTrue(refExpr.getQualifierExpression() == null);
    final PsiClass refMemberClass = refMember.getContainingClass();
    if (refMemberClass == null) return null;
    PsiElement parent = refExpr.getContext();
    while(parent != null){
      if (parent instanceof PsiClass){
        if (parent.equals(refMemberClass) || ((PsiClass)parent).isInheritor(refMemberClass, true)){
          return (PsiClass)parent;
        }
      }
      parent = parent.getContext();
    }
    return refMemberClass;
  }

  /**
   * Returns whether the class qualifier of {@code refExpr} could be removed
   * without changing what the reference resolves to. Checked by constructing
   * an unqualified copy of the reference and comparing resolve results.
   *
   * @param refExpr a reference with a (potential) class qualifier
   * @return true if the unqualified form resolves to the same element
   */
  public static boolean canRemoveQualifier(PsiReferenceExpression refExpr) {
    try{
      PsiExpression qualifier = refExpr.getQualifierExpression();
      if (!(qualifier instanceof PsiReferenceExpression)) return false;
      if (refExpr.getTypeParameters().length > 0) return false;
      PsiElement qualifierRefElement = ((PsiReferenceExpression)qualifier).resolve();
      if (!(qualifierRefElement instanceof PsiClass)) return false;
      PsiElement refElement = refExpr.resolve();
      if (refElement == null) return false;
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(refExpr.getProject());
      if (refExpr.getParent() instanceof PsiMethodCallExpression){
        // for calls, overload resolution depends on arguments: rebuild the
        // call with the original argument list before resolving
        PsiMethodCallExpression methodCall = (PsiMethodCallExpression)refExpr.getParent();
        PsiMethodCallExpression newMethodCall = (PsiMethodCallExpression)factory.createExpressionFromText(
          refExpr.getReferenceName() + "()", refExpr);
        newMethodCall.getArgumentList().replace(methodCall.getArgumentList());
        PsiElement newRefElement = newMethodCall.getMethodExpression().resolve();
        return refElement.equals(newRefElement);
      }
      else if (refExpr instanceof PsiMethodReferenceExpression) {
        return false;
      }
      else {
        PsiReferenceExpression newRefExpr = (PsiReferenceExpression)factory.createExpressionFromText(
          refExpr.getReferenceName(), refExpr);
        PsiElement newRefElement = newRefExpr.resolve();
        return refElement.equals(newRefElement);
      }
    }
    catch(IncorrectOperationException e){
      LOG.error(e);
      return false;
    }
  }

  /**
   * Replaces an unqualified {@code this} inside {@code scope} with a
   * {@code thisClass}-qualified one. Returns null when qualification is
   * impossible (anonymous class), signalling the caller to abort.
   */
  private static PsiElement qualifyThis(PsiElement scope, PsiClass thisClass) throws IncorrectOperationException {
    if (scope instanceof PsiThisExpression){
      PsiThisExpression thisExpr = (PsiThisExpression)scope;
      if (thisExpr.getQualifier() == null){
        if (thisClass instanceof PsiAnonymousClass) return null;
        return RefactoringChangeUtil.createThisExpression(thisClass.getManager(), thisClass);
      }
    }
    else if (!(scope instanceof PsiClass)){
      // do not descend into nested classes: their `this` is a different instance
      for(PsiElement child = scope.getFirstChild(); child != null; child = child.getNextSibling()){
        if (qualifyThis(child, thisClass) == null) return null;
      }
    }
    return scope;
  }

  /**
   * Removes all encoded context info (and the hard AST reference) from
   * {@code scope} and its descendants without decoding anything.
   */
  public static void clearContextInfo(PsiElement scope) {
    if (scope instanceof StubBasedPsiElement) {
      scope.putUserData(HARD_REF_TO_AST, null);
    }
    scope.putCopyableUserData(ENCODED_KEY, null);
    scope.putCopyableUserData(THIS_QUALIFIER_CLASS_KEY, null);
    scope.putCopyableUserData(REF_MEMBER_KEY, null);
    scope.putCopyableUserData(CAN_REMOVE_QUALIFIER_KEY, null);
    scope.putCopyableUserData(REF_CLASS_KEY, null);
    scope.putCopyableUserData(REF_MEMBER_THIS_CLASS_KEY, null);
    for(PsiElement child = scope.getFirstChild(); child != null; child = child.getNextSibling()){
      clearContextInfo(child);
    }
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.netapp.v2019_11_01.implementation;
import com.microsoft.azure.arm.collection.InnerSupportsGet;
import com.microsoft.azure.arm.collection.InnerSupportsDelete;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.management.netapp.v2019_11_01.NetAppAccountPatch;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.PATCH;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in Accounts.
*/
public class AccountsInner implements InnerSupportsGet<NetAppAccountInner>, InnerSupportsDelete<Void> {
/** The Retrofit service to perform REST calls. */
private AccountsService service;
/** The service client containing this operation class. */
private AzureNetAppFilesManagementClientImpl client;
/**
* Initializes an instance of AccountsInner.
*
* @param retrofit the Retrofit instance built from a Retrofit Builder.
* @param client the instance of the service client containing this operation class.
*/
public AccountsInner(Retrofit retrofit, AzureNetAppFilesManagementClientImpl client) {
    // Keep the handle to the parent management client, then build the
    // Retrofit proxy that performs the actual REST calls.
    this.client = client;
    this.service = retrofit.create(AccountsService.class);
}
/**
* The interface defining all the services for Accounts to be
 * used by Retrofit to perform the actual REST calls.
*/
interface AccountsService {
    // GET: list all NetApp accounts in the given resource group.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts listByResourceGroup" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts")
    Observable<Response<ResponseBody>> listByResourceGroup(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET: fetch a single NetApp account by name.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts getByResourceGroup" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}")
    Observable<Response<ResponseBody>> getByResourceGroup(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PUT: create or update an account (long-running; polled by the client).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts createOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}")
    Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Body NetAppAccountInner body, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PUT: single-shot variant of createOrUpdate (no poll loop).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts beginCreateOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}")
    Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Body NetAppAccountInner body, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // DELETE via @HTTP so a request body is permitted (hasBody = true).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts delete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // DELETE: single-shot variant of delete (no poll loop).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts beginDelete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> beginDelete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PATCH: partial update of an account from a NetAppAccountPatch body.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.netapp.v2019_11_01.Accounts update" })
    @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.NetApp/netAppAccounts/{accountName}")
    Observable<Response<ResponseBody>> update(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Body NetAppAccountPatch body, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
    /**
     * Describe all NetApp Accounts in a resource group.
     * List and describe all NetApp accounts in the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList<NetAppAccountInner> object if successful.
     */
    public PagedList<NetAppAccountInner> listByResourceGroup(String resourceGroupName) {
        PageImpl<NetAppAccountInner> page = new PageImpl<>();
        // Blocks until the call completes; the operation is not server-paged, so everything fits on one page.
        page.setItems(listByResourceGroupWithServiceResponseAsync(resourceGroupName).toBlocking().single().body());
        page.setNextPageLink(null);
        return new PagedList<NetAppAccountInner>(page) {
            @Override
            public Page<NetAppAccountInner> nextPage(String nextPageLink) {
                // Single-page result set: there is never a next page to fetch.
                return null;
            }
        };
    }
    /**
     * Describe all NetApp Accounts in a resource group.
     * List and describe all NetApp accounts in the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<NetAppAccountInner>> listByResourceGroupAsync(String resourceGroupName, final ServiceCallback<List<NetAppAccountInner>> serviceCallback) {
        return ServiceFuture.fromResponse(listByResourceGroupWithServiceResponseAsync(resourceGroupName), serviceCallback);
    }
    /**
     * Describe all NetApp Accounts in a resource group.
     * List and describe all NetApp accounts in the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the Page<NetAppAccountInner> object
     */
    public Observable<Page<NetAppAccountInner>> listByResourceGroupAsync(String resourceGroupName) {
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName).map(new Func1<ServiceResponse<List<NetAppAccountInner>>, Page<NetAppAccountInner>>() {
            @Override
            public Page<NetAppAccountInner> call(ServiceResponse<List<NetAppAccountInner>> response) {
                // Wrap the flat list into a single Page; nextPageLink is intentionally left unset (no paging).
                PageImpl<NetAppAccountInner> page = new PageImpl<>();
                page.setItems(response.body());
                return page;
            }
        });
    }
    /**
     * Describe all NetApp Accounts in a resource group.
     * List and describe all NetApp accounts in the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List<NetAppAccountInner> object wrapped in a {@link ServiceResponse}
     */
    public Observable<ServiceResponse<List<NetAppAccountInner>>> listByResourceGroupWithServiceResponseAsync(String resourceGroupName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listByResourceGroup(this.client.subscriptionId(), resourceGroupName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<NetAppAccountInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<NetAppAccountInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize into the page wrapper, then expose only the item list to callers.
                        ServiceResponse<PageImpl<NetAppAccountInner>> result = listByResourceGroupDelegate(response);
                        List<NetAppAccountInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<NetAppAccountInner>> clientResponse = new ServiceResponse<List<NetAppAccountInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization / error-mapping failures are surfaced through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
    /**
     * Maps the raw HTTP response of the list call onto a typed {@link ServiceResponse}.
     * Only HTTP 200 is treated as success; any other status is converted into a {@link CloudException}.
     *
     * @param response the raw Retrofit response
     * @return the deserialized page of accounts
     * @throws CloudException when the service rejected the request
     * @throws IOException when the body cannot be read or deserialized
     * @throws IllegalArgumentException when the response is malformed
     */
    private ServiceResponse<PageImpl<NetAppAccountInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<NetAppAccountInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<NetAppAccountInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Describe a NetApp Account.
     * Get the NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetAppAccountInner object if successful.
     */
    public NetAppAccountInner getByResourceGroup(String resourceGroupName, String accountName) {
        // Synchronous wrapper: blocks on the async variant and unwraps the payload.
        return getByResourceGroupWithServiceResponseAsync(resourceGroupName, accountName).toBlocking().single().body();
    }
    /**
     * Describe a NetApp Account.
     * Get the NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<NetAppAccountInner> getByResourceGroupAsync(String resourceGroupName, String accountName, final ServiceCallback<NetAppAccountInner> serviceCallback) {
        return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
/**
* Describe a NetApp Account.
* Get the NetApp account.
*
* @param resourceGroupName The name of the resource group.
* @param accountName The name of the NetApp account
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the NetAppAccountInner object
*/
public Observable<NetAppAccountInner> getByResourceGroupAsync(String resourceGroupName, String accountName) {
return getByResourceGroupWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<NetAppAccountInner>, NetAppAccountInner>() {
@Override
public NetAppAccountInner call(ServiceResponse<NetAppAccountInner> response) {
return response.body();
}
});
}
    /**
     * Describe a NetApp Account.
     * Get the NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the NetAppAccountInner object wrapped in a {@link ServiceResponse}
     */
    public Observable<ServiceResponse<NetAppAccountInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.getByResourceGroup(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NetAppAccountInner>>>() {
                @Override
                public Observable<ServiceResponse<NetAppAccountInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<NetAppAccountInner> clientResponse = getByResourceGroupDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization / error-mapping failures are surfaced through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
    /**
     * Maps the raw HTTP response of the get call onto a typed {@link ServiceResponse}.
     * Only HTTP 200 is treated as success; any other status is converted into a {@link CloudException}.
     *
     * @param response the raw Retrofit response
     * @return the deserialized account
     * @throws CloudException when the service rejected the request
     * @throws IOException when the body cannot be read or deserialized
     * @throws IllegalArgumentException when the response is malformed
     */
    private ServiceResponse<NetAppAccountInner> getByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<NetAppAccountInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<NetAppAccountInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetAppAccountInner object if successful.
     */
    public NetAppAccountInner createOrUpdate(String resourceGroupName, String accountName, NetAppAccountInner body) {
        // Long-running operation: last() waits for the final polling emission before unwrapping.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body).toBlocking().last().body();
    }
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<NetAppAccountInner> createOrUpdateAsync(String resourceGroupName, String accountName, NetAppAccountInner body, final ServiceCallback<NetAppAccountInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body), serviceCallback);
    }
/**
* Create or update a NetApp account.
* Create or update the specified NetApp account within the resource group.
*
* @param resourceGroupName The name of the resource group.
* @param accountName The name of the NetApp account
* @param body NetApp Account object supplied in the body of the operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<NetAppAccountInner> createOrUpdateAsync(String resourceGroupName, String accountName, NetAppAccountInner body) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body).map(new Func1<ServiceResponse<NetAppAccountInner>, NetAppAccountInner>() {
@Override
public NetAppAccountInner call(ServiceResponse<NetAppAccountInner> response) {
return response.body();
}
});
}
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<NetAppAccountInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String accountName, NetAppAccountInner body) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (body == null) {
            throw new IllegalArgumentException("Parameter body is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(body);
        Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, accountName, body, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        // Long-running PUT: delegate polling of the operation status to the AzureClient.
        return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<NetAppAccountInner>() { }.getType());
    }
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetAppAccountInner object if successful.
     */
    public NetAppAccountInner beginCreateOrUpdate(String resourceGroupName, String accountName, NetAppAccountInner body) {
        // Non-polling variant: returns after the initial PUT response, without waiting for provisioning to finish.
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body).toBlocking().single().body();
    }
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<NetAppAccountInner> beginCreateOrUpdateAsync(String resourceGroupName, String accountName, NetAppAccountInner body, final ServiceCallback<NetAppAccountInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body), serviceCallback);
    }
/**
* Create or update a NetApp account.
* Create or update the specified NetApp account within the resource group.
*
* @param resourceGroupName The name of the resource group.
* @param accountName The name of the NetApp account
* @param body NetApp Account object supplied in the body of the operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the NetAppAccountInner object
*/
public Observable<NetAppAccountInner> beginCreateOrUpdateAsync(String resourceGroupName, String accountName, NetAppAccountInner body) {
return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, body).map(new Func1<ServiceResponse<NetAppAccountInner>, NetAppAccountInner>() {
@Override
public NetAppAccountInner call(ServiceResponse<NetAppAccountInner> response) {
return response.body();
}
});
}
    /**
     * Create or update a NetApp account.
     * Create or update the specified NetApp account within the resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the NetAppAccountInner object wrapped in a {@link ServiceResponse}
     */
    public Observable<ServiceResponse<NetAppAccountInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String accountName, NetAppAccountInner body) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (body == null) {
            throw new IllegalArgumentException("Parameter body is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(body);
        return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, accountName, body, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NetAppAccountInner>>>() {
                @Override
                public Observable<ServiceResponse<NetAppAccountInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<NetAppAccountInner> clientResponse = beginCreateOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization / error-mapping failures are surfaced through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
    /**
     * Maps the raw HTTP response of the begin-create-or-update call onto a typed {@link ServiceResponse}.
     * 200 and 201 carry an account body; 202 (accepted, still provisioning) carries none.
     *
     * @param response the raw Retrofit response
     * @return the deserialized account (may have a null body for 202)
     * @throws CloudException when the service rejected the request
     * @throws IOException when the body cannot be read or deserialized
     * @throws IllegalArgumentException when the response is malformed
     */
    private ServiceResponse<NetAppAccountInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<NetAppAccountInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<NetAppAccountInner>() { }.getType())
                .register(201, new TypeToken<NetAppAccountInner>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String accountName) {
        // Long-running operation: last() waits for the final polling emission.
        deleteWithServiceResponseAsync(resourceGroupName, accountName).toBlocking().last().body();
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String accountName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
/**
* Delete a NetApp account.
* Delete the specified NetApp account.
*
* @param resourceGroupName The name of the resource group.
* @param accountName The name of the NetApp account
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<Void> deleteAsync(String resourceGroupName, String accountName) {
return deleteWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Observable<Response<ResponseBody>> observable = service.delete(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        // Long-running DELETE: delegate polling of the operation status to the AzureClient.
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String accountName) {
        // Non-polling variant: returns after the initial DELETE response, without waiting for completion.
        beginDeleteWithServiceResponseAsync(resourceGroupName, accountName).toBlocking().single().body();
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String accountName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> beginDeleteAsync(String resourceGroupName, String accountName) {
        return beginDeleteWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
    /**
     * Delete a NetApp account.
     * Delete the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@link ServiceResponse} object
     */
    public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.beginDelete(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization / error-mapping failures are surfaced through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
    /**
     * Maps the raw HTTP response of the begin-delete call onto a typed {@link ServiceResponse}.
     * 202 (accepted) and 204 (no content) are the success statuses; others become a {@link CloudException}.
     *
     * @param response the raw Retrofit response
     * @return the (empty-bodied) service response
     * @throws CloudException when the service rejected the request
     * @throws IOException when the body cannot be read or deserialized
     * @throws IllegalArgumentException when the response is malformed
     */
    private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
                .register(202, new TypeToken<Void>() { }.getType())
                .register(204, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Update a NetApp account.
     * Patch the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetAppAccountInner object if successful.
     */
    public NetAppAccountInner update(String resourceGroupName, String accountName, NetAppAccountPatch body) {
        // Synchronous wrapper: blocks on the async variant and unwraps the payload.
        return updateWithServiceResponseAsync(resourceGroupName, accountName, body).toBlocking().single().body();
    }
    /**
     * Update a NetApp account.
     * Patch the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<NetAppAccountInner> updateAsync(String resourceGroupName, String accountName, NetAppAccountPatch body, final ServiceCallback<NetAppAccountInner> serviceCallback) {
        return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, accountName, body), serviceCallback);
    }
/**
* Update a NetApp account.
* Patch the specified NetApp account.
*
* @param resourceGroupName The name of the resource group.
* @param accountName The name of the NetApp account
* @param body NetApp Account object supplied in the body of the operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the NetAppAccountInner object
*/
public Observable<NetAppAccountInner> updateAsync(String resourceGroupName, String accountName, NetAppAccountPatch body) {
return updateWithServiceResponseAsync(resourceGroupName, accountName, body).map(new Func1<ServiceResponse<NetAppAccountInner>, NetAppAccountInner>() {
@Override
public NetAppAccountInner call(ServiceResponse<NetAppAccountInner> response) {
return response.body();
}
});
}
    /**
     * Update a NetApp account.
     * Patch the specified NetApp account.
     *
     * @param resourceGroupName The name of the resource group.
     * @param accountName The name of the NetApp account
     * @param body NetApp Account object supplied in the body of the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the NetAppAccountInner object wrapped in a {@link ServiceResponse}
     */
    public Observable<ServiceResponse<NetAppAccountInner>> updateWithServiceResponseAsync(String resourceGroupName, String accountName, NetAppAccountPatch body) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (body == null) {
            throw new IllegalArgumentException("Parameter body is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(body);
        return service.update(this.client.subscriptionId(), resourceGroupName, accountName, body, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NetAppAccountInner>>>() {
                @Override
                public Observable<ServiceResponse<NetAppAccountInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<NetAppAccountInner> clientResponse = updateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization / error-mapping failures are surfaced through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
    /**
     * Maps the raw HTTP response of the update (PATCH) call onto a typed {@link ServiceResponse}.
     * 200 carries the updated account; 202 (accepted) carries no body.
     *
     * @param response the raw Retrofit response
     * @return the deserialized account (may have a null body for 202)
     * @throws CloudException when the service rejected the request
     * @throws IOException when the body cannot be read or deserialized
     * @throws IllegalArgumentException when the response is malformed
     */
    private ServiceResponse<NetAppAccountInner> updateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<NetAppAccountInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<NetAppAccountInner>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2012-2014 Igor Zinken - http://www.igorski.nl
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package nl.igorski.lib.utils.storage.database;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import nl.igorski.lib.utils.storage.database.definitions.DBColumn;
import nl.igorski.lib.utils.storage.database.interfaces.IEntity;
/**
* Created by IntelliJ IDEA.
* User: igorzinken
* Date: 4/18/12
* Time: 1:03 PM
* To change this template use File | Settings | File Templates.
*
* DataBase provides a convenience API for managing a SQLite table
* using a simplified ORM system
*/
public class DataBase extends SQLiteOpenHelper
{
    protected Context _context;    // Android context this helper was created with (exposed via getContext())
    protected SQLiteDatabase _db;  // database handle cached by onCreate()
    protected String _table;       // name of the table this model operates on
    private String _lastException; // message of the last failed transaction, readable via getLastError()
/**
* open an instance of a DataBase Object, the extension of
* this class should be a model
*
* @param context {Context}
* @param aTable {String} name of the table the model operates on
* @param aDataBaseName {String} name of the database to connect
* @param aDataBaseVersion {int} version of the database
*/
public DataBase( Context context, String aTable, String aDataBaseName, int aDataBaseVersion )
{
super( context, aDataBaseName, null, aDataBaseVersion );
_context = context;
_table = aTable;
try
{
getWritableDatabase(); // invokes the onCreate method
}
catch( SQLiteException e )
{
Log.d( "SYNTH", "Exception thrown attempting to access writeableDatabase" );
}
}
/* public methods */
/**
* count the amount of available entities available
* in the database
* @return {int}
*/
public int count()
{
String sql = "SELECT * FROM " + this._table;
SQLiteDatabase db = this.getWritableDatabase();
return db.rawQuery( sql, null ).getCount();
}
public boolean createTable( String tableName, DBColumn[] columns )
{
String sql = "CREATE TABLE IF NOT EXISTS " + tableName + " (";
for ( int i = 0, l = columns.length; i < l; ++i )
{
final DBColumn col = columns[ i ];
sql += createSQLFromDBColumn( col, false );
if ( i < ( l - 1 ))
sql += ", ";
}
sql += ");";
return query( sql );
}
// TODO: updateTable
public boolean dropTable( String tableName )
{
return query( "DROP TABLE IF EXISTS " + tableName );
}
/**
* when a transaction returned false, the last
* error has been caught and formatted as text
* @return {String}
*/
public String getLastError()
{
return _lastException;
}
public Context getContext()
{
return _context;
}
/* inherited */
@Override
public void onCreate( SQLiteDatabase db )
{
_db = db;
/* in your subclass' extensions it is likely
that you execute a createTable query here */
}
@Override
public void onUpgrade( SQLiteDatabase db, int oldVersion, int newVersion )
{
// Logs that the database is being upgraded
// Log.w(TAG, "Upgrading database from version " + oldVersion + " to "
// + newVersion + ", which will destroy all old data");
// TODO: migrate or else we lose all old data!
// // Kills the table and existing data
// db.execSQL("DROP TABLE IF EXISTS notes");
//
// // Recreates the database with a new version
// onCreate( db );
}
/* protected methods */
/**
* to be called by the methods in the subclass, formatting
* the models Entity into a ContentValues object
*
* @param values {ContentValues}
* @return {int} the insert ID
*/
protected int insertRow( ContentValues values )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return -1;
}
long id = db.insert( this._table, null, values );
db.close();
return ( int ) id;
}
/**
* to be called by the methods in the subclass, to be
* used to update a single row / entity in the table
* this assumes a primary key named "id" is present as a
* column in the table !
*
* @param id {int} id of the row in the database
* @param values {ContentValues} the row data with updated values
* @return {boolean} success
*/
protected boolean updateRow( int id, ContentValues values )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return false;
}
int success = db.update( this._table, values, "id = ?", new String[] { String.valueOf( id ) });
db.close();
return ( success > 0 );
}
/**
* to be called by the methods in the subclass, to be
* used to delete a single row / entity in the table
* this assumes a primary key named "id" is present as a
* column in the table !
*
* @param id {int} id of the row in the database
* @return {boolean} success
*/
protected boolean deleteRow( int id )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return false;
}
int success = db.delete( this._table, "id = ?", new String[] { String.valueOf( id ) });
db.close();
return ( success > 0 );
}
/**
* same as "deleteRow", only now a different column
* other than the primary key can be specified for removal
* @param aColumnName {String} column name to select on
* @param aValue {String} the columns value
* @return {Cursor} a Database Cursor instance containing the result
*/
protected boolean deleteByColumn( String aColumnName, String aValue )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return false;
}
int success = db.delete( this._table, aColumnName + " = ?", new String[] { aValue });
db.close();
return ( success > 0 );
}
/**
* to be used by the subclass, this selects a single item
* by its unique id, this assumes a primary key named "id" is present
* as a column in the table!
*
* @param id {int} id of the row in the database
* @return {Cursor} a Database Cursor instance containing the result
*/
protected Cursor getRowById( int id )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return null;
}
Cursor cursor = db.query( this._table, new String[] { "*" }, "id = ?",
new String[] { String.valueOf( id ) }, null, null, null, null );
if ( cursor != null && cursor.getCount() > 0 )
return cursor;
else
return null;
/**
* the returned Cursor can be cast to the model entity
* by using the overriden "parseEntity"-method, for example:
*
* if ( cursor != null )
* cursor.moveToFirst();
*
* return ( {ENTITY_CLASS} ) parseEntity( cursor );
*/
}
/**
* same as "getRowById", only now a different column
* other than the primary key can be specified for retrieval
*
* @param aColumnName {String} column name to select on
* @param aValue {String} the columns value
* @return {Cursor} a Database Cursor instance containing the result
*/
protected Cursor getRowByColumn( String aColumnName, String aValue )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return null;
}
Cursor cursor = db.query( this._table, new String[] { "*" }, aColumnName + " = ?",
new String[] { aValue }, null, null, null, null );
if ( cursor != null && cursor.getCount() > 0 )
return cursor;
else
return null;
}
/**
* to be used by the subclass, this selects all items available
* in the database, with pagination and conditions
*
* @param pageNo {int} the current page number
* @param amount {int} the amount of items to be displayed on each page
* @param conditions {String} optional conditions constructing a WHERE clause
* for instance : "WHERE visible = 1 AND created < 1335010787"
* @return {Cursor} a Database Cursor instance containing the result
*/
protected Cursor selectAll( int pageNo, int amount, String conditions )
{
SQLiteDatabase db;
try {
db = this.getWritableDatabase();
}
catch ( SQLiteException e )
{
return null;
}
String sql = "SELECT * FROM " + this._table + " ";
sql += conditions + " ";
sql += "LIMIT " + ( pageNo * amount ) + ", " + amount;
return db.rawQuery( sql, null );
/**
* the returned cursor can be looped in your subclass
* casting each result to the model entity by using
* the overridden "parseEntity"-method, for example:
* if ( cursor.moveToFirst())
* {
* do
* {
* songList.add( ( VOSong ) parseEntity( cursor ));
* }
* while ( cursor.moveToNext());
* }
*/
}
protected IEntity parseEntity( Cursor cursor )
{
class tmp implements IEntity
{
public String getType()
{
return "this is an example";
}
// each extension of this database Object
// should be a model with their own Entity
// results from getById, getList will all
// parse their objects using this overridden method
}
return new tmp();
}
/* private methods */
/**
* executes a database transaction
* @param sql {String} the SQL statement to execute
* @return {boolean} whether transaction completed successfully
*/
private boolean query( String sql )
{
try
{
_db.execSQL( sql );
}
catch ( SQLException e )
{
formatException( e );
return false;
}
return true;
}
/**
* adds a column to a table
* @param tableName {String} name of the table to operate on
* @param columnDescription {DBColumn} definition of the column to add
*
* @return {String}
*/
private boolean addField( String tableName, DBColumn columnDescription )
{
String sql = "ALTER TABLE " + tableName + " ADD " + this.createSQLFromDBColumn( columnDescription, true );
return query( sql );
}
/**
* build the column definition for add / alter queries
* @param columnDescription {DBColumn} a column as defined in a fields Array from the Tables class
* @param isUpdate {boolean} whether we're creating a update statement to an existing table
*
* @return {String}
*/
private String createSQLFromDBColumn( DBColumn columnDescription, boolean isUpdate )
{
String sql = "";
sql += columnDescription.COLUMN_NAME + " " + columnDescription.TYPE;
if ( columnDescription.PROPERTY != null )
{
/**
* if we're updating an existing table and the column
* has a NOT NULL definition, we must add a default value
* to be applied on the existing rows in the table
*/
if ( columnDescription.PROPERTY.contains( "NOT NULL" ) && isUpdate )
{
sql += " default '' " + columnDescription.PROPERTY;
}
else {
sql += " " + columnDescription.PROPERTY;
}
}
return sql;
}
/**
* whenever the execute statement catches an
* error, it is passed through this function
* @param e {SQLException}
* @return {String}
*/
private void formatException( SQLException e )
{
// TODO: generate IDs for queries and
// store these in a stack ?
_lastException = e.getMessage();
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.2-hudson-jaxb-ri-2.2-63-
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.10.29 at 05:09:52 \uc624\ud6c4 KST
//
package net.ion.open.oadr2.model.v20b.xmldsig11;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.HashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBHashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
* <p>Java class for X509DigestType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="X509DigestType">
* <simpleContent>
* <extension base="<http://www.w3.org/2001/XMLSchema>base64Binary">
* <attribute name="Algorithm" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* </extension>
* </simpleContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "X509DigestType", propOrder = {
    "value"
})
@XmlRootElement(name = "X509Digest")
public class X509Digest implements Serializable, Equals, HashCode, ToString
{

    private final static long serialVersionUID = 1L;

    /** The base64Binary digest payload (the element's text content). */
    @XmlValue
    protected byte[] value;

    /** URI identifying the digest algorithm; required by the schema. */
    @XmlAttribute(name = "Algorithm", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String algorithm;

    /**
     * Default no-arg constructor
     *
     */
    public X509Digest() {
        super();
    }

    /**
     * Fully-initialising value constructor
     *
     */
    public X509Digest(final byte[] value, final String algorithm) {
        this.value = value;
        this.algorithm = algorithm;
    }

    /**
     * Gets the value of the value property.
     *
     * @return
     *     possible object is byte[]
     */
    public byte[] getValue() {
        return value;
    }

    /**
     * Sets the value of the value property.
     *
     * @param value
     *     allowed object is byte[]
     */
    public void setValue(byte[] value) {
        this.value = value;
    }

    /**
     * Gets the value of the algorithm property.
     *
     * @return
     *     possible object is {@link String }
     */
    public String getAlgorithm() {
        return algorithm;
    }

    /**
     * Sets the value of the algorithm property.
     *
     * @param value
     *     allowed object is {@link String }
     */
    public void setAlgorithm(String value) {
        this.algorithm = value;
    }

    /** Renders this bean through the shared JAXB toString strategy. */
    public String toString() {
        final StringBuilder out = new StringBuilder();
        append(null, out, JAXBToStringStrategy.INSTANCE);
        return out.toString();
    }

    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        strategy.appendField(locator, this, "value", buffer, this.getValue());
        strategy.appendField(locator, this, "algorithm", buffer, this.getAlgorithm());
        return buffer;
    }

    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
        if (!(object instanceof X509Digest)) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final X509Digest other = ((X509Digest) object);
        final byte[] leftValue = this.getValue();
        final byte[] rightValue = other.getValue();
        if (!strategy.equals(LocatorUtils.property(thisLocator, "value", leftValue), LocatorUtils.property(thatLocator, "value", rightValue), leftValue, rightValue)) {
            return false;
        }
        final String leftAlgorithm = this.getAlgorithm();
        final String rightAlgorithm = other.getAlgorithm();
        return strategy.equals(LocatorUtils.property(thisLocator, "algorithm", leftAlgorithm), LocatorUtils.property(thatLocator, "algorithm", rightAlgorithm), leftAlgorithm, rightAlgorithm);
    }

    public boolean equals(Object object) {
        return equals(null, null, object, JAXBEqualsStrategy.INSTANCE);
    }

    public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
        int hash = 1;
        final byte[] theValue = this.getValue();
        hash = strategy.hashCode(LocatorUtils.property(locator, "value", theValue), hash, theValue);
        final String theAlgorithm = this.getAlgorithm();
        hash = strategy.hashCode(LocatorUtils.property(locator, "algorithm", theAlgorithm), hash, theAlgorithm);
        return hash;
    }

    public int hashCode() {
        return this.hashCode(null, JAXBHashCodeStrategy.INSTANCE);
    }

    /** Fluent variant of {@link #setValue(byte[])}. */
    public X509Digest withValue(byte[] value) {
        setValue(value);
        return this;
    }

    /** Fluent variant of {@link #setAlgorithm(String)}. */
    public X509Digest withAlgorithm(String value) {
        setAlgorithm(value);
        return this;
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.repl;
import com.google.common.collect.Collections2;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.common.repl.ReplScope;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc;
import org.apache.hadoop.hive.ql.ddl.view.create.CreateViewDesc;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.BootstrapEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.ConstraintEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.DatabaseEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.FunctionEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.PartitionEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.filesystem.BootstrapEventsIterator;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.filesystem.ConstraintEventsIterator;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.filesystem.FSTableEvent;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.LoadConstraint;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.LoadDatabase;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.LoadFunction;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table.LoadPartitions;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table.LoadTable;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table.TableContext;
import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.util.Context;
import org.apache.hadoop.hive.ql.exec.repl.incremental.IncrementalLoadTasksBuilder;
import org.apache.hadoop.hive.ql.exec.repl.util.AddDependencyToLeaves;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
import org.apache.hadoop.hive.ql.exec.repl.util.TaskTracker;
import org.apache.hadoop.hive.ql.exec.util.DAGTraversal;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.REPL_DUMP_SKIP_IMMUTABLE_DATA_COPY;
import static org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.LoadDatabase.AlterDatabase;
import static org.apache.hadoop.hive.ql.exec.repl.ReplAck.LOAD_ACKNOWLEDGEMENT;
import static org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.RANGER_AUTHORIZER;
public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
private static final long serialVersionUID = 1L;
private final static int ZERO_TASKS = 0;
@Override
public String getName() {
return (work.isIncrementalLoad() ? "REPL_INCREMENTAL_LOAD" : "REPL_BOOTSTRAP_LOAD");
}
@Override
public StageType getType() {
return work.isIncrementalLoad() ? StageType.REPL_INCREMENTAL_LOAD : StageType.REPL_BOOTSTRAP_LOAD;
}
  /**
   * Provides the root Tasks created as a result of this loadTask run which will be executed
   * by the driver. It does not track details across multiple runs of LoadTask.
   */
  private static class Scope {
    // whether the current database / table event contributed tasks to rootTasks
    boolean database = false, table = false;
    // tasks that have no parent and will be handed to the driver as entry points
    List<Task<?>> rootTasks = new ArrayList<>();
  }
@Override
public int execute() {
try {
Task<?> rootTask = work.getRootTask();
if (rootTask != null) {
rootTask.setChildTasks(null);
}
work.setRootTask(this);
this.parentTasks = null;
if (shouldLoadAtlasMetadata()) {
addAtlasLoadTask();
}
if (shouldLoadAuthorizationMetadata()) {
initiateAuthorizationLoadTask();
}
if (work.isIncrementalLoad()) {
return executeIncrementalLoad();
} else {
return executeBootStrapLoad();
}
} catch (RuntimeException e) {
LOG.error("replication failed with run time exception", e);
try {
work.getMetricCollector().reportEnd(Status.FAILED);
} catch (SemanticException ex) {
LOG.error("Failed to collect Metrics ", ex);
}
throw e;
} catch (Exception e) {
LOG.error("replication failed", e);
setException(e);
try {
work.getMetricCollector().reportEnd(Status.FAILED);
} catch (SemanticException ex) {
LOG.error("Failed to collect Metrics ", ex);
}
return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
}
}
private boolean shouldLoadAuthorizationMetadata() {
return conf.getBoolVar(HiveConf.ConfVars.REPL_INCLUDE_AUTHORIZATION_METADATA);
}
private void initiateAuthorizationLoadTask() throws SemanticException {
if (RANGER_AUTHORIZER.equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.REPL_AUTHORIZATION_PROVIDER_SERVICE))) {
Path rangerLoadRoot = new Path(new Path(work.dumpDirectory).getParent(), ReplUtils.REPL_RANGER_BASE_DIR);
LOG.info("Adding Import Ranger Metadata Task from {} ", rangerLoadRoot);
RangerLoadWork rangerLoadWork = new RangerLoadWork(rangerLoadRoot, work.getSourceDbName(), work.dbNameToLoadIn,
work.getMetricCollector());
Task<RangerLoadWork> rangerLoadTask = TaskFactory.get(rangerLoadWork, conf);
if (childTasks == null) {
childTasks = new ArrayList<>();
}
childTasks.add(rangerLoadTask);
} else {
throw new SemanticException("Authorizer " + conf.getVar(HiveConf.ConfVars.REPL_AUTHORIZATION_PROVIDER_SERVICE)
+ " not supported for replication ");
}
}
private void addAtlasLoadTask() throws HiveException {
Path atlasDumpDir = new Path(new Path(work.dumpDirectory).getParent(), ReplUtils.REPL_ATLAS_BASE_DIR);
LOG.info("Adding task to load Atlas metadata from {} ", atlasDumpDir);
AtlasLoadWork atlasLoadWork = new AtlasLoadWork(work.getSourceDbName(), work.dbNameToLoadIn, atlasDumpDir,
work.getMetricCollector());
Task<?> atlasLoadTask = TaskFactory.get(atlasLoadWork, conf);
if (childTasks == null) {
childTasks = new ArrayList<>();
}
childTasks.add(atlasLoadTask);
}
private boolean shouldLoadAtlasMetadata() {
return conf.getBoolVar(HiveConf.ConfVars.REPL_INCLUDE_ATLAS_METADATA);
}
  private int executeBootStrapLoad() throws Exception {
    // Task budget for a single run; when exhausted the remaining work is deferred to a
    // follow-up load task (see createBuilderTask below).
    int maxTasks = conf.getIntVar(HiveConf.ConfVars.REPL_APPROX_MAX_LOAD_TASKS);
    Context loadContext = new Context(work.dumpDirectory, conf, getHive(),
        work.sessionStateLineageState, context);
    TaskTracker loadTaskTracker = new TaskTracker(maxTasks);
    /*
      for now for simplicity we are doing just one directory ( one database ), come back to use
      of multiple databases once we have the basic flow to chain creating of tasks in place for
      a database ( directory )
    */
    BootstrapEventsIterator iterator = work.bootstrapIterator();
    ConstraintEventsIterator constraintIterator = work.constraintsIterator();
    /*
    This is used to get hold of a reference during the current creation of tasks and is initialized
    with "0" tasks such that it will be non consequential in any operations done with task tracker
    compositions.
     */
    TaskTracker dbTracker = new TaskTracker(ZERO_TASKS);
    TaskTracker tableTracker = new TaskTracker(ZERO_TASKS);
    Scope scope = new Scope();
    // Constraints are only consumed once the regular bootstrap event iterator is drained.
    boolean loadingConstraint = false;
    if (!iterator.hasNext() && constraintIterator.hasNext()) {
      loadingConstraint = true;
    }
    while ((iterator.hasNext() || (loadingConstraint && constraintIterator.hasNext()))
        && loadTaskTracker.canAddMoreTasks()) {
      BootstrapEvent next;
      if (!loadingConstraint) {
        next = iterator.next();
      } else {
        next = constraintIterator.next();
      }
      switch (next.eventType()) {
      case Database:
        DatabaseEvent dbEvent = (DatabaseEvent) next;
        dbTracker = new LoadDatabase(loadContext, dbEvent, work.dbNameToLoadIn, loadTaskTracker).tasks();
        loadTaskTracker.update(dbTracker);
        if (work.hasDbState()) {
          loadTaskTracker.update(updateDatabaseLastReplID(maxTasks, loadContext, scope));
        } else {
          // Scope might have set to database in some previous iteration of loop, so reset it to false if database
          // tracker has no tasks.
          scope.database = false;
        }
        work.updateDbEventState(dbEvent.toState());
        if (dbTracker.hasTasks()) {
          scope.rootTasks.addAll(dbTracker.tasks());
          scope.database = true;
        }
        dbTracker.debugLog("database");
        break;
      case Table:
        /*
          Implicit assumption here is that database level is processed first before table level,
          which will depend on the iterator used since it should provide the higher level directory
          listing before providing the lower level listing. This is also required such that
          the dbTracker / tableTracker are setup correctly always.
         */
        TableContext tableContext = new TableContext(dbTracker, work.dbNameToLoadIn);
        FSTableEvent tableEvent = (FSTableEvent) next;
        if (TableType.VIRTUAL_VIEW.name().equals(tableEvent.getMetaData().getTable().getTableType())) {
          // Views are recreated from metadata via a single DDL task; no data to move.
          tableTracker = new TaskTracker(1);
          tableTracker.addTask(createViewTask(tableEvent.getMetaData(), work.dbNameToLoadIn, conf));
        } else {
          LoadTable loadTable = new LoadTable(tableEvent, loadContext, iterator.replLogger(), tableContext,
              loadTaskTracker, work.getMetricCollector());
          tableTracker = loadTable.tasks(work.isIncrementalLoad());
        }
        setUpDependencies(dbTracker, tableTracker);
        if (!scope.database && tableTracker.hasTasks()) {
          scope.rootTasks.addAll(tableTracker.tasks());
          scope.table = true;
        } else {
          // Scope might have set to table in some previous iteration of loop, so reset it to false if table
          // tracker has no tasks.
          scope.table = false;
        }
        if (!TableType.VIRTUAL_VIEW.name().equals(tableEvent.getMetaData().getTable().getTableType())) {
          /*
            for table replication if we reach the max number of tasks then for the next run we will
            try to reload the same table again, this is mainly for ease of understanding the code
            as then we can avoid handling == > loading partitions for the table given that
            the creation of table lead to reaching max tasks vs, loading next table since current
            one does not have partitions.
           */
          // for a table we explicitly try to load partitions as there is no separate partitions events.
          LoadPartitions loadPartitions =
              new LoadPartitions(loadContext, iterator.replLogger(), loadTaskTracker, tableEvent,
                  work.dbNameToLoadIn, tableContext, work.getMetricCollector());
          TaskTracker partitionsTracker = loadPartitions.tasks();
          partitionsPostProcessing(iterator, scope, loadTaskTracker, tableTracker,
              partitionsTracker);
          tableTracker.debugLog("table");
          partitionsTracker.debugLog("partitions for table");
        }
        break;
      case Partition:
        /*
          This will happen only when loading tables and we reach the limit of number of tasks we can create;
          hence we know here that the table should exist and there should be a lastPartitionName
        */
        addLoadPartitionTasks(loadContext, next, dbTracker, iterator, scope, loadTaskTracker, tableTracker);
        break;
      case Function:
        loadTaskTracker.update(addLoadFunctionTasks(loadContext, iterator, next, dbTracker, scope));
        break;
      case Constraint:
        loadTaskTracker.update(addLoadConstraintsTasks(loadContext, next, dbTracker, scope));
        break;
      default:
        break;
      }
      if (!loadingConstraint && !iterator.currentDbHasNext()) {
        createEndReplLogTask(loadContext, scope, iterator.replLogger());
      }
    }
    // NOTE(review): loadingConstraint is fixed before the loop, so a run that drains the event
    // iterator picks constraints up in a later load cycle via addAnotherLoadTask — confirm intended.
    boolean addAnotherLoadTask = iterator.hasNext()
        || loadTaskTracker.hasReplicationState()
        || constraintIterator.hasNext();
    if (addAnotherLoadTask) {
      createBuilderTask(scope.rootTasks);
    }
    // Update last repl ID of the database only if the current dump is not incremental. If bootstrap
    // is combined with incremental dump, it contains only tables to bootstrap. So, needn't change
    // last repl ID of the database.
    if (!iterator.hasNext() && !constraintIterator.hasNext() && !work.isIncrementalLoad()) {
      loadTaskTracker.update(updateDatabaseLastReplID(maxTasks, loadContext, scope));
      work.updateDbEventState(null);
    }
    if (childTasks == null) {
      childTasks = new ArrayList<>();
    }
    childTasks.addAll(scope.rootTasks);
    /*
    Since there can be multiple rounds of this run all of which will be tied to the same
    query id -- generated in compile phase , adding a additional UUID to the end to print each run
    in separate files.
     */
    LOG.info("Root Tasks / Total Tasks : {} / {} ", childTasks.size(), loadTaskTracker.numberOfTasks());
    // Populate the driver context with the scratch dir info from the repl context, so that the
    // temp dirs will be cleaned up later
    context.getFsScratchDirs().putAll(loadContext.pathInfo.getFsScratchDirs());
    if (!HiveConf.getBoolVar(conf, REPL_DUMP_SKIP_IMMUTABLE_DATA_COPY)) {
      createReplLoadCompleteAckTask();
    }
    LOG.info("completed load task run : {}", work.executedLoadTask());
    return 0;
  }
private TaskTracker addLoadPartitionTasks(Context loadContext, BootstrapEvent next, TaskTracker dbTracker,
BootstrapEventsIterator iterator, Scope scope, TaskTracker loadTaskTracker,
TaskTracker tableTracker) throws Exception {
PartitionEvent event = (PartitionEvent) next;
TableContext tableContext = new TableContext(dbTracker, work.dbNameToLoadIn);
LoadPartitions loadPartitions =
new LoadPartitions(loadContext, iterator.replLogger(), tableContext, loadTaskTracker,
event.asTableEvent(), work.dbNameToLoadIn, event.lastPartitionReplicated(), work.getMetricCollector());
/*
the tableTracker here should be a new instance and not an existing one as this can
only happen when we break in between loading partitions.
*/
TaskTracker partitionsTracker = loadPartitions.tasks();
partitionsPostProcessing(iterator, scope, loadTaskTracker, tableTracker,
partitionsTracker);
partitionsTracker.debugLog("partitions");
return partitionsTracker;
}
private TaskTracker addLoadConstraintsTasks(Context loadContext,
BootstrapEvent next,
TaskTracker dbTracker,
Scope scope) throws IOException, SemanticException {
LoadConstraint loadConstraint =
new LoadConstraint(loadContext, (ConstraintEvent) next, work.dbNameToLoadIn, dbTracker);
TaskTracker constraintTracker = loadConstraint.tasks();
scope.rootTasks.addAll(constraintTracker.tasks());
constraintTracker.debugLog("constraints");
return constraintTracker;
}
private TaskTracker addLoadFunctionTasks(Context loadContext, BootstrapEventsIterator iterator, BootstrapEvent next,
TaskTracker dbTracker, Scope scope) throws IOException, SemanticException {
LoadFunction loadFunction = new LoadFunction(loadContext, iterator.replLogger(),
(FunctionEvent) next, work.dbNameToLoadIn, dbTracker, work.getMetricCollector());
TaskTracker functionsTracker = loadFunction.tasks();
if (!scope.database) {
scope.rootTasks.addAll(functionsTracker.tasks());
} else {
setUpDependencies(dbTracker, functionsTracker);
}
functionsTracker.debugLog("functions");
return functionsTracker;
}
public static Task<?> createViewTask(MetaData metaData, String dbNameToLoadIn, HiveConf conf)
throws SemanticException {
Table table = new Table(metaData.getTable());
String dbName = dbNameToLoadIn == null ? table.getDbName() : dbNameToLoadIn;
TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName);
String dbDotView = tableName.getNotEmptyDbTable();
CreateViewDesc desc = new CreateViewDesc(dbDotView, table.getAllCols(), null, table.getParameters(),
table.getPartColNames(), false, false, false, table.getSd().getInputFormat(),
table.getSd().getOutputFormat(),
table.getSd().getSerdeInfo().getSerializationLib());
String originalText = table.getViewOriginalText();
String expandedText = table.getViewExpandedText();
if (!dbName.equals(table.getDbName())) {
// TODO: If the DB name doesn't match with the metadata from dump, then need to rewrite the original and expanded
// texts using new DB name. Currently it refers to the source database name.
}
desc.setViewOriginalText(originalText);
desc.setViewExpandedText(expandedText);
desc.setPartCols(table.getPartCols());
desc.setReplicationSpec(metaData.getReplicationSpec());
desc.setOwnerName(table.getOwner());
return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), conf);
}
/**
* If replication policy is changed between previous and current load, then the excluded tables in
* the new replication policy will be dropped.
*
* @throws HiveException Failed to get/drop the tables.
*/
private void dropTablesExcludedInReplScope(ReplScope replScope) throws HiveException {
// If all tables are included in replication scope, then nothing to be dropped.
if ((replScope == null) || replScope.includeAllTables()) {
return;
}
Hive db = getHive();
String dbName = replScope.getDbName();
// List all the tables that are excluded in the current repl scope.
Iterable<String> tableNames = Collections2.filter(db.getAllTables(dbName),
tableName -> {
assert (tableName != null);
// Skip internal temp tables created for VALUES clauses; drop only
// real tables that fall outside the new replication policy.
return !tableName.toLowerCase().startsWith(
SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX.toLowerCase())
&& !replScope.tableIncludedInReplScope(tableName);
});
for (String table : tableNames) {
// NOTE(review): second argument presumably means "ignore if missing /
// force" — confirm against Hive.dropTable's signature.
db.dropTable(dbName + "." + table, true);
}
LOG.info("Tables in the Database: {} that are excluded in the replication scope are dropped.",
dbName);
}
/**
 * Appends a task that writes the load acknowledgement file into the dump
 * directory, but only once every piece of repl-load work (incremental events
 * and bootstrap tasks alike) has been scheduled and nothing remains pending.
 * The ack task is chained after all existing child tasks so it runs last.
 */
private void createReplLoadCompleteAckTask() {
if ((work.isIncrementalLoad() && !work.incrementalLoadTasksBuilder().hasMoreWork() && !work.hasBootstrapLoadTasks())
|| (!work.isIncrementalLoad() && !work.hasBootstrapLoadTasks())) {
//All repl load tasks are executed and status is 0, create the task to add the acknowledgement
AckWork replLoadAckWork = new AckWork(
new Path(work.dumpDirectory, LOAD_ACKNOWLEDGEMENT.toString()));
Task<AckWork> loadAckWorkTask = TaskFactory.get(replLoadAckWork, conf);
if (childTasks.isEmpty()) {
childTasks.add(loadAckWorkTask);
} else {
// Hang the ack task off every leaf so it executes after all other work.
DAGTraversal.traverse(childTasks, new AddDependencyToLeaves(Collections.singletonList(loadAckWorkTask)));
}
}
}
/**
 * Adds a repl-state logging task that records the end of the load. For an
 * incremental load the logged properties carry the last applied event id; for
 * a bootstrap load they are taken from the dumped database's parameters.
 * The log task is chained after all existing root tasks so it runs last.
 */
private void createEndReplLogTask(Context context, Scope scope,
ReplLogger replLogger) throws SemanticException {
Map<String, String> dbProps;
if (work.isIncrementalLoad()) {
dbProps = new HashMap<>();
dbProps.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(),
work.incrementalLoadTasksBuilder().eventTo().toString());
} else {
Database dbInMetadata = work.databaseEvent(context.hiveConf).dbInMetadata(work.dbNameToLoadIn);
dbProps = dbInMetadata.getParameters();
}
ReplStateLogWork replLogWork = new ReplStateLogWork(replLogger, dbProps, work.getMetricCollector());
Task<ReplStateLogWork> replLogTask = TaskFactory.get(replLogWork, conf);
if (scope.rootTasks.isEmpty()) {
scope.rootTasks.add(replLogTask);
} else {
// Attach to every leaf of the DAG so logging happens after all load work.
DAGTraversal.traverse(scope.rootTasks, new AddDependencyToLeaves(Collections.singletonList(replLogTask)));
}
}
/**
* There was a database update done before and we want to make sure we update the last repl
* id on this database as we are now going to switch to processing a new database.
* This has to be last task in the graph since if there are intermediate tasks and the last.repl.id
* is a root level task then in the execution phase the root level tasks will get executed first,
* however if any of the child tasks of the bootstrap load failed then even though the bootstrap has failed
* the last repl status of the target database will return a valid value, which will not represent
* the state of the database.
*/
private TaskTracker updateDatabaseLastReplID(int maxTasks, Context context, Scope scope)
throws SemanticException {
/*
we don't want to put any limits on this task as this is essential before we start
processing new database events.
*/
TaskTracker taskTracker =
new AlterDatabase(context, work.databaseEvent(context.hiveConf), work.dbNameToLoadIn,
new TaskTracker(maxTasks)).tasks();
// Chain the alter-database task after every leaf so it is the final task
// of the DAG (see the method javadoc for why it must run last).
AddDependencyToLeaves function = new AddDependencyToLeaves(taskTracker.tasks());
DAGTraversal.traverse(scope.rootTasks, function);
return taskTracker;
}
/**
 * Wires partition-load tasks after the table tasks, accounts for both
 * trackers against the overall load budget, and propagates any in-flight
 * replication state back to the event iterator so a subsequent run can
 * resume from where this one left off.
 */
private void partitionsPostProcessing(BootstrapEventsIterator iterator,
Scope scope, TaskTracker loadTaskTracker, TaskTracker tableTracker,
TaskTracker partitionsTracker) {
setUpDependencies(tableTracker, partitionsTracker);
if (!scope.database && !scope.table) {
// Neither a database nor a table task exists in this DAG: partition
// tasks have no parent and become root tasks.
scope.rootTasks.addAll(partitionsTracker.tasks());
}
loadTaskTracker.update(tableTracker);
loadTaskTracker.update(partitionsTracker);
if (partitionsTracker.hasReplicationState()) {
iterator.setReplicationState(partitionsTracker.replicationState());
}
}
/**
 * Makes every child task depend on every parent task, so children only run
 * once all parents have completed. When the parent tracker holds no tasks
 * yet, the children are adopted directly into the parent tracker instead.
 */
private void setUpDependencies(TaskTracker parentTasks, TaskTracker childTasks) {
if (!parentTasks.hasTasks()) {
// Nothing to depend on: the children become the parent tracker's tasks.
for (Task<?> adopted : childTasks.tasks()) {
parentTasks.addTask(adopted);
}
return;
}
// Full cross-product: each child waits for each parent.
for (Task<?> parent : parentTasks.tasks()) {
for (Task<?> child : childTasks.tasks()) {
parent.addDependentTask(child);
}
}
}
/**
 * Appends a follow-up ReplLoadWork task after every leaf of the given DAG so
 * that the remaining (not yet generated) load work is picked up once the
 * current batch of tasks finishes.
 */
private void createBuilderTask(List<Task<?>> rootTasks) {
// Use loadTask as dependencyCollection
Task<ReplLoadWork> loadTask = TaskFactory.get(work, conf);
DAGTraversal.traverse(rootTasks, new AddDependencyToLeaves(loadTask));
}
/**
 * Drives one cycle of incremental replication load: drops tables newly
 * excluded from the repl scope, builds a bounded batch of event-apply tasks,
 * records the last applied event id on the target database once all events
 * are consumed, and switches to (or schedules) bootstrap load for any tables
 * that still need it. Returns 0 on success; task generation errors propagate
 * as exceptions.
 */
private int executeIncrementalLoad() throws Exception {
// If replication policy is changed between previous and current repl load, then drop the tables
// that are excluded in the new replication policy.
dropTablesExcludedInReplScope(work.currentReplScope);
IncrementalLoadTasksBuilder builder = work.incrementalLoadTasksBuilder();
// If incremental events are already applied, then check and perform if need to bootstrap any tables.
if (!builder.hasMoreWork() && work.isLastReplIDUpdated()) {
if (work.hasBootstrapLoadTasks()) {
LOG.debug("Current incremental dump have tables to be bootstrapped. Switching to bootstrap "
+ "mode after applying all events.");
return executeBootStrapLoad();
}
}
// Local list first; merged into this.childTasks at the end of the method.
List<Task<?>> childTasks = new ArrayList<>();
int maxTasks = conf.getIntVar(HiveConf.ConfVars.REPL_APPROX_MAX_LOAD_TASKS);
TaskTracker tracker = new TaskTracker(maxTasks);
childTasks.add(builder.build(context, getHive(), LOG, tracker));
// If there are no more events to be applied, add a task to update the last.repl.id of the
// target database to the event id of the last event considered by the dump. Next
// incremental cycle won't consider the events in this dump again if it starts from this id.
if (!builder.hasMoreWork()) {
// The name of the database to be loaded into is either specified directly in REPL LOAD
// command i.e. when dbNameToLoadIn has a valid dbname or is available through dump
// metadata during table level replication.
String dbName = work.dbNameToLoadIn;
if (dbName == null || StringUtils.isBlank(dbName)) {
if (work.currentReplScope != null) {
String replScopeDbName = work.currentReplScope.getDbName();
if (replScopeDbName != null && !"*".equals(replScopeDbName)) {
dbName = replScopeDbName;
}
}
}
// If we are replicating to multiple databases at a time, it's not
// possible to know which all databases we are replicating into and hence we can not
// update repl id in all those databases.
if (StringUtils.isNotBlank(dbName)) {
String lastEventid = builder.eventTo().toString();
Map<String, String> mapProp = new HashMap<>();
mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), lastEventid);
AlterDatabaseSetPropertiesDesc alterDbDesc =
new AlterDatabaseSetPropertiesDesc(dbName, mapProp,
new ReplicationSpec(lastEventid, lastEventid));
Task<?> updateReplIdTask =
TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), conf);
// The repl-id update must run only after every event task of this batch.
DAGTraversal.traverse(childTasks, new AddDependencyToLeaves(updateReplIdTask));
work.setLastReplIDUpdated(true);
LOG.debug("Added task to set last repl id of db " + dbName + " to " + lastEventid);
}
}
// Once all the incremental events are applied, enable bootstrap of tables if exist.
if (builder.hasMoreWork() || work.hasBootstrapLoadTasks()) {
DAGTraversal.traverse(childTasks, new AddDependencyToLeaves(TaskFactory.get(work, conf)));
}
if (this.childTasks == null) {
this.childTasks = new ArrayList<>();
}
this.childTasks.addAll(childTasks);
createReplLoadCompleteAckTask();
return 0;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.VersionUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerFinishedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeReconnectEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStartedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStatusEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils;
import org.apache.hadoop.yarn.util.RackResolver;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import com.google.common.annotations.VisibleForTesting;
public class ResourceTrackerService extends AbstractService implements
ResourceTracker {
private static final Log LOG = LogFactory.getLog(ResourceTrackerService.class);
private static final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
private final RMContext rmContext;
private final NodesListManager nodesListManager;
private final NMLivelinessMonitor nmLivelinessMonitor;
private final RMContainerTokenSecretManager containerTokenSecretManager;
private final NMTokenSecretManagerInRM nmTokenSecretManager;
private long nextHeartBeatInterval;
private Server server;
private InetSocketAddress resourceTrackerAddress;
private String minimumNodeManagerVersion;
private int minAllocMb;
private int minAllocVcores;
private boolean isDistributedNodeLabelsConf;
/**
 * Creates the service that NodeManagers register with and heartbeat to.
 * All collaborators are injected; RPC server setup happens in serviceStart().
 */
public ResourceTrackerService(RMContext rmContext,
NodesListManager nodesListManager,
NMLivelinessMonitor nmLivelinessMonitor,
RMContainerTokenSecretManager containerTokenSecretManager,
NMTokenSecretManagerInRM nmTokenSecretManager) {
super(ResourceTrackerService.class.getName());
this.rmContext = rmContext;
this.nodesListManager = nodesListManager;
this.nmLivelinessMonitor = nmLivelinessMonitor;
this.containerTokenSecretManager = containerTokenSecretManager;
this.nmTokenSecretManager = nmTokenSecretManager;
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
// Bind address for the resource-tracker RPC endpoint (honors RM_BIND_HOST).
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
RackResolver.init(conf);
// Heartbeat interval handed back to NMs in each heartbeat response;
// must be positive.
nextHeartBeatInterval =
conf.getLong(YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS,
YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS);
if (nextHeartBeatInterval <= 0) {
throw new YarnRuntimeException("Invalid Configuration. "
+ YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS
+ " should be larger than 0.");
}
// Minimum resources a node must offer to be allowed to register.
minAllocMb = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
minAllocVcores = conf.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
minimumNodeManagerVersion = conf.get(
YarnConfiguration.RM_NODEMANAGER_MINIMUM_VERSION,
YarnConfiguration.DEFAULT_RM_NODEMANAGER_MINIMUM_VERSION);
// When node labels are configured in "distributed" mode, labels reported
// by the NMs themselves are accepted (see registerNodeManager/nodeHeartbeat).
isDistributedNodeLabelsConf =
YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
// ResourceTrackerServer authenticates NodeManager via Kerberos if
// security is enabled, so no secretManager.
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
this.server =
rpc.getServer(ResourceTracker.class, this, resourceTrackerAddress,
conf, null,
conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT));
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
InputStream inputStream =
this.rmContext.getConfigurationProvider()
.getConfigurationInputStream(conf,
YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
if (inputStream != null) {
conf.addResource(inputStream);
}
refreshServiceAcls(conf, RMPolicyProvider.getInstance());
}
this.server.start();
// Publish the actual bound address (port may have been ephemeral).
conf.updateConnectAddr(YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
server.getListenerAddress());
}
@Override
protected void serviceStop() throws Exception {
// Server may be null if serviceStart() never ran or failed early.
if (this.server != null) {
this.server.stop();
}
super.serviceStop();
}
/**
 * Helper method to handle received ContainerStatus. If this corresponds to
 * the completion of a master-container of a managed AM,
 * we call the handler for RMAppAttemptContainerFinishedEvent.
 */
@SuppressWarnings("unchecked")
@VisibleForTesting
void handleNMContainerStatus(NMContainerStatus containerStatus, NodeId nodeId) {
ApplicationAttemptId appAttemptId =
containerStatus.getContainerId().getApplicationAttemptId();
RMApp rmApp =
rmContext.getRMApps().get(appAttemptId.getApplicationId());
if (rmApp == null) {
LOG.error("Received finished container : "
+ containerStatus.getContainerId()
+ " for unknown application " + appAttemptId.getApplicationId()
+ " Skipping.");
return;
}
if (rmApp.getApplicationSubmissionContext().getUnmanagedAM()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Ignoring container completion status for unmanaged AM "
+ rmApp.getApplicationId());
}
return;
}
RMAppAttempt rmAppAttempt = rmApp.getRMAppAttempt(appAttemptId);
if (rmAppAttempt == null) {
// The attempt may no longer be tracked (e.g. superseded by a newer
// attempt); nothing to notify, and dereferencing it would NPE.
LOG.info("Ignoring container completion status for unknown attempt "
+ appAttemptId);
return;
}
Container masterContainer = rmAppAttempt.getMasterContainer();
if (masterContainer == null) {
// No AM container has been allocated for this attempt yet, so the
// reported container cannot be its master container.
return;
}
if (masterContainer.getId().equals(containerStatus.getContainerId())
&& containerStatus.getContainerState() == ContainerState.COMPLETE) {
ContainerStatus status =
ContainerStatus.newInstance(containerStatus.getContainerId(),
containerStatus.getContainerState(), containerStatus.getDiagnostics(),
containerStatus.getContainerExitStatus());
// sending master container finished event.
RMAppAttemptContainerFinishedEvent evt =
new RMAppAttemptContainerFinishedEvent(appAttemptId, status,
nodeId);
rmContext.getDispatcher().getEventHandler().handle(evt);
}
}
/**
 * Converts a set of {@code NodeLabel} records into the set of their names.
 * Returns null when the input is null, so callers can distinguish "labels
 * not reported" from an empty label set.
 */
static Set<String> convertToStringSet(Set<NodeLabel> nodeLabels) {
if (nodeLabels == null) {
return null;
}
Set<String> labelNames = new HashSet<String>();
for (NodeLabel nodeLabel : nodeLabels) {
labelNames.add(nodeLabel.getName());
}
return labelNames;
}
@SuppressWarnings("unchecked")
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
// Registration flow: validate NM version, host inclusion and minimum
// capability; then insert (or reconnect) the RMNode, roll NM tokens,
// replay container statuses, and finally accept NM-reported node labels
// when distributed label configuration is enabled.
NodeId nodeId = request.getNodeId();
String host = nodeId.getHost();
int cmPort = nodeId.getPort();
int httpPort = request.getHttpPort();
Resource capability = request.getResource();
String nodeManagerVersion = request.getNMVersion();
RegisterNodeManagerResponse response = recordFactory
.newRecordInstance(RegisterNodeManagerResponse.class);
if (!minimumNodeManagerVersion.equals("NONE")) {
// "EqualToRM" is a sentinel meaning "NM must be at least the RM version".
if (minimumNodeManagerVersion.equals("EqualToRM")) {
minimumNodeManagerVersion = YarnVersionInfo.getVersion();
}
if ((nodeManagerVersion == null) ||
(VersionUtil.compareVersions(nodeManagerVersion,minimumNodeManagerVersion)) < 0) {
String message =
"Disallowed NodeManager Version " + nodeManagerVersion
+ ", is less than the minimum version "
+ minimumNodeManagerVersion + " sending SHUTDOWN signal to "
+ "NodeManager.";
LOG.info(message);
response.setDiagnosticsMessage(message);
response.setNodeAction(NodeAction.SHUTDOWN);
return response;
}
}
// Check if this node is a 'valid' node
if (!this.nodesListManager.isValidNode(host)) {
String message =
"Disallowed NodeManager from " + host
+ ", Sending SHUTDOWN signal to the NodeManager.";
LOG.info(message);
response.setDiagnosticsMessage(message);
response.setNodeAction(NodeAction.SHUTDOWN);
return response;
}
// Check if this node has minimum allocations
if (capability.getMemory() < minAllocMb
|| capability.getVirtualCores() < minAllocVcores) {
String message =
"NodeManager from " + host
+ " doesn't satisfy minimum allocations, Sending SHUTDOWN"
+ " signal to the NodeManager.";
LOG.info(message);
response.setDiagnosticsMessage(message);
response.setNodeAction(NodeAction.SHUTDOWN);
return response;
}
// Hand the current secret keys to the NM so it can validate tokens.
response.setContainerTokenMasterKey(containerTokenSecretManager
.getCurrentKey());
response.setNMTokenMasterKey(nmTokenSecretManager
.getCurrentKey());
RMNode rmNode = new RMNodeImpl(nodeId, rmContext, host, cmPort, httpPort,
resolve(host), capability, nodeManagerVersion);
RMNode oldNode = this.rmContext.getRMNodes().putIfAbsent(nodeId, rmNode);
if (oldNode == null) {
// First registration of this NodeId: fire the started event.
this.rmContext.getDispatcher().getEventHandler().handle(
new RMNodeStartedEvent(nodeId, request.getNMContainerStatuses(),
request.getRunningApplications()));
} else {
// The NM restarted (or re-registered): treat as a reconnect.
LOG.info("Reconnect from the node at: " + host);
this.nmLivelinessMonitor.unregister(nodeId);
// Reset heartbeat ID since node just restarted.
oldNode.resetLastNodeHeartBeatResponse();
this.rmContext
.getDispatcher()
.getEventHandler()
.handle(
new RMNodeReconnectEvent(nodeId, rmNode, request
.getRunningApplications(), request.getNMContainerStatuses()));
}
// On every node manager register we will be clearing NMToken keys if
// present for any running application.
this.nmTokenSecretManager.removeNodeKey(nodeId);
this.nmLivelinessMonitor.register(nodeId);
// Handle received container status, this should be processed after new
// RMNode inserted
if (!rmContext.isWorkPreservingRecoveryEnabled()) {
if (!request.getNMContainerStatuses().isEmpty()) {
LOG.info("received container statuses on node manager register :"
+ request.getNMContainerStatuses());
for (NMContainerStatus status : request.getNMContainerStatuses()) {
handleNMContainerStatus(status, nodeId);
}
}
}
// Update node's labels to RM's NodeLabelManager.
Set<String> nodeLabels = convertToStringSet(request.getNodeLabels());
if (isDistributedNodeLabelsConf && nodeLabels != null) {
try {
updateNodeLabelsFromNMReport(nodeLabels, nodeId);
response.setAreNodeLabelsAcceptedByRM(true);
} catch (IOException ex) {
// Ensure the exception is captured in the response
response.setDiagnosticsMessage(ex.getMessage());
response.setAreNodeLabelsAcceptedByRM(false);
}
}
StringBuilder message = new StringBuilder();
message.append("NodeManager from node ").append(host).append("(cmPort: ")
.append(cmPort).append(" httpPort: ");
message.append(httpPort).append(") ")
.append("registered with capability: ").append(capability);
message.append(", assigned nodeId ").append(nodeId);
if (response.getAreNodeLabelsAcceptedByRM()) {
message.append(", node labels { ").append(
StringUtils.join(",", nodeLabels) + " } ");
}
LOG.info(message.toString());
response.setNodeAction(NodeAction.NORMAL);
response.setRMIdentifier(ResourceManager.getClusterTimeStamp());
response.setRMVersion(YarnVersionInfo.getVersion());
return response;
}
@SuppressWarnings("unchecked")
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
NodeStatus remoteNodeStatus = request.getNodeStatus();
/**
 * Here is the node heartbeat sequence...
 * 1. Check if it's a valid (i.e. not excluded) node
 * 2. Check if it's a registered node
 * 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
 * 4. Send healthStatus to RMNode
 * 5. Update node's labels if distributed Node Labels configuration is enabled
 */
NodeId nodeId = remoteNodeStatus.getNodeId();
// 1. Check if it's a valid (i.e. not excluded) node
if (!this.nodesListManager.isValidNode(nodeId.getHost())) {
String message =
"Disallowed NodeManager nodeId: " + nodeId + " hostname: "
+ nodeId.getHost();
LOG.info(message);
return YarnServerBuilderUtils.newNodeHeartbeatResponse(
NodeAction.SHUTDOWN, message);
}
// 2. Check if it's a registered node
RMNode rmNode = this.rmContext.getRMNodes().get(nodeId);
if (rmNode == null) {
/* node does not exist */
String message = "Node not found resyncing " + remoteNodeStatus.getNodeId();
LOG.info(message);
return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
message);
}
// Send ping
this.nmLivelinessMonitor.receivedPing(nodeId);
// 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
NodeHeartbeatResponse lastNodeHeartbeatResponse = rmNode.getLastNodeHeartBeatResponse();
if (remoteNodeStatus.getResponseId() + 1 == lastNodeHeartbeatResponse
.getResponseId()) {
// Same responseId seen again: the NM missed our last response; replay it.
LOG.info("Received duplicate heartbeat from node "
+ rmNode.getNodeAddress()+ " responseId=" + remoteNodeStatus.getResponseId());
return lastNodeHeartbeatResponse;
} else if (remoteNodeStatus.getResponseId() + 1 < lastNodeHeartbeatResponse
.getResponseId()) {
// NM is more than one response behind: force it to resync.
String message =
"Too far behind rm response id:"
+ lastNodeHeartbeatResponse.getResponseId() + " nm response id:"
+ remoteNodeStatus.getResponseId();
LOG.info(message);
// TODO: Just sending reboot is not enough. Think more.
this.rmContext.getDispatcher().getEventHandler().handle(
new RMNodeEvent(nodeId, RMNodeEventType.REBOOTING));
return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
message);
}
// Heartbeat response
NodeHeartbeatResponse nodeHeartBeatResponse = YarnServerBuilderUtils
.newNodeHeartbeatResponse(lastNodeHeartbeatResponse.
getResponseId() + 1, NodeAction.NORMAL, null, null, null, null,
nextHeartBeatInterval);
rmNode.updateNodeHeartbeatResponseForCleanup(nodeHeartBeatResponse);
populateKeys(request, nodeHeartBeatResponse);
// Piggy-back any system credentials (e.g. delegation tokens for apps)
// onto the response.
ConcurrentMap<ApplicationId, ByteBuffer> systemCredentials =
rmContext.getSystemCredentialsForApps();
if (!systemCredentials.isEmpty()) {
nodeHeartBeatResponse.setSystemCredentialsForApps(systemCredentials);
}
// 4. Send status to RMNode, saving the latest response.
RMNodeStatusEvent nodeStatusEvent =
new RMNodeStatusEvent(nodeId, remoteNodeStatus.getNodeHealthStatus(),
remoteNodeStatus.getContainersStatuses(),
remoteNodeStatus.getKeepAliveApplications(), nodeHeartBeatResponse);
if (request.getLogAggregationReportsForApps() != null
&& !request.getLogAggregationReportsForApps().isEmpty()) {
nodeStatusEvent.setLogAggregationReportsForApps(request
.getLogAggregationReportsForApps());
}
this.rmContext.getDispatcher().getEventHandler().handle(nodeStatusEvent);
// 5. Update node's labels to RM's NodeLabelManager.
if (isDistributedNodeLabelsConf && request.getNodeLabels() != null) {
try {
updateNodeLabelsFromNMReport(
convertToStringSet(request.getNodeLabels()), nodeId);
nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(true);
} catch (IOException ex) {
//ensure the error message is captured and sent across in response
nodeHeartBeatResponse.setDiagnosticsMessage(ex.getMessage());
nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(false);
}
}
return nodeHeartBeatResponse;
}
@SuppressWarnings("unchecked")
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
// Graceful NM shutdown: stop liveness tracking and fire a SHUTDOWN event.
// Unknown nodes are ignored (the response is empty either way).
UnRegisterNodeManagerResponse response = recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
NodeId nodeId = request.getNodeId();
RMNode rmNode = this.rmContext.getRMNodes().get(nodeId);
if (rmNode == null) {
LOG.info("Node not found, ignoring the unregister from node id : "
+ nodeId);
return response;
}
LOG.info("Node with node id : " + nodeId
+ " has shutdown, hence unregistering the node.");
this.nmLivelinessMonitor.unregister(nodeId);
this.rmContext.getDispatcher().getEventHandler()
.handle(new RMNodeEvent(nodeId, RMNodeEventType.SHUTDOWN));
return response;
}
/**
 * Replaces the labels stored for {@code nodeId} in the RM's NodeLabelManager
 * with the set the NM reported. On rejection the failure is logged and
 * rethrown (with context) so callers can relay the message back to the NM.
 *
 * @throws IOException if the NodeLabelManager rejects the label update
 */
private void updateNodeLabelsFromNMReport(Set<String> nodeLabels,
NodeId nodeId) throws IOException {
try {
Map<NodeId, Set<String>> labelsUpdate =
new HashMap<NodeId, Set<String>>();
labelsUpdate.put(nodeId, nodeLabels);
this.rmContext.getNodeLabelManager().replaceLabelsOnNode(labelsUpdate);
if (LOG.isDebugEnabled()) {
LOG.debug("Node Labels {" + StringUtils.join(",", nodeLabels)
+ "} from Node " + nodeId + " were Accepted from RM");
}
} catch (IOException ex) {
StringBuilder errorMessage = new StringBuilder();
errorMessage.append("Node Labels {")
.append(StringUtils.join(",", nodeLabels))
.append("} reported from NM with ID ").append(nodeId)
.append(" was rejected from RM with exception message as : ")
.append(ex.getMessage());
LOG.error(errorMessage, ex);
// Re-wrap with the full context message, preserving the cause.
throw new IOException(errorMessage.toString(), ex);
}
}
/**
 * If either secret-manager has rolled over to a new master key that the NM
 * does not yet know (key ids differ), attaches the new key to the heartbeat
 * response so the NM can pick it up.
 */
private void populateKeys(NodeHeartbeatRequest request,
NodeHeartbeatResponse nodeHeartBeatResponse) {
// Check if node's masterKey needs to be updated and if the currentKey has
// roller over, send it across
// ContainerTokenMasterKey
MasterKey nextMasterKeyForNode =
this.containerTokenSecretManager.getNextKey();
if (nextMasterKeyForNode != null
&& (request.getLastKnownContainerTokenMasterKey().getKeyId()
!= nextMasterKeyForNode.getKeyId())) {
nodeHeartBeatResponse.setContainerTokenMasterKey(nextMasterKeyForNode);
}
// NMTokenMasterKey
nextMasterKeyForNode = this.nmTokenSecretManager.getNextKey();
if (nextMasterKeyForNode != null
&& (request.getLastKnownNMTokenMasterKey().getKeyId()
!= nextMasterKeyForNode.getKeyId())) {
nodeHeartBeatResponse.setNMTokenMasterKey(nextMasterKeyForNode);
}
}
/**
 * resolving the network topology.
 * @param hostName the hostname of this node.
 * @return the resolved {@link Node} for this nodemanager.
 */
public static Node resolve(String hostName) {
// Delegates to the RackResolver initialized in serviceInit().
return RackResolver.resolve(hostName);
}
/**
 * Reloads the RPC server's service-level ACLs from the given configuration.
 * Called at startup (when hadoop security authorization is on) and on
 * admin-triggered refreshes.
 */
void refreshServiceAcls(Configuration configuration,
PolicyProvider policyProvider) {
this.server.refreshServiceAclWithLoadedConfiguration(configuration,
policyProvider);
}
@VisibleForTesting
/** Exposes the underlying RPC server for tests. */
public Server getServer() {
return this.server;
}
}
| |
package org.faudroids.doublestacks.ui;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.games.Games;
import com.google.android.gms.games.multiplayer.Invitation;
import com.google.android.gms.games.multiplayer.Multiplayer;
import com.google.example.games.basegameutils.BaseGameUtils;
import org.faudroids.doublestacks.R;
import org.faudroids.doublestacks.google.ConnectionManager;
import javax.inject.Inject;
import roboguice.activity.RoboActivity;
import roboguice.inject.ContentView;
import roboguice.inject.InjectView;
import timber.log.Timber;
@ContentView(R.layout.activity_main)
public class MainActivity extends RoboActivity implements
ActionListener,
GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener {
private static final int REQUEST_LOGIN = 42;
private boolean resolvingConnectionFailure = false;
private boolean autoStartLoginFlow = true;
private boolean loginClicked = false;
@Inject private ConnectionManager connectionManager;
@Inject private GoogleApiClient googleApiClient;
@Inject private WindowUtils windowUtils;
@Inject private TutorialUtils tutorialUtils;
// flag for showing menu after tutorial
private MenuFragment menuFragment = null;
@InjectView(R.id.spinner) private View spinnerContainer;
@InjectView(R.id.spinner_image) private ImageView spinnerImage;
protected SpinnerUtils spinnerUtils;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Game UI is portrait-only.
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
// Views are injected by RoboGuice before onCreate runs, so they can be
// wrapped here.
spinnerUtils = new SpinnerUtils(spinnerContainer, spinnerImage);
}
@Override
public void onStart() {
super.onStart();
Timber.d("connecting to google api client");
// prevent lost connection when screen sleeps
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// Listen for connection results before initiating the connection.
googleApiClient.registerConnectionCallbacks(this);
googleApiClient.registerConnectionFailedListener(this);
connectionManager.connect();
// Spinner stays visible until onConnected/onConnectionFailed fires.
spinnerUtils.showSpinner();
}
@Override
public void onStop() {
Timber.d("disconnecting from google api client");
// Mirror onStart(): disconnect, unregister listeners, release the
// keep-screen-on flag.
connectionManager.disconnect();
googleApiClient.unregisterConnectionCallbacks(this);
googleApiClient.unregisterConnectionFailedListener(this);
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
super.onStop();
}
@Override
public void onConnected(Bundle connectionHint) {
Timber.d("Google login successful");
// check if pending invitation
Invitation invitation = null;
if (connectionHint != null) {
invitation = connectionHint.getParcelable(Multiplayer.EXTRA_INVITATION);
}
spinnerUtils.hideSpinner();
// check for tutorial
MenuFragment fragment = MenuFragment.createInstance(invitation);
if (tutorialUtils.shouldShowTutorial()) {
// First run: show the tutorial now and defer the menu until
// onTutorialEnd() is called.
tutorialUtils.onShowTutorial();
onTutorialStart();
this.menuFragment = fragment;
} else {
showFragment(fragment, true);
}
}
@Override
public void onConnectionSuspended(int i) {
// nothing to do for now
Timber.d("Connection suspended (" + i + ")");
}
    /**
     * Connection to Google Play services failed. Attempts an interactive
     * resolution when the failure follows an explicit login click or the
     * one-shot auto-login, then falls back to the login screen.
     */
    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        Timber.d("onConnectionFailed (" + connectionResult.getErrorCode() + ")");
        // already resolving?
        if (resolvingConnectionFailure) return;
        // check for if login should be handled
        if (loginClicked || autoStartLoginFlow) {
            // both triggers are one-shot: cleared before attempting resolution
            autoStartLoginFlow = false;
            loginClicked = false;
            resolvingConnectionFailure = true;
            // Attempt to resolve the connection failure using BaseGameUtils.
            if (!BaseGameUtils.resolveConnectionFailure(
                    this,
                    googleApiClient,
                    connectionResult,
                    REQUEST_LOGIN,
                    getString(R.string.error_login_other))) {
                resolvingConnectionFailure = false;
            }
        }
        // NOTE(review): the login fragment is shown even while a resolution
        // dialog may still be in progress — confirm this is intended.
        spinnerUtils.hideSpinner();
        showFragment(new LoginFragment(), false);
    }
@Override
public void onActivityResult(int request, int response, Intent data) {
switch (request) {
case REQUEST_LOGIN:
if (response != RESULT_OK) {
Timber.d("failed to resolve login error");
} else {
googleApiClient.connect();
}
}
}
    /**
     * User tapped login: remember the explicit intent (so a connection failure
     * is resolved interactively) and start connecting.
     */
    @Override
    public void onLoginClicked() {
        loginClicked = true;
        googleApiClient.connect();
    }
@Override
public void onLogoutClicked() {
// log out and restart
Games.signOut(googleApiClient);
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}
    /** Swaps in the game fragment when a game begins (kept on back stack). */
    @Override
    public void onGameStarted() {
        showFragment(new GameFragment(), true);
    }
    /** Returns to the previous screen when the game ends. */
    @Override
    public void onGameStopped() {
        getFragmentManager().popBackStack();
    }
    /** Opens the settings screen (kept on back stack). */
    @Override
    public void onSettingsClicked() {
        showFragment(new SettingsFragment(), true);
    }
    /** Shows the tutorial fragment (kept on back stack). */
    @Override
    public void onTutorialStart() {
        showFragment(new TutorialFragment(), true);
    }
@Override
public void onTutorialEnd() {
getFragmentManager().popBackStack();
if (menuFragment != null) showFragment(menuFragment, true);
}
    /**
     * Re-enters immersive mode whenever the window regains focus, and extends
     * the layout under the system decor when immersive mode succeeded.
     */
    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        if (hasFocus) {
            boolean success = windowUtils.startImmersiveMode(getWindow());
            // only adjust layout flags when immersive mode actually engaged
            if (!success) return;
            WindowManager.LayoutParams attributes = getWindow().getAttributes();
            attributes.flags |= WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS;
            getWindow().setAttributes(attributes);
        }
    }
private void showFragment(Fragment fragment, boolean addToBackStack) {
FragmentTransaction transaction = getFragmentManager().beginTransaction();
transaction.replace(R.id.fragment_container, fragment);
if (addToBackStack) transaction.addToBackStack(null);
transaction.commit();
}
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.docs;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.CacheRuntimeConfiguration;
import org.ehcache.config.ResourceType;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.core.config.DefaultConfiguration;
import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration;
import org.ehcache.jsr107.Eh107Configuration;
import org.ehcache.jsr107.EhcacheCachingProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.pany.domain.Client;
import com.pany.domain.Product;
import java.io.File;
import java.util.Random;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.CompleteConfiguration;
import javax.cache.configuration.Configuration;
import javax.cache.configuration.Factory;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.CreatedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.spi.CachingProvider;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* This class uses unit test assertions but serves mostly as the live code repository for Asciidoctor documentation.
*/
public class EhCache107ConfigurationIntegrationDocTest {

  private static final Logger LOGGER = LoggerFactory.getLogger(EhCache107ConfigurationIntegrationDocTest.class);

  // Fresh provider/manager pair per test; closed again in tearDown().
  private CacheManager cacheManager;
  private CachingProvider cachingProvider;

  @Rule
  public TemporaryFolder tempFolder = new TemporaryFolder();

  /** Obtains the default JSR-107 caching provider and its cache manager. */
  @Before
  public void setUp() throws Exception {
    cachingProvider = Caching.getCachingProvider();
    cacheManager = cachingProvider.getCacheManager();
  }

  /**
   * Closes the manager before the provider; each close is null-guarded so a
   * failed setUp does not trigger a secondary NPE here.
   */
  @After
  public void tearDown() throws Exception {
    if(cacheManager != null) {
      cacheManager.close();
    }
    if(cachingProvider != null) {
      cachingProvider.close();
    }
  }

  /**
   * Documentation sample: plain JSR-107 configuration and basic put/get.
   * The numbered callouts are referenced from the Asciidoctor docs.
   */
  @Test
  public void basicConfiguration() throws Exception {
    // tag::basicConfigurationExample[]
    CachingProvider provider = Caching.getCachingProvider(); // <1>
    CacheManager cacheManager = provider.getCacheManager(); // <2>
    MutableConfiguration<Long, String> configuration =
        new MutableConfiguration<Long, String>() // <3>
            .setTypes(Long.class, String.class) // <4>
            .setStoreByValue(false) // <5>
            .setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(Duration.ONE_MINUTE)); // <6>
    Cache<Long, String> cache = cacheManager.createCache("jCache", configuration); // <7>
    cache.put(1L, "one"); // <8>
    String value = cache.get(1L); // <9>
    // end::basicConfigurationExample[]
    assertThat(value, is("one"));
  }

  /**
   * Documentation sample: unwrapping an Ehcache runtime configuration from a
   * cache created via a plain JSR-107 MutableConfiguration, then verifying
   * the default JSR-107 expiry semantics on the unwrapped policy.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testGettingToEhcacheConfiguration() {
    // tag::mutableConfigurationExample[]
    MutableConfiguration<Long, String> configuration = new MutableConfiguration<>();
    configuration.setTypes(Long.class, String.class);
    Cache<Long, String> cache = cacheManager.createCache("someCache", configuration); // <1>
    CompleteConfiguration<Long, String> completeConfiguration = cache.getConfiguration(CompleteConfiguration.class); // <2>
    Eh107Configuration<Long, String> eh107Configuration = cache.getConfiguration(Eh107Configuration.class); // <3>
    CacheRuntimeConfiguration<Long, String> runtimeConfiguration = eh107Configuration.unwrap(CacheRuntimeConfiguration.class); // <4>
    // end::mutableConfigurationExample[]
    assertThat(completeConfiguration, notNullValue());
    assertThat(runtimeConfiguration, notNullValue());
    // Check uses default JSR-107 expiry
    long nanoTime = System.nanoTime();
    // seed is logged so a failing run can be reproduced
    LOGGER.info("Seeding random with {}", nanoTime);
    Random random = new Random(nanoTime);
    assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForCreation(random.nextLong(), Long.toOctalString(random.nextLong())),
        equalTo(org.ehcache.expiry.ExpiryPolicy.INFINITE));
    assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForAccess(random.nextLong(),
        () -> Long.toOctalString(random.nextLong())), nullValue());
    assertThat(runtimeConfiguration.getExpiryPolicy().getExpiryForUpdate(random.nextLong(),
        () -> Long.toOctalString(random.nextLong()), Long.toOctalString(random.nextLong())), nullValue());
  }

  /**
   * Documentation sample: creating a JSR-107 cache from a native Ehcache
   * configuration; such caches cannot expose a CompleteConfiguration.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testUsingEhcacheConfiguration() throws Exception {
    // tag::ehcacheBasedConfigurationExample[]
    CacheConfiguration<Long, String> cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class,
        ResourcePoolsBuilder.heap(10)).build(); // <1>
    Cache<Long, String> cache = cacheManager.createCache("myCache",
        Eh107Configuration.fromEhcacheCacheConfiguration(cacheConfiguration)); // <2>
    Eh107Configuration<Long, String> configuration = cache.getConfiguration(Eh107Configuration.class);
    configuration.unwrap(CacheConfiguration.class); // <3>
    configuration.unwrap(CacheRuntimeConfiguration.class); // <4>
    try {
      cache.getConfiguration(CompleteConfiguration.class); // <5>
      throw new AssertionError("IllegalArgumentException expected");
    } catch (IllegalArgumentException iaex) {
      // Expected
    }
    // end::ehcacheBasedConfigurationExample[]
  }

  /**
   * Documentation sample: bootstrapping a cache manager purely from an
   * Ehcache XML file through the standard JSR-107 API.
   */
  @Test
  public void testWithoutEhcacheExplicitDependencyCanSpecifyXML() throws Exception {
    // tag::jsr107UsingXMLConfigExample[]
    CachingProvider cachingProvider = Caching.getCachingProvider();
    CacheManager manager = cachingProvider.getCacheManager( // <1>
        getClass().getResource("/org/ehcache/docs/ehcache-jsr107-config.xml").toURI(), // <2>
        getClass().getClassLoader()); // <3>
    Cache<Long, Product> readyCache = manager.getCache("ready-cache", Long.class, Product.class); // <4>
    // end::jsr107UsingXMLConfigExample[]
    assertThat(readyCache, notNullValue());
  }

  /**
   * Documentation sample: caches created through the JSR-107 API pick up
   * template defaults (sizing, store-by-value, expiry) from an Ehcache XML
   * template-override file, without any Ehcache-specific code.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Exception {
    CacheManager manager = cachingProvider.getCacheManager(
        getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(),
        getClass().getClassLoader());
    // tag::jsr107SupplementWithTemplatesExample[]
    MutableConfiguration<Long, Client> mutableConfiguration = new MutableConfiguration<>();
    mutableConfiguration.setTypes(Long.class, Client.class); // <1>
    Cache<Long, Client> anyCache = manager.createCache("anyCache", mutableConfiguration); // <2>
    CacheRuntimeConfiguration<Long, Client> ehcacheConfig = (CacheRuntimeConfiguration<Long, Client>)anyCache.getConfiguration(
        Eh107Configuration.class).unwrap(CacheRuntimeConfiguration.class); // <3>
    ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(); // <4>
    Cache<Long, Client> anotherCache = manager.createCache("byRefCache", mutableConfiguration);
    assertFalse(anotherCache.getConfiguration(Configuration.class).isStoreByValue()); // <5>
    MutableConfiguration<String, Client> otherConfiguration = new MutableConfiguration<>();
    otherConfiguration.setTypes(String.class, Client.class);
    otherConfiguration.setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(Duration.ONE_MINUTE)); // <6>
    Cache<String, Client> foosCache = manager.createCache("foos", otherConfiguration);// <7>
    CacheRuntimeConfiguration<Long, Client> foosEhcacheConfig = (CacheRuntimeConfiguration<Long, Client>)foosCache.getConfiguration(
        Eh107Configuration.class).unwrap(CacheRuntimeConfiguration.class);
    Client client1 = new Client("client1", 1);
    foosEhcacheConfig.getExpiryPolicy().getExpiryForCreation(42L, client1).toMinutes(); // <8>
    CompleteConfiguration<String, String> foosConfig = foosCache.getConfiguration(CompleteConfiguration.class);
    try {
      final Factory<ExpiryPolicy> expiryPolicyFactory = foosConfig.getExpiryPolicyFactory();
      ExpiryPolicy expiryPolicy = expiryPolicyFactory.create(); // <9>
      throw new AssertionError("Expected UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
      // Expected
    }
    // end::jsr107SupplementWithTemplatesExample[]
    assertThat(ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L));
    assertThat(foosEhcacheConfig.getExpiryPolicy().getExpiryForCreation(42L, client1),
        is(java.time.Duration.ofMinutes(2)));
  }

  /**
   * Verifies that template overrides control store-by-value vs store-by-ref:
   * store-by-value caches return a copy of the stored object, store-by-ref
   * caches return the same instance.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testTemplateOverridingStoreByValue() throws Exception {
    cacheManager = cachingProvider.getCacheManager(
        getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(),
        getClass().getClassLoader());
    MutableConfiguration<Long, Client> mutableConfiguration = new MutableConfiguration<>();
    mutableConfiguration.setTypes(Long.class, Client.class);
    Client client1 = new Client("client1", 1);
    Cache<Long, Client> myCache = null;
    myCache = cacheManager.createCache("anyCache", mutableConfiguration);
    myCache.put(1L, client1);
    assertNotSame(client1, myCache.get(1L));
    assertTrue(myCache.getConfiguration(Configuration.class).isStoreByValue());
    myCache = cacheManager.createCache("byRefCache", mutableConfiguration);
    myCache.put(1L, client1);
    assertSame(client1, myCache.get(1L));
    assertFalse(myCache.getConfiguration(Configuration.class).isStoreByValue());
    myCache = cacheManager.createCache("weirdCache1", mutableConfiguration);
    myCache.put(1L, client1);
    assertNotSame(client1, myCache.get(1L));
    assertTrue(myCache.getConfiguration(Configuration.class).isStoreByValue());
    myCache = cacheManager.createCache("weirdCache2", mutableConfiguration);
    myCache.put(1L, client1);
    assertSame(client1, myCache.get(1L));
    assertFalse(myCache.getConfiguration(Configuration.class).isStoreByValue());
  }

  /**
   * Complement of the test above: the configuration explicitly requests
   * store-by-ref, and the XML template may still override it per cache.
   */
  @Test
  public void testTemplateOverridingStoreByRef() throws Exception {
    cacheManager = cachingProvider.getCacheManager(
        getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(),
        getClass().getClassLoader());
    MutableConfiguration<Long, Client> mutableConfiguration = new MutableConfiguration<>();
    mutableConfiguration.setTypes(Long.class, Client.class).setStoreByValue(false);
    Cache<Long, Client> myCache;
    Client client1 = new Client("client1", 1);
    myCache = cacheManager.createCache("anotherCache", mutableConfiguration);
    myCache.put(1L, client1);
    assertSame(client1, myCache.get(1L));
    myCache = cacheManager.createCache("byValCache", mutableConfiguration);
    myCache.put(1L, client1);
    assertNotSame(client1, myCache.get(1L));
  }

  /**
   * Smoke test: the cache-through XML configuration parses and the manager
   * can be created from it (failure would throw out of getCacheManager).
   */
  @Test
  public void testCacheThroughAtomicsXMLValid() throws Exception {
    cacheManager = cachingProvider.getCacheManager(
        getClass().getResource("/org/ehcache/docs/ehcache-jsr107-cache-through.xml").toURI(),
        getClass().getClassLoader());
  }

  /**
   * Documentation sample: supplying an Ehcache manager-level configuration
   * (here, a persistence directory) through the Ehcache-specific provider.
   */
  @Test
  public void testCacheManagerLevelConfiguration() throws Exception {
    // tag::ehcacheCacheManagerConfigurationExample[]
    CachingProvider cachingProvider = Caching.getCachingProvider();
    EhcacheCachingProvider ehcacheProvider = (EhcacheCachingProvider) cachingProvider; // <1>
    DefaultConfiguration configuration = new DefaultConfiguration(ehcacheProvider.getDefaultClassLoader(),
        new DefaultPersistenceConfiguration(getPersistenceDirectory())); // <2>
    CacheManager cacheManager = ehcacheProvider.getCacheManager(ehcacheProvider.getDefaultURI(), configuration); // <3>
    // end::ehcacheCacheManagerConfigurationExample[]
    assertThat(cacheManager, notNullValue());
  }

  // Persistence root for tests; the TemporaryFolder rule cleans it up.
  private File getPersistenceDirectory() {
    return tempFolder.getRoot();
  }
}
| |
/*
* Copyright (c) 2008-2015, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.simulator.tests.icache;
import com.hazelcast.cache.impl.HazelcastServerCacheManager;
import com.hazelcast.cache.impl.HazelcastServerCachingProvider;
import com.hazelcast.client.cache.impl.HazelcastClientCacheManager;
import com.hazelcast.client.cache.impl.HazelcastClientCachingProvider;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.simulator.probes.probes.IntervalProbe;
import com.hazelcast.simulator.test.TestContext;
import com.hazelcast.simulator.test.TestRunner;
import com.hazelcast.simulator.test.annotations.Performance;
import com.hazelcast.simulator.test.annotations.Run;
import com.hazelcast.simulator.test.annotations.Setup;
import com.hazelcast.simulator.test.annotations.Teardown;
import com.hazelcast.simulator.test.annotations.Warmup;
import com.hazelcast.simulator.tests.helpers.KeyLocality;
import com.hazelcast.simulator.utils.ThreadSpawner;
import javax.cache.Cache;
import javax.cache.CacheException;
import javax.cache.CacheManager;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.simulator.tests.helpers.HazelcastTestUtils.isMemberNode;
import static com.hazelcast.simulator.tests.helpers.HazelcastTestUtils.waitClusterSize;
import static com.hazelcast.simulator.tests.helpers.KeyUtils.generateStringKeys;
import static com.hazelcast.simulator.utils.GeneratorUtils.generateStrings;
/**
 * Simulator stress test that performs a configurable read/write mix of
 * String keys/values against a JCache (ICache) instance, measuring put/get
 * latency via interval probes and reporting total operation throughput.
 */
public class StringICacheTest {
    private static final ILogger LOGGER = Logger.getLogger(StringICacheTest.class);
    // properties
    public int writePercentage = 10;
    public int threadCount = 10;
    public int keyLength = 10;
    public int valueLength = 10;
    public int keyCount = 10000;
    public int valueCount = 10000;
    public int logFrequency = 10000;
    public int performanceUpdateFrequency = 10000;
    // if we use the putAndGet (so returning a value) or the put (which returns void)
    public boolean useGetAndPut = true;
    public String basename = "stringicache";
    public KeyLocality keyLocality = KeyLocality.RANDOM;
    public int minNumberOfMembers = 0;
    // latency probes injected by the simulator framework
    public IntervalProbe putLatency;
    public IntervalProbe getLatency;
    private Cache<String, String> cache;
    private String[] keys;
    private String[] values;
    // global operation counter shared by all worker threads
    private final AtomicLong operations = new AtomicLong();
    private TestContext testContext;
    private HazelcastInstance targetInstance;

    /**
     * Validates the write percentage, builds a member- or client-side cache
     * manager depending on where this instance runs, and creates the cache.
     *
     * @throws IllegalArgumentException if writePercentage is outside [0, 100]
     */
    @Setup
    public void setup(TestContext testContext) throws Exception {
        if (writePercentage < 0) {
            throw new IllegalArgumentException("Write percentage can't be smaller than 0");
        }
        if (writePercentage > 100) {
            throw new IllegalArgumentException("Write percentage can't be larger than 100");
        }
        this.testContext = testContext;
        targetInstance = testContext.getTargetInstance();
        CacheManager cacheManager;
        if (isMemberNode(targetInstance)) {
            HazelcastServerCachingProvider hcp = new HazelcastServerCachingProvider();
            cacheManager = new HazelcastServerCacheManager(
                    hcp, targetInstance, hcp.getDefaultURI(), hcp.getDefaultClassLoader(), null);
        } else {
            HazelcastClientCachingProvider hcp = new HazelcastClientCachingProvider();
            cacheManager = new HazelcastClientCacheManager(
                    hcp, targetInstance, hcp.getDefaultURI(), hcp.getDefaultClassLoader(), null);
        }
        CacheConfig<String, String> config = new CacheConfig<String, String>();
        config.setName(basename);
        try {
            cacheManager.createCache(basename, config);
        } catch (CacheException hack) {
            // temp hack to deal with multiple nodes wanting to make the same cache.
            LOGGER.severe(hack);
        }
        cache = cacheManager.getCache(basename);
    }

    @Teardown
    public void teardown() throws Exception {
        cache.close();
    }

    /**
     * Waits for the cluster to reach the required size, then generates the
     * key/value pools and seeds the cache with keyCount random pairs.
     * NOTE(review): keys are picked at random, so not every key is
     * guaranteed to be preloaded — confirm this is acceptable for warmup.
     */
    @Warmup(global = false)
    public void warmup() throws InterruptedException {
        waitClusterSize(LOGGER, targetInstance, minNumberOfMembers);
        keys = generateStringKeys(keyCount, keyLength, keyLocality, testContext.getTargetInstance());
        values = generateStrings(valueCount, valueLength);
        Random random = new Random();
        for (int k = 0; k < keys.length; k++) {
            String key = keys[random.nextInt(keyCount)];
            String value = values[random.nextInt(valueCount)];
            cache.put(key, value);
        }
    }

    /** Spawns threadCount workers and blocks until the test is stopped. */
    @Run
    public void run() {
        ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
        for (int k = 0; k < threadCount; k++) {
            spawner.spawn(new Worker());
        }
        spawner.awaitCompletion();
    }

    @Performance
    public long getOperationCount() {
        return operations.get();
    }

    /**
     * Worker loop: per iteration, either writes (with probability
     * writePercentage) or reads a random key, timing each with its probe.
     * The shared counter is updated in batches of performanceUpdateFrequency
     * to limit contention, with the remainder flushed on exit.
     */
    private class Worker implements Runnable {
        private final Random random = new Random();
        @Override
        public void run() {
            long iteration = 0;
            while (!testContext.isStopped()) {
                String key = randomKey();
                if (shouldWrite(iteration)) {
                    putLatency.started();
                    String value = randomValue();
                    if (useGetAndPut) {
                        cache.getAndPut(key, value);
                    } else {
                        cache.put(key, value);
                    }
                    putLatency.done();
                } else {
                    getLatency.started();
                    cache.get(key);
                    getLatency.done();
                }
                iteration++;
                if (iteration % logFrequency == 0) {
                    LOGGER.info(Thread.currentThread().getName() + " At iteration: " + iteration);
                }
                if (iteration % performanceUpdateFrequency == 0) {
                    operations.addAndGet(performanceUpdateFrequency);
                }
            }
            // flush the operations not yet reported in a full batch
            operations.addAndGet(iteration % performanceUpdateFrequency);
        }
        private String randomValue() {
            return values[random.nextInt(values.length)];
        }
        private String randomKey() {
            int length = keys.length;
            return keys[random.nextInt(length)];
        }
        // Deterministic per-iteration mix: iterations whose (i % 100) falls
        // below writePercentage are writes; 0 and 100 short-circuit.
        private boolean shouldWrite(long iteration) {
            if (writePercentage == 0) {
                return false;
            } else if (writePercentage == 100) {
                return true;
            } else {
                return (iteration % 100) < writePercentage;
            }
        }
    }

    // Allows running this test standalone outside the simulator harness.
    public static void main(String[] args) throws Exception {
        StringICacheTest test = new StringICacheTest();
        test.writePercentage = 10;
        new TestRunner<StringICacheTest>(test).run();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.parser;
import com.facebook.presto.sql.tree.AddColumn;
import com.facebook.presto.sql.tree.AliasedRelation;
import com.facebook.presto.sql.tree.AllColumns;
import com.facebook.presto.sql.tree.Analyze;
import com.facebook.presto.sql.tree.ArithmeticBinaryExpression;
import com.facebook.presto.sql.tree.ArithmeticUnaryExpression;
import com.facebook.presto.sql.tree.ArrayConstructor;
import com.facebook.presto.sql.tree.AtTimeZone;
import com.facebook.presto.sql.tree.BetweenPredicate;
import com.facebook.presto.sql.tree.BinaryLiteral;
import com.facebook.presto.sql.tree.BindExpression;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.Call;
import com.facebook.presto.sql.tree.CallArgument;
import com.facebook.presto.sql.tree.Cast;
import com.facebook.presto.sql.tree.CharLiteral;
import com.facebook.presto.sql.tree.CoalesceExpression;
import com.facebook.presto.sql.tree.ColumnDefinition;
import com.facebook.presto.sql.tree.Commit;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.CreateSchema;
import com.facebook.presto.sql.tree.CreateTable;
import com.facebook.presto.sql.tree.CreateTableAsSelect;
import com.facebook.presto.sql.tree.CreateView;
import com.facebook.presto.sql.tree.Cube;
import com.facebook.presto.sql.tree.CurrentPath;
import com.facebook.presto.sql.tree.CurrentTime;
import com.facebook.presto.sql.tree.CurrentUser;
import com.facebook.presto.sql.tree.Deallocate;
import com.facebook.presto.sql.tree.DecimalLiteral;
import com.facebook.presto.sql.tree.Delete;
import com.facebook.presto.sql.tree.DereferenceExpression;
import com.facebook.presto.sql.tree.DescribeInput;
import com.facebook.presto.sql.tree.DescribeOutput;
import com.facebook.presto.sql.tree.DoubleLiteral;
import com.facebook.presto.sql.tree.DropColumn;
import com.facebook.presto.sql.tree.DropSchema;
import com.facebook.presto.sql.tree.DropTable;
import com.facebook.presto.sql.tree.DropView;
import com.facebook.presto.sql.tree.Except;
import com.facebook.presto.sql.tree.Execute;
import com.facebook.presto.sql.tree.ExistsPredicate;
import com.facebook.presto.sql.tree.Explain;
import com.facebook.presto.sql.tree.ExplainFormat;
import com.facebook.presto.sql.tree.ExplainOption;
import com.facebook.presto.sql.tree.ExplainType;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.Extract;
import com.facebook.presto.sql.tree.FrameBound;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.GenericLiteral;
import com.facebook.presto.sql.tree.Grant;
import com.facebook.presto.sql.tree.GroupBy;
import com.facebook.presto.sql.tree.GroupingElement;
import com.facebook.presto.sql.tree.GroupingOperation;
import com.facebook.presto.sql.tree.GroupingSets;
import com.facebook.presto.sql.tree.Identifier;
import com.facebook.presto.sql.tree.IfExpression;
import com.facebook.presto.sql.tree.InListExpression;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.Insert;
import com.facebook.presto.sql.tree.Intersect;
import com.facebook.presto.sql.tree.IntervalLiteral;
import com.facebook.presto.sql.tree.IsNotNullPredicate;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.Isolation;
import com.facebook.presto.sql.tree.Join;
import com.facebook.presto.sql.tree.JoinCriteria;
import com.facebook.presto.sql.tree.JoinOn;
import com.facebook.presto.sql.tree.JoinUsing;
import com.facebook.presto.sql.tree.LambdaArgumentDeclaration;
import com.facebook.presto.sql.tree.LambdaExpression;
import com.facebook.presto.sql.tree.Lateral;
import com.facebook.presto.sql.tree.LikeClause;
import com.facebook.presto.sql.tree.LikePredicate;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.NaturalJoin;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.NodeLocation;
import com.facebook.presto.sql.tree.NotExpression;
import com.facebook.presto.sql.tree.NullIfExpression;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.OrderBy;
import com.facebook.presto.sql.tree.Parameter;
import com.facebook.presto.sql.tree.PathElement;
import com.facebook.presto.sql.tree.PathSpecification;
import com.facebook.presto.sql.tree.Prepare;
import com.facebook.presto.sql.tree.Property;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.QuantifiedComparisonExpression;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QueryBody;
import com.facebook.presto.sql.tree.QuerySpecification;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.RenameColumn;
import com.facebook.presto.sql.tree.RenameSchema;
import com.facebook.presto.sql.tree.RenameTable;
import com.facebook.presto.sql.tree.ResetSession;
import com.facebook.presto.sql.tree.Revoke;
import com.facebook.presto.sql.tree.Rollback;
import com.facebook.presto.sql.tree.Rollup;
import com.facebook.presto.sql.tree.Row;
import com.facebook.presto.sql.tree.SampledRelation;
import com.facebook.presto.sql.tree.SearchedCaseExpression;
import com.facebook.presto.sql.tree.Select;
import com.facebook.presto.sql.tree.SelectItem;
import com.facebook.presto.sql.tree.SetPath;
import com.facebook.presto.sql.tree.SetSession;
import com.facebook.presto.sql.tree.ShowCatalogs;
import com.facebook.presto.sql.tree.ShowColumns;
import com.facebook.presto.sql.tree.ShowCreate;
import com.facebook.presto.sql.tree.ShowFunctions;
import com.facebook.presto.sql.tree.ShowGrants;
import com.facebook.presto.sql.tree.ShowSchemas;
import com.facebook.presto.sql.tree.ShowSession;
import com.facebook.presto.sql.tree.ShowStats;
import com.facebook.presto.sql.tree.ShowTables;
import com.facebook.presto.sql.tree.SimpleCaseExpression;
import com.facebook.presto.sql.tree.SimpleGroupBy;
import com.facebook.presto.sql.tree.SingleColumn;
import com.facebook.presto.sql.tree.SortItem;
import com.facebook.presto.sql.tree.StartTransaction;
import com.facebook.presto.sql.tree.Statement;
import com.facebook.presto.sql.tree.StringLiteral;
import com.facebook.presto.sql.tree.SubqueryExpression;
import com.facebook.presto.sql.tree.SubscriptExpression;
import com.facebook.presto.sql.tree.Table;
import com.facebook.presto.sql.tree.TableElement;
import com.facebook.presto.sql.tree.TableSubquery;
import com.facebook.presto.sql.tree.TimeLiteral;
import com.facebook.presto.sql.tree.TimestampLiteral;
import com.facebook.presto.sql.tree.TransactionAccessMode;
import com.facebook.presto.sql.tree.TransactionMode;
import com.facebook.presto.sql.tree.TryExpression;
import com.facebook.presto.sql.tree.Union;
import com.facebook.presto.sql.tree.Unnest;
import com.facebook.presto.sql.tree.Use;
import com.facebook.presto.sql.tree.Values;
import com.facebook.presto.sql.tree.WhenClause;
import com.facebook.presto.sql.tree.Window;
import com.facebook.presto.sql.tree.WindowFrame;
import com.facebook.presto.sql.tree.With;
import com.facebook.presto.sql.tree.WithQuery;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
class AstBuilder
extends SqlBaseBaseVisitor<Node>
{
    // Running index assigned to '?' parameters in the order they are
    // encountered while building the AST.
    private int parameterPosition;
    // Options controlling parsing behavior (e.g. decimal literal handling).
    private final ParsingOptions parsingOptions;

    AstBuilder(ParsingOptions parsingOptions)
    {
        this.parsingOptions = requireNonNull(parsingOptions, "parsingOptions is null");
    }
    // Entry point for a single top-level statement: delegate to the
    // statement alternative.
    @Override
    public Node visitSingleStatement(SqlBaseParser.SingleStatementContext context)
    {
        return visit(context.statement());
    }
    // Entry point for parsing a standalone expression (no enclosing query).
    @Override
    public Node visitStandaloneExpression(SqlBaseParser.StandaloneExpressionContext context)
    {
        return visit(context.expression());
    }
    // Entry point for parsing a standalone SQL path specification.
    @Override
    public Node visitStandalonePathSpecification(SqlBaseParser.StandalonePathSpecificationContext context)
    {
        return visit(context.pathSpecification());
    }
// ******************* statements **********************
    // USE [catalog.]schema — catalog is optional, schema is required.
    @Override
    public Node visitUse(SqlBaseParser.UseContext context)
    {
        return new Use(
                getLocation(context),
                visitIfPresent(context.catalog, Identifier.class),
                (Identifier) visit(context.schema));
    }
@Override
public Node visitCreateSchema(SqlBaseParser.CreateSchemaContext context)
{
List<Property> properties = ImmutableList.of();
if (context.properties() != null) {
properties = visit(context.properties().property(), Property.class);
}
return new CreateSchema(
getLocation(context),
getQualifiedName(context.qualifiedName()),
context.EXISTS() != null,
properties);
}
@Override
public Node visitDropSchema(SqlBaseParser.DropSchemaContext context)
{
return new DropSchema(
getLocation(context),
getQualifiedName(context.qualifiedName()),
context.EXISTS() != null,
context.CASCADE() != null);
}
    // ALTER SCHEMA name RENAME TO newName
    @Override
    public Node visitRenameSchema(SqlBaseParser.RenameSchemaContext context)
    {
        return new RenameSchema(
                getLocation(context),
                getQualifiedName(context.qualifiedName()),
                (Identifier) visit(context.identifier()));
    }
@Override
public Node visitCreateTableAsSelect(SqlBaseParser.CreateTableAsSelectContext context)
{
Optional<String> comment = Optional.empty();
if (context.COMMENT() != null) {
comment = Optional.of(((StringLiteral) visit(context.string())).getValue());
}
Optional<List<Identifier>> columnAliases = Optional.empty();
if (context.columnAliases() != null) {
columnAliases = Optional.of(visit(context.columnAliases().identifier(), Identifier.class));
}
List<Property> properties = ImmutableList.of();
if (context.properties() != null) {
properties = visit(context.properties().property(), Property.class);
}
return new CreateTableAsSelect(
getLocation(context),
getQualifiedName(context.qualifiedName()),
(Query) visit(context.query()),
context.EXISTS() != null,
properties,
context.NO() == null,
columnAliases,
comment);
}
    @Override
    public Node visitCreateTable(SqlBaseParser.CreateTableContext context)
    {
        // CREATE TABLE [IF NOT EXISTS] name (elements) [COMMENT ...] [WITH (properties)]
        Optional<String> comment = Optional.empty();
        if (context.COMMENT() != null) {
            comment = Optional.of(((StringLiteral) visit(context.string())).getValue());
        }
        List<Property> properties = ImmutableList.of();
        if (context.properties() != null) {
            properties = visit(context.properties().property(), Property.class);
        }
        return new CreateTable(
                getLocation(context),
                getQualifiedName(context.qualifiedName()),
                visit(context.tableElement(), TableElement.class),
                context.EXISTS() != null,
                properties,
                comment);
    }
    @Override
    public Node visitShowCreateTable(SqlBaseParser.ShowCreateTableContext context)
    {
        // SHOW CREATE TABLE name
        return new ShowCreate(getLocation(context), ShowCreate.Type.TABLE, getQualifiedName(context.qualifiedName()));
    }
@Override
public Node visitDropTable(SqlBaseParser.DropTableContext context)
{
return new DropTable(getLocation(context), getQualifiedName(context.qualifiedName()), context.EXISTS() != null);
}
    @Override
    public Node visitDropView(SqlBaseParser.DropViewContext context)
    {
        // DROP VIEW [IF EXISTS] name
        return new DropView(getLocation(context), getQualifiedName(context.qualifiedName()), context.EXISTS() != null);
    }
    @Override
    public Node visitInsertInto(SqlBaseParser.InsertIntoContext context)
    {
        // INSERT INTO name [(column aliases)] query
        Optional<List<Identifier>> columnAliases = Optional.empty();
        if (context.columnAliases() != null) {
            columnAliases = Optional.of(visit(context.columnAliases().identifier(), Identifier.class));
        }
        // NOTE(review): unlike the sibling visitors, no getLocation(context) is
        // passed here — presumably Insert has no location constructor; confirm.
        return new Insert(
                getQualifiedName(context.qualifiedName()),
                columnAliases,
                (Query) visit(context.query()));
    }
    @Override
    public Node visitDelete(SqlBaseParser.DeleteContext context)
    {
        // DELETE FROM table [WHERE condition]
        return new Delete(
                getLocation(context),
                new Table(getLocation(context), getQualifiedName(context.qualifiedName())),
                visitIfPresent(context.booleanExpression(), Expression.class));
    }
    @Override
    public Node visitRenameTable(SqlBaseParser.RenameTableContext context)
    {
        // ALTER TABLE from RENAME TO to
        return new RenameTable(getLocation(context), getQualifiedName(context.from), getQualifiedName(context.to));
    }
    @Override
    public Node visitRenameColumn(SqlBaseParser.RenameColumnContext context)
    {
        // ALTER TABLE tableName RENAME COLUMN from TO to
        return new RenameColumn(
                getLocation(context),
                getQualifiedName(context.tableName),
                (Identifier) visit(context.from),
                (Identifier) visit(context.to));
    }
@Override
public Node visitAnalyze(SqlBaseParser.AnalyzeContext context)
{
List<Property> properties = ImmutableList.of();
if (context.properties() != null) {
properties = visit(context.properties().property(), Property.class);
}
return new Analyze(
getLocation(context),
getQualifiedName(context.qualifiedName()),
properties);
}
    @Override
    public Node visitAddColumn(SqlBaseParser.AddColumnContext context)
    {
        // ALTER TABLE name ADD COLUMN definition
        return new AddColumn(getLocation(context), getQualifiedName(context.qualifiedName()), (ColumnDefinition) visit(context.columnDefinition()));
    }
    @Override
    public Node visitDropColumn(SqlBaseParser.DropColumnContext context)
    {
        // ALTER TABLE tableName DROP COLUMN column
        return new DropColumn(getLocation(context), getQualifiedName(context.tableName), (Identifier) visit(context.column));
    }
    @Override
    public Node visitCreateView(SqlBaseParser.CreateViewContext context)
    {
        // CREATE [OR REPLACE] VIEW name AS query
        return new CreateView(
                getLocation(context),
                getQualifiedName(context.qualifiedName()),
                (Query) visit(context.query()),
                context.REPLACE() != null);
    }
    @Override
    public Node visitStartTransaction(SqlBaseParser.StartTransactionContext context)
    {
        // START TRANSACTION [mode, ...]
        return new StartTransaction(visit(context.transactionMode(), TransactionMode.class));
    }
    @Override
    public Node visitCommit(SqlBaseParser.CommitContext context)
    {
        // COMMIT
        return new Commit(getLocation(context));
    }
    @Override
    public Node visitRollback(SqlBaseParser.RollbackContext context)
    {
        // ROLLBACK
        return new Rollback(getLocation(context));
    }
    @Override
    public Node visitTransactionAccessMode(SqlBaseParser.TransactionAccessModeContext context)
    {
        // READ ONLY vs READ WRITE — true when the access mode token is ONLY
        return new TransactionAccessMode(getLocation(context), context.accessMode.getType() == SqlBaseLexer.ONLY);
    }
    @Override
    public Node visitIsolationLevel(SqlBaseParser.IsolationLevelContext context)
    {
        // Delegate to the concrete level-of-isolation alternative
        return visit(context.levelOfIsolation());
    }
    @Override
    public Node visitReadUncommitted(SqlBaseParser.ReadUncommittedContext context)
    {
        // ISOLATION LEVEL READ UNCOMMITTED
        return new Isolation(getLocation(context), Isolation.Level.READ_UNCOMMITTED);
    }
    @Override
    public Node visitReadCommitted(SqlBaseParser.ReadCommittedContext context)
    {
        // ISOLATION LEVEL READ COMMITTED
        return new Isolation(getLocation(context), Isolation.Level.READ_COMMITTED);
    }
    @Override
    public Node visitRepeatableRead(SqlBaseParser.RepeatableReadContext context)
    {
        // ISOLATION LEVEL REPEATABLE READ
        return new Isolation(getLocation(context), Isolation.Level.REPEATABLE_READ);
    }
    @Override
    public Node visitSerializable(SqlBaseParser.SerializableContext context)
    {
        // ISOLATION LEVEL SERIALIZABLE
        return new Isolation(getLocation(context), Isolation.Level.SERIALIZABLE);
    }
    @Override
    public Node visitCall(SqlBaseParser.CallContext context)
    {
        // CALL procedure(arguments)
        return new Call(
                getLocation(context),
                getQualifiedName(context.qualifiedName()),
                visit(context.callArgument(), CallArgument.class));
    }
    @Override
    public Node visitPrepare(SqlBaseParser.PrepareContext context)
    {
        // PREPARE name FROM statement
        return new Prepare(
                getLocation(context),
                (Identifier) visit(context.identifier()),
                (Statement) visit(context.statement()));
    }
    @Override
    public Node visitDeallocate(SqlBaseParser.DeallocateContext context)
    {
        // DEALLOCATE PREPARE name
        return new Deallocate(
                getLocation(context),
                (Identifier) visit(context.identifier()));
    }
    @Override
    public Node visitExecute(SqlBaseParser.ExecuteContext context)
    {
        // EXECUTE name [USING expressions]
        return new Execute(
                getLocation(context),
                (Identifier) visit(context.identifier()),
                visit(context.expression(), Expression.class));
    }
    @Override
    public Node visitDescribeOutput(SqlBaseParser.DescribeOutputContext context)
    {
        // DESCRIBE OUTPUT name
        return new DescribeOutput(
                getLocation(context),
                (Identifier) visit(context.identifier()));
    }
    @Override
    public Node visitDescribeInput(SqlBaseParser.DescribeInputContext context)
    {
        // DESCRIBE INPUT name
        return new DescribeInput(
                getLocation(context),
                (Identifier) visit(context.identifier()));
    }
    @Override
    public Node visitProperty(SqlBaseParser.PropertyContext context)
    {
        // name = expression entry inside a WITH (...) properties list
        return new Property(getLocation(context), (Identifier) visit(context.identifier()), (Expression) visit(context.expression()));
    }
// ********************** query expressions ********************
    @Override
    public Node visitQuery(SqlBaseParser.QueryContext context)
    {
        // Attach the optional WITH clause to the query produced by queryNoWith,
        // rebuilding the Query node rather than mutating it.
        Query body = (Query) visit(context.queryNoWith());
        return new Query(
                getLocation(context),
                visitIfPresent(context.with(), With.class),
                body.getQueryBody(),
                body.getOrderBy(),
                body.getLimit());
    }
    @Override
    public Node visitWith(SqlBaseParser.WithContext context)
    {
        // WITH [RECURSIVE] namedQuery, ...
        return new With(getLocation(context), context.RECURSIVE() != null, visit(context.namedQuery(), WithQuery.class));
    }
    @Override
    public Node visitNamedQuery(SqlBaseParser.NamedQueryContext context)
    {
        // name [(column aliases)] AS (query) — one entry of a WITH clause
        Optional<List<Identifier>> columns = Optional.empty();
        if (context.columnAliases() != null) {
            columns = Optional.of(visit(context.columnAliases().identifier(), Identifier.class));
        }
        return new WithQuery(
                getLocation(context),
                (Identifier) visit(context.name),
                (Query) visit(context.query()),
                columns);
    }
    @Override
    public Node visitQueryNoWith(SqlBaseParser.QueryNoWithContext context)
    {
        // Build a Query around the query term, folding ORDER BY / LIMIT into a
        // bare QuerySpecification when possible (see comment below).
        QueryBody term = (QueryBody) visit(context.queryTerm());
        Optional<OrderBy> orderBy = Optional.empty();
        if (context.ORDER() != null) {
            orderBy = Optional.of(new OrderBy(getLocation(context.ORDER()), visit(context.sortItem(), SortItem.class)));
        }
        if (term instanceof QuerySpecification) {
            // When we have a simple query specification
            // followed by order by limit, fold the order by and limit
            // clauses into the query specification (analyzer/planner
            // expects this structure to resolve references with respect
            // to columns defined in the query specification)
            QuerySpecification query = (QuerySpecification) term;
            return new Query(
                    getLocation(context),
                    Optional.empty(),
                    new QuerySpecification(
                            getLocation(context),
                            query.getSelect(),
                            query.getFrom(),
                            query.getWhere(),
                            query.getGroupBy(),
                            query.getHaving(),
                            orderBy,
                            getTextIfPresent(context.limit)),
                    Optional.empty(),
                    Optional.empty());
        }
        // Set operations and other bodies keep ORDER BY / LIMIT on the Query itself
        return new Query(
                getLocation(context),
                Optional.empty(),
                term,
                orderBy,
                getTextIfPresent(context.limit));
    }
    @Override
    public Node visitQuerySpecification(SqlBaseParser.QuerySpecificationContext context)
    {
        // SELECT ... FROM r1, r2, ... — comma-separated relations are folded
        // left-to-right into implicit joins.
        Optional<Relation> from = Optional.empty();
        List<SelectItem> selectItems = visit(context.selectItem(), SelectItem.class);
        List<Relation> relations = visit(context.relation(), Relation.class);
        if (!relations.isEmpty()) {
            // synthesize implicit join nodes
            Iterator<Relation> iterator = relations.iterator();
            Relation relation = iterator.next();
            while (iterator.hasNext()) {
                relation = new Join(getLocation(context), Join.Type.IMPLICIT, relation, iterator.next(), Optional.empty());
            }
            from = Optional.of(relation);
        }
        // ORDER BY and LIMIT are filled in later by visitQueryNoWith
        return new QuerySpecification(
                getLocation(context),
                new Select(getLocation(context.SELECT()), isDistinct(context.setQuantifier()), selectItems),
                from,
                visitIfPresent(context.where, Expression.class),
                visitIfPresent(context.groupBy(), GroupBy.class),
                visitIfPresent(context.having, Expression.class),
                Optional.empty(),
                Optional.empty());
    }
    @Override
    public Node visitGroupBy(SqlBaseParser.GroupByContext context)
    {
        // GROUP BY [DISTINCT | ALL] grouping elements
        return new GroupBy(getLocation(context), isDistinct(context.setQuantifier()), visit(context.groupingElement(), GroupingElement.class));
    }
    @Override
    public Node visitSingleGroupingSet(SqlBaseParser.SingleGroupingSetContext context)
    {
        // Plain list of grouping expressions (no ROLLUP/CUBE/GROUPING SETS)
        return new SimpleGroupBy(getLocation(context), visit(context.groupingSet().expression(), Expression.class));
    }
    @Override
    public Node visitRollup(SqlBaseParser.RollupContext context)
    {
        // ROLLUP (expressions)
        return new Rollup(getLocation(context), visit(context.expression(), Expression.class));
    }
    @Override
    public Node visitCube(SqlBaseParser.CubeContext context)
    {
        // CUBE (expressions)
        return new Cube(getLocation(context), visit(context.expression(), Expression.class));
    }
    @Override
    public Node visitMultipleGroupingSets(SqlBaseParser.MultipleGroupingSetsContext context)
    {
        // GROUPING SETS ((...), (...)) — one expression list per grouping set
        return new GroupingSets(getLocation(context), context.groupingSet().stream()
                .map(groupingSet -> visit(groupingSet.expression(), Expression.class))
                .collect(toList()));
    }
@Override
public Node visitSetOperation(SqlBaseParser.SetOperationContext context)
{
QueryBody left = (QueryBody) visit(context.left);
QueryBody right = (QueryBody) visit(context.right);
boolean distinct = context.setQuantifier() == null || context.setQuantifier().DISTINCT() != null;
switch (context.operator.getType()) {
case SqlBaseLexer.UNION:
return new Union(getLocation(context.UNION()), ImmutableList.of(left, right), distinct);
case SqlBaseLexer.INTERSECT:
return new Intersect(getLocation(context.INTERSECT()), ImmutableList.of(left, right), distinct);
case SqlBaseLexer.EXCEPT:
return new Except(getLocation(context.EXCEPT()), left, right, distinct);
}
throw new IllegalArgumentException("Unsupported set operation: " + context.operator.getText());
}
@Override
public Node visitSelectAll(SqlBaseParser.SelectAllContext context)
{
if (context.qualifiedName() != null) {
return new AllColumns(getLocation(context), getQualifiedName(context.qualifiedName()));
}
return new AllColumns(getLocation(context));
}
    @Override
    public Node visitSelectSingle(SqlBaseParser.SelectSingleContext context)
    {
        // expression [AS alias] in the select list
        return new SingleColumn(
                getLocation(context),
                (Expression) visit(context.expression()),
                visitIfPresent(context.identifier(), Identifier.class));
    }
    @Override
    public Node visitTable(SqlBaseParser.TableContext context)
    {
        // TABLE name
        return new Table(getLocation(context), getQualifiedName(context.qualifiedName()));
    }
    @Override
    public Node visitSubquery(SqlBaseParser.SubqueryContext context)
    {
        // Parenthesized query used as a query term
        return new TableSubquery(getLocation(context), (Query) visit(context.queryNoWith()));
    }
    @Override
    public Node visitInlineTable(SqlBaseParser.InlineTableContext context)
    {
        // VALUES expression, ...
        return new Values(getLocation(context), visit(context.expression(), Expression.class));
    }
    @Override
    public Node visitExplain(SqlBaseParser.ExplainContext context)
    {
        // EXPLAIN [ANALYZE] [VERBOSE] [(options)] statement
        return new Explain(getLocation(context), context.ANALYZE() != null, context.VERBOSE() != null, (Statement) visit(context.statement()), visit(context.explainOption(), ExplainOption.class));
    }
    @Override
    public Node visitExplainFormat(SqlBaseParser.ExplainFormatContext context)
    {
        // FORMAT option of EXPLAIN: GRAPHVIZ | TEXT | JSON
        switch (context.value.getType()) {
            case SqlBaseLexer.GRAPHVIZ:
                return new ExplainFormat(getLocation(context), ExplainFormat.Type.GRAPHVIZ);
            case SqlBaseLexer.TEXT:
                return new ExplainFormat(getLocation(context), ExplainFormat.Type.TEXT);
            case SqlBaseLexer.JSON:
                return new ExplainFormat(getLocation(context), ExplainFormat.Type.JSON);
        }
        throw new IllegalArgumentException("Unsupported EXPLAIN format: " + context.value.getText());
    }
    @Override
    public Node visitExplainType(SqlBaseParser.ExplainTypeContext context)
    {
        // TYPE option of EXPLAIN: LOGICAL | DISTRIBUTED | VALIDATE | IO
        switch (context.value.getType()) {
            case SqlBaseLexer.LOGICAL:
                return new ExplainType(getLocation(context), ExplainType.Type.LOGICAL);
            case SqlBaseLexer.DISTRIBUTED:
                return new ExplainType(getLocation(context), ExplainType.Type.DISTRIBUTED);
            case SqlBaseLexer.VALIDATE:
                return new ExplainType(getLocation(context), ExplainType.Type.VALIDATE);
            case SqlBaseLexer.IO:
                return new ExplainType(getLocation(context), ExplainType.Type.IO);
        }
        throw new IllegalArgumentException("Unsupported EXPLAIN type: " + context.value.getText());
    }
    @Override
    public Node visitShowTables(SqlBaseParser.ShowTablesContext context)
    {
        // SHOW TABLES [FROM schema] [LIKE pattern [ESCAPE escape]]
        return new ShowTables(
                getLocation(context),
                Optional.ofNullable(context.qualifiedName())
                        .map(this::getQualifiedName),
                getTextIfPresent(context.pattern)
                        .map(AstBuilder::unquote),
                getTextIfPresent(context.escape)
                        .map(AstBuilder::unquote));
    }
    @Override
    public Node visitShowSchemas(SqlBaseParser.ShowSchemasContext context)
    {
        // SHOW SCHEMAS [FROM catalog] [LIKE pattern [ESCAPE escape]]
        return new ShowSchemas(
                getLocation(context),
                visitIfPresent(context.identifier(), Identifier.class),
                getTextIfPresent(context.pattern)
                        .map(AstBuilder::unquote),
                getTextIfPresent(context.escape)
                        .map(AstBuilder::unquote));
    }
    @Override
    public Node visitShowCatalogs(SqlBaseParser.ShowCatalogsContext context)
    {
        // SHOW CATALOGS [LIKE pattern]
        return new ShowCatalogs(getLocation(context),
                getTextIfPresent(context.pattern)
                        .map(AstBuilder::unquote));
    }
    @Override
    public Node visitShowColumns(SqlBaseParser.ShowColumnsContext context)
    {
        // SHOW COLUMNS FROM table
        return new ShowColumns(getLocation(context), getQualifiedName(context.qualifiedName()));
    }
    @Override
    public Node visitShowStats(SqlBaseParser.ShowStatsContext context)
    {
        // SHOW STATS FOR table
        return new ShowStats(Optional.of(getLocation(context)), new Table(getQualifiedName(context.qualifiedName())));
    }
    @Override
    public Node visitShowStatsForQuery(SqlBaseParser.ShowStatsForQueryContext context)
    {
        // SHOW STATS FOR (query) — wrap the bare specification in a Query/TableSubquery
        QuerySpecification specification = (QuerySpecification) visitQuerySpecification(context.querySpecification());
        Query query = new Query(Optional.empty(), specification, Optional.empty(), Optional.empty());
        return new ShowStats(Optional.of(getLocation(context)), new TableSubquery(query));
    }
    @Override
    public Node visitShowCreateView(SqlBaseParser.ShowCreateViewContext context)
    {
        // SHOW CREATE VIEW name
        return new ShowCreate(getLocation(context), ShowCreate.Type.VIEW, getQualifiedName(context.qualifiedName()));
    }
    @Override
    public Node visitShowFunctions(SqlBaseParser.ShowFunctionsContext context)
    {
        // SHOW FUNCTIONS
        return new ShowFunctions(getLocation(context));
    }
    @Override
    public Node visitShowSession(SqlBaseParser.ShowSessionContext context)
    {
        // SHOW SESSION
        return new ShowSession(getLocation(context));
    }
    @Override
    public Node visitSetSession(SqlBaseParser.SetSessionContext context)
    {
        // SET SESSION name = expression
        return new SetSession(getLocation(context), getQualifiedName(context.qualifiedName()), (Expression) visit(context.expression()));
    }
    @Override
    public Node visitResetSession(SqlBaseParser.ResetSessionContext context)
    {
        // RESET SESSION name
        return new ResetSession(getLocation(context), getQualifiedName(context.qualifiedName()));
    }
    @Override
    public Node visitGrant(SqlBaseParser.GrantContext context)
    {
        // GRANT {ALL PRIVILEGES | privilege, ...} ON [TABLE] name TO grantee [WITH GRANT OPTION]
        // An empty Optional encodes ALL PRIVILEGES.
        Optional<List<String>> privileges;
        if (context.ALL() != null) {
            privileges = Optional.empty();
        }
        else {
            privileges = Optional.of(context.privilege().stream()
                    .map(SqlBaseParser.PrivilegeContext::getText)
                    .collect(toList()));
        }
        return new Grant(
                getLocation(context),
                privileges,
                context.TABLE() != null,
                getQualifiedName(context.qualifiedName()),
                (Identifier) visit(context.grantee),
                context.OPTION() != null);
    }
    @Override
    public Node visitRevoke(SqlBaseParser.RevokeContext context)
    {
        // REVOKE [GRANT OPTION FOR] {ALL PRIVILEGES | privilege, ...} ON [TABLE] name FROM grantee
        // An empty Optional encodes ALL PRIVILEGES (same convention as visitGrant).
        Optional<List<String>> privileges;
        if (context.ALL() != null) {
            privileges = Optional.empty();
        }
        else {
            privileges = Optional.of(context.privilege().stream()
                    .map(SqlBaseParser.PrivilegeContext::getText)
                    .collect(toList()));
        }
        return new Revoke(
                getLocation(context),
                context.OPTION() != null,
                privileges,
                context.TABLE() != null,
                getQualifiedName(context.qualifiedName()),
                (Identifier) visit(context.grantee));
    }
@Override
public Node visitShowGrants(SqlBaseParser.ShowGrantsContext context)
{
Optional<QualifiedName> tableName = Optional.empty();
if (context.qualifiedName() != null) {
tableName = Optional.of(getQualifiedName(context.qualifiedName()));
}
return new ShowGrants(
getLocation(context),
context.TABLE() != null,
tableName);
}
    @Override
    public Node visitSetPath(SqlBaseParser.SetPathContext context)
    {
        // SET PATH path specification
        return new SetPath(getLocation(context), (PathSpecification) visit(context.pathSpecification()));
    }
// ***************** boolean expressions ******************
    @Override
    public Node visitLogicalNot(SqlBaseParser.LogicalNotContext context)
    {
        // NOT booleanExpression
        return new NotExpression(getLocation(context), (Expression) visit(context.booleanExpression()));
    }
    @Override
    public Node visitLogicalBinary(SqlBaseParser.LogicalBinaryContext context)
    {
        // left AND/OR right — the node location points at the operator token
        return new LogicalBinaryExpression(
                getLocation(context.operator),
                getLogicalBinaryOperator(context.operator),
                (Expression) visit(context.left),
                (Expression) visit(context.right));
    }
// *************** from clause *****************
    @Override
    public Node visitJoinRelation(SqlBaseParser.JoinRelationContext context)
    {
        // CROSS JOIN has no criteria and returns early; NATURAL joins synthesize
        // a NaturalJoin criteria; otherwise ON/USING is required.
        Relation left = (Relation) visit(context.left);
        Relation right;
        if (context.CROSS() != null) {
            right = (Relation) visit(context.right);
            return new Join(getLocation(context), Join.Type.CROSS, left, right, Optional.empty());
        }
        JoinCriteria criteria;
        if (context.NATURAL() != null) {
            right = (Relation) visit(context.right);
            criteria = new NaturalJoin();
        }
        else {
            // Non-natural joins use a differently-labeled grammar alternative
            right = (Relation) visit(context.rightRelation);
            if (context.joinCriteria().ON() != null) {
                criteria = new JoinOn((Expression) visit(context.joinCriteria().booleanExpression()));
            }
            else if (context.joinCriteria().USING() != null) {
                criteria = new JoinUsing(visit(context.joinCriteria().identifier(), Identifier.class));
            }
            else {
                throw new IllegalArgumentException("Unsupported join criteria");
            }
        }
        // LEFT/RIGHT/FULL default to INNER when no type keyword is present
        Join.Type joinType;
        if (context.joinType().LEFT() != null) {
            joinType = Join.Type.LEFT;
        }
        else if (context.joinType().RIGHT() != null) {
            joinType = Join.Type.RIGHT;
        }
        else if (context.joinType().FULL() != null) {
            joinType = Join.Type.FULL;
        }
        else {
            joinType = Join.Type.INNER;
        }
        return new Join(getLocation(context), joinType, left, right, Optional.of(criteria));
    }
    @Override
    public Node visitSampledRelation(SqlBaseParser.SampledRelationContext context)
    {
        // relation [TABLESAMPLE method (percentage)] — pass through when unsampled
        Relation child = (Relation) visit(context.aliasedRelation());
        if (context.TABLESAMPLE() == null) {
            return child;
        }
        return new SampledRelation(
                getLocation(context),
                child,
                getSamplingMethod((Token) context.sampleType().getChild(0).getPayload()),
                (Expression) visit(context.percentage));
    }
    @Override
    public Node visitAliasedRelation(SqlBaseParser.AliasedRelationContext context)
    {
        // relation [AS alias [(column aliases)]] — pass through when unaliased
        Relation child = (Relation) visit(context.relationPrimary());
        if (context.identifier() == null) {
            return child;
        }
        // null (not empty list) signals "no column aliases given"
        List<Identifier> aliases = null;
        if (context.columnAliases() != null) {
            aliases = visit(context.columnAliases().identifier(), Identifier.class);
        }
        return new AliasedRelation(getLocation(context), child, (Identifier) visit(context.identifier()), aliases);
    }
    @Override
    public Node visitTableName(SqlBaseParser.TableNameContext context)
    {
        // Plain table reference in a FROM clause
        return new Table(getLocation(context), getQualifiedName(context.qualifiedName()));
    }
    @Override
    public Node visitSubqueryRelation(SqlBaseParser.SubqueryRelationContext context)
    {
        // (query) used as a relation
        return new TableSubquery(getLocation(context), (Query) visit(context.query()));
    }
    @Override
    public Node visitUnnest(SqlBaseParser.UnnestContext context)
    {
        // UNNEST(expressions) [WITH ORDINALITY]
        return new Unnest(getLocation(context), visit(context.expression(), Expression.class), context.ORDINALITY() != null);
    }
    @Override
    public Node visitLateral(SqlBaseParser.LateralContext context)
    {
        // LATERAL (query)
        return new Lateral(getLocation(context), (Query) visit(context.query()));
    }
    @Override
    public Node visitParenthesizedRelation(SqlBaseParser.ParenthesizedRelationContext context)
    {
        // Parentheses around a relation carry no semantics — unwrap
        return visit(context.relation());
    }
// ********************* predicates *******************
@Override
public Node visitPredicated(SqlBaseParser.PredicatedContext context)
{
if (context.predicate() != null) {
return visit(context.predicate());
}
return visit(context.valueExpression);
}
    @Override
    public Node visitComparison(SqlBaseParser.ComparisonContext context)
    {
        // value <op> right — the operator token is the comparison rule's only child
        return new ComparisonExpression(
                getLocation(context.comparisonOperator()),
                getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)).getSymbol()),
                (Expression) visit(context.value),
                (Expression) visit(context.right));
    }
    @Override
    public Node visitDistinctFrom(SqlBaseParser.DistinctFromContext context)
    {
        // value IS [NOT] DISTINCT FROM right — NOT wraps the comparison in a negation
        Expression expression = new ComparisonExpression(
                getLocation(context),
                ComparisonExpression.Operator.IS_DISTINCT_FROM,
                (Expression) visit(context.value),
                (Expression) visit(context.right));
        if (context.NOT() != null) {
            expression = new NotExpression(getLocation(context), expression);
        }
        return expression;
    }
    @Override
    public Node visitBetween(SqlBaseParser.BetweenContext context)
    {
        // value [NOT] BETWEEN lower AND upper
        Expression expression = new BetweenPredicate(
                getLocation(context),
                (Expression) visit(context.value),
                (Expression) visit(context.lower),
                (Expression) visit(context.upper));
        if (context.NOT() != null) {
            expression = new NotExpression(getLocation(context), expression);
        }
        return expression;
    }
@Override
public Node visitNullPredicate(SqlBaseParser.NullPredicateContext context)
{
Expression child = (Expression) visit(context.value);
if (context.NOT() == null) {
return new IsNullPredicate(getLocation(context), child);
}
return new IsNotNullPredicate(getLocation(context), child);
}
    @Override
    public Node visitLike(SqlBaseParser.LikeContext context)
    {
        // value [NOT] LIKE pattern [ESCAPE escape]
        Expression result = new LikePredicate(
                getLocation(context),
                (Expression) visit(context.value),
                (Expression) visit(context.pattern),
                visitIfPresent(context.escape, Expression.class));
        if (context.NOT() != null) {
            result = new NotExpression(getLocation(context), result);
        }
        return result;
    }
    @Override
    public Node visitInList(SqlBaseParser.InListContext context)
    {
        // value [NOT] IN (expression, ...)
        Expression result = new InPredicate(
                getLocation(context),
                (Expression) visit(context.value),
                new InListExpression(getLocation(context), visit(context.expression(), Expression.class)));
        if (context.NOT() != null) {
            result = new NotExpression(getLocation(context), result);
        }
        return result;
    }
    @Override
    public Node visitInSubquery(SqlBaseParser.InSubqueryContext context)
    {
        // value [NOT] IN (query)
        Expression result = new InPredicate(
                getLocation(context),
                (Expression) visit(context.value),
                new SubqueryExpression(getLocation(context), (Query) visit(context.query())));
        if (context.NOT() != null) {
            result = new NotExpression(getLocation(context), result);
        }
        return result;
    }
    @Override
    public Node visitExists(SqlBaseParser.ExistsContext context)
    {
        // EXISTS (query)
        return new ExistsPredicate(getLocation(context), new SubqueryExpression(getLocation(context), (Query) visit(context.query())));
    }
    @Override
    public Node visitQuantifiedComparison(SqlBaseParser.QuantifiedComparisonContext context)
    {
        // value <op> ALL/ANY/SOME (query)
        return new QuantifiedComparisonExpression(
                getLocation(context.comparisonOperator()),
                getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)).getSymbol()),
                getComparisonQuantifier(((TerminalNode) context.comparisonQuantifier().getChild(0)).getSymbol()),
                (Expression) visit(context.value),
                new SubqueryExpression(getLocation(context.query()), (Query) visit(context.query())));
    }
// ************** value expressions **************
    @Override
    public Node visitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext context)
    {
        // Unary +/- on a value expression
        Expression child = (Expression) visit(context.valueExpression());
        switch (context.operator.getType()) {
            case SqlBaseLexer.MINUS:
                return ArithmeticUnaryExpression.negative(getLocation(context), child);
            case SqlBaseLexer.PLUS:
                return ArithmeticUnaryExpression.positive(getLocation(context), child);
            default:
                throw new UnsupportedOperationException("Unsupported sign: " + context.operator.getText());
        }
    }
    @Override
    public Node visitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext context)
    {
        // left <op> right — location points at the operator token
        return new ArithmeticBinaryExpression(
                getLocation(context.operator),
                getArithmeticBinaryOperator(context.operator),
                (Expression) visit(context.left),
                (Expression) visit(context.right));
    }
    @Override
    public Node visitConcatenation(SqlBaseParser.ConcatenationContext context)
    {
        // left || right is desugared into a concat(left, right) call
        return new FunctionCall(
                getLocation(context.CONCAT()),
                QualifiedName.of("concat"), ImmutableList.of(
                        (Expression) visit(context.left),
                        (Expression) visit(context.right)));
    }
    @Override
    public Node visitAtTimeZone(SqlBaseParser.AtTimeZoneContext context)
    {
        // expression AT TIME ZONE specifier
        return new AtTimeZone(
                getLocation(context.AT()),
                (Expression) visit(context.valueExpression()),
                (Expression) visit(context.timeZoneSpecifier()));
    }
    @Override
    public Node visitTimeZoneInterval(SqlBaseParser.TimeZoneIntervalContext context)
    {
        // TIME ZONE given as an interval literal
        return visit(context.interval());
    }
    @Override
    public Node visitTimeZoneString(SqlBaseParser.TimeZoneStringContext context)
    {
        // TIME ZONE given as a string literal
        return visit(context.string());
    }
// ********************* primary expressions **********************
    @Override
    public Node visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext context)
    {
        // Parentheses carry no semantics — unwrap
        return visit(context.expression());
    }
    @Override
    public Node visitRowConstructor(SqlBaseParser.RowConstructorContext context)
    {
        // ROW (expression, ...)
        return new Row(getLocation(context), visit(context.expression(), Expression.class));
    }
    @Override
    public Node visitArrayConstructor(SqlBaseParser.ArrayConstructorContext context)
    {
        // ARRAY[expression, ...]
        return new ArrayConstructor(getLocation(context), visit(context.expression(), Expression.class));
    }
@Override
public Node visitCast(SqlBaseParser.CastContext context)
{
boolean isTryCast = context.TRY_CAST() != null;
return new Cast(getLocation(context), (Expression) visit(context.expression()), getType(context.type()), isTryCast);
}
    @Override
    public Node visitSpecialDateTimeFunction(SqlBaseParser.SpecialDateTimeFunctionContext context)
    {
        // CURRENT_TIME / CURRENT_TIMESTAMP / LOCALTIME / ... with optional precision
        CurrentTime.Function function = getDateTimeFunctionType(context.name);
        if (context.precision != null) {
            return new CurrentTime(getLocation(context), function, Integer.parseInt(context.precision.getText()));
        }
        return new CurrentTime(getLocation(context), function);
    }
    @Override
    public Node visitCurrentUser(SqlBaseParser.CurrentUserContext context)
    {
        // CURRENT_USER
        return new CurrentUser(getLocation(context.CURRENT_USER()));
    }
    @Override
    public Node visitCurrentPath(SqlBaseParser.CurrentPathContext context)
    {
        // CURRENT_PATH
        return new CurrentPath(getLocation(context.CURRENT_PATH()));
    }
@Override
public Node visitExtract(SqlBaseParser.ExtractContext context)
{
String fieldString = context.identifier().getText();
Extract.Field field;
try {
field = Extract.Field.valueOf(fieldString.toUpperCase());
}
catch (IllegalArgumentException e) {
throw parseError("Invalid EXTRACT field: " + fieldString, context);
}
return new Extract(getLocation(context), (Expression) visit(context.valueExpression()), field);
}
    @Override
    public Node visitSubstring(SqlBaseParser.SubstringContext context)
    {
        // SUBSTRING(x FROM start [FOR length]) is desugared into substr(...)
        return new FunctionCall(getLocation(context), QualifiedName.of("substr"), visit(context.valueExpression(), Expression.class));
    }
    @Override
    public Node visitPosition(SqlBaseParser.PositionContext context)
    {
        // POSITION(needle IN haystack) -> strpos(haystack, needle); the grammar
        // yields (needle, haystack), so the argument list is reversed here.
        List<Expression> arguments = Lists.reverse(visit(context.valueExpression(), Expression.class));
        return new FunctionCall(getLocation(context), QualifiedName.of("strpos"), arguments);
    }
    @Override
    public Node visitNormalize(SqlBaseParser.NormalizeContext context)
    {
        // NORMALIZE(string [, form]) -> normalize(string, 'FORM'); form defaults to NFC
        Expression str = (Expression) visit(context.valueExpression());
        String normalForm = Optional.ofNullable(context.normalForm()).map(ParserRuleContext::getText).orElse("NFC");
        return new FunctionCall(getLocation(context), QualifiedName.of("normalize"), ImmutableList.of(str, new StringLiteral(getLocation(context), normalForm)));
    }
    @Override
    public Node visitSubscript(SqlBaseParser.SubscriptContext context)
    {
        // value[index]
        return new SubscriptExpression(getLocation(context), (Expression) visit(context.value), (Expression) visit(context.index));
    }
    @Override
    public Node visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext context)
    {
        // Scalar subquery: (query) used as an expression
        return new SubqueryExpression(getLocation(context), (Query) visit(context.query()));
    }
    @Override
    public Node visitDereference(SqlBaseParser.DereferenceContext context)
    {
        // base.fieldName (row-field or qualified-column access)
        return new DereferenceExpression(
                getLocation(context),
                (Expression) visit(context.base),
                (Identifier) visit(context.fieldName));
    }
    @Override
    public Node visitColumnReference(SqlBaseParser.ColumnReferenceContext context)
    {
        // Bare identifier used as an expression
        return visit(context.identifier());
    }
    @Override
    public Node visitSimpleCase(SqlBaseParser.SimpleCaseContext context)
    {
        // CASE operand WHEN ... THEN ... [ELSE ...] END
        return new SimpleCaseExpression(
                getLocation(context),
                (Expression) visit(context.valueExpression()),
                visit(context.whenClause(), WhenClause.class),
                visitIfPresent(context.elseExpression, Expression.class));
    }
    @Override
    public Node visitSearchedCase(SqlBaseParser.SearchedCaseContext context)
    {
        // CASE WHEN condition THEN ... [ELSE ...] END (no operand)
        return new SearchedCaseExpression(
                getLocation(context),
                visit(context.whenClause(), WhenClause.class),
                visitIfPresent(context.elseExpression, Expression.class));
    }
    @Override
    public Node visitWhenClause(SqlBaseParser.WhenClauseContext context)
    {
        // WHEN condition THEN result
        return new WhenClause(getLocation(context), (Expression) visit(context.condition), (Expression) visit(context.result));
    }
    @Override
    public Node visitFunctionCall(SqlBaseParser.FunctionCallContext context)
    {
        // Generic function-call syntax. A handful of names (if, nullif, coalesce,
        // try, $internal$bind) are recognized here and turned into dedicated AST
        // nodes instead of plain FunctionCalls; each rejects OVER/DISTINCT/FILTER.
        Optional<Expression> filter = visitIfPresent(context.filter(), Expression.class);
        Optional<Window> window = visitIfPresent(context.over(), Window.class);
        Optional<OrderBy> orderBy = Optional.empty();
        if (context.ORDER() != null) {
            orderBy = Optional.of(new OrderBy(visit(context.sortItem(), SortItem.class)));
        }
        QualifiedName name = getQualifiedName(context.qualifiedName());
        boolean distinct = isDistinct(context.setQuantifier());
        if (name.toString().equalsIgnoreCase("if")) {
            check(context.expression().size() == 2 || context.expression().size() == 3, "Invalid number of arguments for 'if' function", context);
            check(!window.isPresent(), "OVER clause not valid for 'if' function", context);
            check(!distinct, "DISTINCT not valid for 'if' function", context);
            check(!filter.isPresent(), "FILTER not valid for 'if' function", context);
            // The third argument (ELSE branch) is optional; null when absent
            Expression elseExpression = null;
            if (context.expression().size() == 3) {
                elseExpression = (Expression) visit(context.expression(2));
            }
            return new IfExpression(
                    getLocation(context),
                    (Expression) visit(context.expression(0)),
                    (Expression) visit(context.expression(1)),
                    elseExpression);
        }
        if (name.toString().equalsIgnoreCase("nullif")) {
            check(context.expression().size() == 2, "Invalid number of arguments for 'nullif' function", context);
            check(!window.isPresent(), "OVER clause not valid for 'nullif' function", context);
            check(!distinct, "DISTINCT not valid for 'nullif' function", context);
            check(!filter.isPresent(), "FILTER not valid for 'nullif' function", context);
            return new NullIfExpression(
                    getLocation(context),
                    (Expression) visit(context.expression(0)),
                    (Expression) visit(context.expression(1)));
        }
        if (name.toString().equalsIgnoreCase("coalesce")) {
            check(context.expression().size() >= 2, "The 'coalesce' function must have at least two arguments", context);
            check(!window.isPresent(), "OVER clause not valid for 'coalesce' function", context);
            check(!distinct, "DISTINCT not valid for 'coalesce' function", context);
            check(!filter.isPresent(), "FILTER not valid for 'coalesce' function", context);
            return new CoalesceExpression(getLocation(context), visit(context.expression(), Expression.class));
        }
        if (name.toString().equalsIgnoreCase("try")) {
            check(context.expression().size() == 1, "The 'try' function must have exactly one argument", context);
            check(!window.isPresent(), "OVER clause not valid for 'try' function", context);
            check(!distinct, "DISTINCT not valid for 'try' function", context);
            check(!filter.isPresent(), "FILTER not valid for 'try' function", context);
            return new TryExpression(getLocation(context), (Expression) visit(getOnlyElement(context.expression())));
        }
        if (name.toString().equalsIgnoreCase("$internal$bind")) {
            check(context.expression().size() >= 1, "The '$internal$bind' function must have at least one arguments", context);
            check(!window.isPresent(), "OVER clause not valid for '$internal$bind' function", context);
            check(!distinct, "DISTINCT not valid for '$internal$bind' function", context);
            check(!filter.isPresent(), "FILTER not valid for '$internal$bind' function", context);
            // Last argument is the target function; everything before it is a bound value
            int numValues = context.expression().size() - 1;
            List<Expression> arguments = context.expression().stream()
                    .map(this::visit)
                    .map(Expression.class::cast)
                    .collect(toImmutableList());
            return new BindExpression(
                    getLocation(context),
                    arguments.subList(0, numValues),
                    arguments.get(numValues));
        }
        // Everything else is an ordinary function call
        return new FunctionCall(
                getLocation(context),
                getQualifiedName(context.qualifiedName()),
                window,
                filter,
                orderBy,
                distinct,
                visit(context.expression(), Expression.class));
    }
@Override
public Node visitLambda(SqlBaseParser.LambdaContext context)
{
    // Convert each parameter identifier of the lambda into an argument declaration,
    // then attach the visited body expression.
    List<Identifier> identifiers = visit(context.identifier(), Identifier.class);
    List<LambdaArgumentDeclaration> parameters = identifiers.stream()
            .map(identifier -> new LambdaArgumentDeclaration(identifier))
            .collect(toList());
    return new LambdaExpression(getLocation(context), parameters, (Expression) visit(context.expression()));
}
// A FILTER clause is represented directly by its boolean expression.
@Override
public Node visitFilter(SqlBaseParser.FilterContext context)
{
    return visit(context.booleanExpression());
}

// Builds the window specification of an OVER clause: partition expressions,
// an optional ORDER BY, and an optional window frame.
@Override
public Node visitOver(SqlBaseParser.OverContext context)
{
    Optional<OrderBy> orderBy = Optional.empty();
    if (context.ORDER() != null) {
        orderBy = Optional.of(new OrderBy(getLocation(context.ORDER()), visit(context.sortItem(), SortItem.class)));
    }
    return new Window(
            getLocation(context),
            visit(context.partition, Expression.class),
            orderBy,
            visitIfPresent(context.windowFrame(), WindowFrame.class));
}

// Column definition in CREATE TABLE: name, type, optional WITH properties,
// and an optional COMMENT string.
@Override
public Node visitColumnDefinition(SqlBaseParser.ColumnDefinitionContext context)
{
    Optional<String> comment = Optional.empty();
    if (context.COMMENT() != null) {
        comment = Optional.of(((StringLiteral) visit(context.string())).getValue());
    }
    List<Property> properties = ImmutableList.of();
    if (context.properties() != null) {
        properties = visit(context.properties().property(), Property.class);
    }
    return new ColumnDefinition(
            getLocation(context),
            (Identifier) visit(context.identifier()),
            getType(context.type()),
            properties,
            comment);
}

// LIKE clause in CREATE TABLE, with an optional INCLUDING/EXCLUDING PROPERTIES option.
@Override
public Node visitLikeClause(SqlBaseParser.LikeClauseContext context)
{
    return new LikeClause(
            getLocation(context),
            getQualifiedName(context.qualifiedName()),
            Optional.ofNullable(context.optionType)
                    .map(AstBuilder::getPropertiesOption));
}

// Sort item of an ORDER BY. Missing ASC/DESC defaults to ASCENDING;
// missing NULLS FIRST/LAST defaults to UNDEFINED.
@Override
public Node visitSortItem(SqlBaseParser.SortItemContext context)
{
    return new SortItem(
            getLocation(context),
            (Expression) visit(context.expression()),
            Optional.ofNullable(context.ordering)
                    .map(AstBuilder::getOrderingType)
                    .orElse(SortItem.Ordering.ASCENDING),
            Optional.ofNullable(context.nullOrdering)
                    .map(AstBuilder::getNullOrderingType)
                    .orElse(SortItem.NullOrdering.UNDEFINED));
}

// Window frame (RANGE/ROWS) with a mandatory start bound and optional end bound.
@Override
public Node visitWindowFrame(SqlBaseParser.WindowFrameContext context)
{
    return new WindowFrame(
            getLocation(context),
            getFrameType(context.frameType),
            (FrameBound) visit(context.start),
            visitIfPresent(context.end, FrameBound.class));
}

// UNBOUNDED PRECEDING / UNBOUNDED FOLLOWING frame bound.
@Override
public Node visitUnboundedFrame(SqlBaseParser.UnboundedFrameContext context)
{
    return new FrameBound(getLocation(context), getUnboundedFrameBoundType(context.boundType));
}

// <expression> PRECEDING / FOLLOWING frame bound.
@Override
public Node visitBoundedFrame(SqlBaseParser.BoundedFrameContext context)
{
    return new FrameBound(getLocation(context), getBoundedFrameBoundType(context.boundType), (Expression) visit(context.expression()));
}

// CURRENT ROW frame bound.
@Override
public Node visitCurrentRowBound(SqlBaseParser.CurrentRowBoundContext context)
{
    return new FrameBound(getLocation(context), FrameBound.Type.CURRENT_ROW);
}

// GROUPING(...) operation over a list of qualified column names.
@Override
public Node visitGroupingOperation(SqlBaseParser.GroupingOperationContext context)
{
    List<QualifiedName> arguments = context.qualifiedName().stream()
            .map(this::getQualifiedName)
            .collect(toList());
    return new GroupingOperation(Optional.of(getLocation(context)), arguments);
}

// Bare identifier: the raw token text, not delimited (third argument = false).
@Override
public Node visitUnquotedIdentifier(SqlBaseParser.UnquotedIdentifierContext context)
{
    return new Identifier(getLocation(context), context.getText(), false);
}

// Double-quoted identifier: strip the surrounding quotes and collapse doubled
// quotes ("") back to a single quote; marked as delimited (true).
@Override
public Node visitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext context)
{
    String token = context.getText();
    String identifier = token.substring(1, token.length() - 1)
            .replace("\"\"", "\"");
    return new Identifier(getLocation(context), identifier, true);
}
// ************** literals **************

@Override
public Node visitNullLiteral(SqlBaseParser.NullLiteralContext context)
{
    return new NullLiteral(getLocation(context));
}

// 'single-quoted' string literal; unquote() strips the quotes and collapses ''.
@Override
public Node visitBasicStringLiteral(SqlBaseParser.BasicStringLiteralContext context)
{
    return new StringLiteral(getLocation(context), unquote(context.STRING().getText()));
}

// U&'...' string literal with Unicode escape sequences; decoding is delegated
// to the state machine in decodeUnicodeLiteral().
@Override
public Node visitUnicodeStringLiteral(SqlBaseParser.UnicodeStringLiteralContext context)
{
    return new StringLiteral(getLocation(context), decodeUnicodeLiteral(context));
}

// Binary literal: drop the leading prefix character, then strip the quotes.
// NOTE(review): assumes the lexer token has the form X'...' — confirm against the grammar.
@Override
public Node visitBinaryLiteral(SqlBaseParser.BinaryLiteralContext context)
{
    String raw = context.BINARY_LITERAL().getText();
    return new BinaryLiteral(getLocation(context), unquote(raw.substring(1)));
}

// TYPE 'value' constructor. TIME/TIMESTAMP/DECIMAL/CHAR get dedicated literal
// nodes; every other type becomes a GenericLiteral.
@Override
public Node visitTypeConstructor(SqlBaseParser.TypeConstructorContext context)
{
    String value = ((StringLiteral) visit(context.string())).getValue();
    if (context.DOUBLE_PRECISION() != null) {
        // TODO: Temporary hack that should be removed with new planner.
        return new GenericLiteral(getLocation(context), "DOUBLE", value);
    }
    String type = context.identifier().getText();
    if (type.equalsIgnoreCase("time")) {
        return new TimeLiteral(getLocation(context), value);
    }
    if (type.equalsIgnoreCase("timestamp")) {
        return new TimestampLiteral(getLocation(context), value);
    }
    if (type.equalsIgnoreCase("decimal")) {
        return new DecimalLiteral(getLocation(context), value);
    }
    if (type.equalsIgnoreCase("char")) {
        return new CharLiteral(getLocation(context), value);
    }
    return new GenericLiteral(getLocation(context), type, value);
}

@Override
public Node visitIntegerLiteral(SqlBaseParser.IntegerLiteralContext context)
{
    return new LongLiteral(getLocation(context), context.getText());
}

// Exact-numeric literal; interpretation is controlled by the parser options
// (legacy AS_DOUBLE vs. AS_DECIMAL vs. outright rejection).
@Override
public Node visitDecimalLiteral(SqlBaseParser.DecimalLiteralContext context)
{
    switch (parsingOptions.getDecimalLiteralTreatment()) {
        case AS_DOUBLE:
            return new DoubleLiteral(getLocation(context), context.getText());
        case AS_DECIMAL:
            return new DecimalLiteral(getLocation(context), context.getText());
        case REJECT:
            throw new ParsingException("Unexpected decimal literal: " + context.getText());
    }
    // All enum values are handled above; reaching here means the enum changed.
    throw new AssertionError("Unreachable");
}

@Override
public Node visitDoubleLiteral(SqlBaseParser.DoubleLiteralContext context)
{
    return new DoubleLiteral(getLocation(context), context.getText());
}

@Override
public Node visitBooleanValue(SqlBaseParser.BooleanValueContext context)
{
    return new BooleanLiteral(getLocation(context), context.getText());
}

// INTERVAL literal: value string, optional sign (defaults to POSITIVE),
// mandatory FROM field and optional TO field (e.g. INTERVAL '1' DAY TO HOUR).
@Override
public Node visitInterval(SqlBaseParser.IntervalContext context)
{
    return new IntervalLiteral(
            getLocation(context),
            ((StringLiteral) visit(context.string())).getValue(),
            Optional.ofNullable(context.sign)
                    .map(AstBuilder::getIntervalSign)
                    .orElse(IntervalLiteral.Sign.POSITIVE),
            getIntervalFieldType((Token) context.from.getChild(0).getPayload()),
            Optional.ofNullable(context.to)
                    .map((x) -> x.getChild(0).getPayload())
                    .map(Token.class::cast)
                    .map(AstBuilder::getIntervalFieldType));
}

// '?' placeholder; positions are assigned in parse order via a mutable counter.
@Override
public Node visitParameter(SqlBaseParser.ParameterContext context)
{
    Parameter parameter = new Parameter(getLocation(context), parameterPosition);
    parameterPosition++;
    return parameter;
}
// ***************** arguments *****************

// CALL argument given by position only.
@Override
public Node visitPositionalArgument(SqlBaseParser.PositionalArgumentContext context)
{
    return new CallArgument(getLocation(context), (Expression) visit(context.expression()));
}

// CALL argument given as name => expression.
@Override
public Node visitNamedArgument(SqlBaseParser.NamedArgumentContext context)
{
    return new CallArgument(getLocation(context), context.identifier().getText(), (Expression) visit(context.expression()));
}

// Path element qualified as catalog.schema.
@Override
public Node visitQualifiedArgument(SqlBaseParser.QualifiedArgumentContext context)
{
    return new PathElement(getLocation(context), (Identifier) visit(context.identifier(0)), (Identifier) visit(context.identifier(1)));
}

// Path element given as a single identifier.
@Override
public Node visitUnqualifiedArgument(SqlBaseParser.UnqualifiedArgumentContext context)
{
    return new PathElement(getLocation(context), (Identifier) visit(context.identifier()));
}

@Override
public Node visitPathSpecification(SqlBaseParser.PathSpecificationContext context)
{
    return new PathSpecification(getLocation(context), visit(context.pathElement(), PathElement.class));
}

// ***************** helpers *****************

// Default visitor result for rules without an explicit visit method.
@Override
protected Node defaultResult()
{
    return null;
}

// This builder only supports rules that produce a single non-null child node;
// any attempt to aggregate two real results (or a null next result) fails fast.
@Override
protected Node aggregateResult(Node aggregate, Node nextResult)
{
    if (nextResult == null) {
        throw new UnsupportedOperationException("not yet implemented");
    }
    if (aggregate == null) {
        return nextResult;
    }
    throw new UnsupportedOperationException("not yet implemented");
}
// States of the U&'...' literal decoder below.
// EMPTY            - copying ordinary characters
// ESCAPED          - just saw the escape character
// UNICODE_SEQUENCE - accumulating hex digits of a code point
private enum UnicodeDecodeState
{
    EMPTY,
    ESCAPED,
    UNICODE_SEQUENCE
}

// Decodes a U&'...' string literal, expanding \XXXX (4 hex digits) and
// \+XXXXXX (6 hex digits) escapes, where '\' may be replaced by a custom
// escape character given via UESCAPE. A doubled escape character denotes
// the escape character itself.
private static String decodeUnicodeLiteral(SqlBaseParser.UnicodeStringLiteralContext context)
{
    char escape;
    if (context.UESCAPE() != null) {
        // Custom escape character: must be exactly one valid, non-ambiguous character.
        String escapeString = unquote(context.STRING().getText());
        check(!escapeString.isEmpty(), "Empty Unicode escape character", context);
        check(escapeString.length() == 1, "Invalid Unicode escape character: " + escapeString, context);
        escape = escapeString.charAt(0);
        check(isValidUnicodeEscape(escape), "Invalid Unicode escape character: " + escapeString, context);
    }
    else {
        escape = '\\';
    }
    // Strip the leading "U&" prefix, then the surrounding quotes.
    String rawContent = unquote(context.UNICODE_STRING().getText().substring(2));
    StringBuilder unicodeStringBuilder = new StringBuilder();
    StringBuilder escapedCharacterBuilder = new StringBuilder();
    int charactersNeeded = 0;
    UnicodeDecodeState state = UnicodeDecodeState.EMPTY;
    for (int i = 0; i < rawContent.length(); i++) {
        char ch = rawContent.charAt(i);
        switch (state) {
            case EMPTY:
                if (ch == escape) {
                    state = UnicodeDecodeState.ESCAPED;
                }
                else {
                    unicodeStringBuilder.append(ch);
                }
                break;
            case ESCAPED:
                if (ch == escape) {
                    // Doubled escape character stands for the escape character itself.
                    unicodeStringBuilder.append(escape);
                    state = UnicodeDecodeState.EMPTY;
                }
                else if (ch == '+') {
                    // \+XXXXXX form: six hex digits follow.
                    state = UnicodeDecodeState.UNICODE_SEQUENCE;
                    charactersNeeded = 6;
                }
                else if (isHexDigit(ch)) {
                    // \XXXX form: four hex digits, of which this is the first.
                    state = UnicodeDecodeState.UNICODE_SEQUENCE;
                    charactersNeeded = 4;
                    escapedCharacterBuilder.append(ch);
                }
                else {
                    throw parseError("Invalid hexadecimal digit: " + ch, context);
                }
                break;
            case UNICODE_SEQUENCE:
                check(isHexDigit(ch), "Incomplete escape sequence: " + escapedCharacterBuilder.toString(), context);
                escapedCharacterBuilder.append(ch);
                if (charactersNeeded == escapedCharacterBuilder.length()) {
                    // All hex digits collected: decode and validate the code point.
                    String currentEscapedCode = escapedCharacterBuilder.toString();
                    escapedCharacterBuilder.setLength(0);
                    int codePoint = Integer.parseInt(currentEscapedCode, 16);
                    check(Character.isValidCodePoint(codePoint), "Invalid escaped character: " + currentEscapedCode, context);
                    if (Character.isSupplementaryCodePoint(codePoint)) {
                        unicodeStringBuilder.appendCodePoint(codePoint);
                    }
                    else {
                        // BMP code point: reject lone surrogates, which are not valid characters.
                        char currentCodePoint = (char) codePoint;
                        check(!Character.isSurrogate(currentCodePoint), format("Invalid escaped character: %s. Escaped character is a surrogate. Use '\\+123456' instead.", currentEscapedCode), context);
                        unicodeStringBuilder.append(currentCodePoint);
                    }
                    state = UnicodeDecodeState.EMPTY;
                    charactersNeeded = -1; // sentinel: no sequence in progress
                }
                else {
                    // Invariant: we never collect more digits than the sequence needs.
                    check(charactersNeeded > escapedCharacterBuilder.length(), "Unexpected escape sequence length: " + escapedCharacterBuilder.length(), context);
                }
                break;
            default:
                throw new UnsupportedOperationException();
        }
    }
    // The literal must not end in the middle of an escape sequence.
    check(state == UnicodeDecodeState.EMPTY, "Incomplete escape sequence: " + escapedCharacterBuilder.toString(), context);
    return unicodeStringBuilder.toString();
}
// Visits the context if present and casts the result, otherwise returns empty.
// A null visit result also yields empty.
private <T> Optional<T> visitIfPresent(ParserRuleContext context, Class<T> clazz)
{
    if (context == null) {
        return Optional.empty();
    }
    return Optional.ofNullable(clazz.cast(visit(context)));
}

// Visits every context in the list and casts each result to the requested type.
private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz)
{
    return contexts.stream()
            .map(context -> clazz.cast(visit(context)))
            .collect(toList());
}
// Strips the surrounding quote characters from a quoted token and collapses
// doubled single quotes ('') back to a single quote.
private static String unquote(String value)
{
    String inner = value.substring(1, value.length() - 1);
    return inner.replace("''", "'");
}
// Maps the INCLUDING/EXCLUDING token of a LIKE clause onto the AST enum.
private static LikeClause.PropertiesOption getPropertiesOption(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.INCLUDING:
            return LikeClause.PropertiesOption.INCLUDING;
        case SqlBaseLexer.EXCLUDING:
            return LikeClause.PropertiesOption.EXCLUDING;
    }
    throw new IllegalArgumentException("Unsupported LIKE option type: " + token.getText());
}

// Converts a parsed qualified-name rule into a QualifiedName of its identifier parts.
private QualifiedName getQualifiedName(SqlBaseParser.QualifiedNameContext context)
{
    List<String> parts = visit(context.identifier(), Identifier.class).stream()
            .map(Identifier::getValue) // TODO: preserve quotedness
            .collect(Collectors.toList());
    return QualifiedName.of(parts);
}

// True only when an explicit DISTINCT set quantifier was given.
private static boolean isDistinct(SqlBaseParser.SetQuantifierContext setQuantifier)
{
    return setQuantifier != null && setQuantifier.DISTINCT() != null;
}
// True for the hexadecimal digit characters [0-9A-Fa-f].
private static boolean isHexDigit(char c)
{
    return (c >= '0' && c <= '9')
            || (c >= 'A' && c <= 'F')
            || (c >= 'a' && c <= 'f');
}

// A valid UESCAPE character is printable ASCII (excluding space and DEL) that
// cannot be confused with hex digits, quotes, or the '+' of a 6-digit escape.
private static boolean isValidUnicodeEscape(char c)
{
    return c > 0x20 && c < 0x7F && !isHexDigit(c) && c != '+' && c != '"' && c != '\'';
}
// Returns the raw text of the rule context, or empty when the context is absent.
private static Optional<String> getTextIfPresent(ParserRuleContext context)
{
    return Optional.ofNullable(context)
            .map(ParseTree::getText);
}

// Returns the raw text of the token, or empty when the token is absent.
private static Optional<String> getTextIfPresent(Token token)
{
    return Optional.ofNullable(token)
            .map(Token::getText);
}

// Maps an arithmetic operator token (+ - * / %) onto the AST operator enum.
// NOTE(review): throws UnsupportedOperationException while the sibling mappers
// throw IllegalArgumentException — possibly an inconsistency worth aligning.
private static ArithmeticBinaryExpression.Operator getArithmeticBinaryOperator(Token operator)
{
    switch (operator.getType()) {
        case SqlBaseLexer.PLUS:
            return ArithmeticBinaryExpression.Operator.ADD;
        case SqlBaseLexer.MINUS:
            return ArithmeticBinaryExpression.Operator.SUBTRACT;
        case SqlBaseLexer.ASTERISK:
            return ArithmeticBinaryExpression.Operator.MULTIPLY;
        case SqlBaseLexer.SLASH:
            return ArithmeticBinaryExpression.Operator.DIVIDE;
        case SqlBaseLexer.PERCENT:
            return ArithmeticBinaryExpression.Operator.MODULUS;
    }
    throw new UnsupportedOperationException("Unsupported operator: " + operator.getText());
}

// Maps a comparison token (=, <>, <, <=, >, >=) onto the AST operator enum.
private static ComparisonExpression.Operator getComparisonOperator(Token symbol)
{
    switch (symbol.getType()) {
        case SqlBaseLexer.EQ:
            return ComparisonExpression.Operator.EQUAL;
        case SqlBaseLexer.NEQ:
            return ComparisonExpression.Operator.NOT_EQUAL;
        case SqlBaseLexer.LT:
            return ComparisonExpression.Operator.LESS_THAN;
        case SqlBaseLexer.LTE:
            return ComparisonExpression.Operator.LESS_THAN_OR_EQUAL;
        case SqlBaseLexer.GT:
            return ComparisonExpression.Operator.GREATER_THAN;
        case SqlBaseLexer.GTE:
            return ComparisonExpression.Operator.GREATER_THAN_OR_EQUAL;
    }
    throw new IllegalArgumentException("Unsupported operator: " + symbol.getText());
}

// Maps CURRENT_DATE/TIME/TIMESTAMP and LOCALTIME(STAMP) tokens onto CurrentTime functions.
private static CurrentTime.Function getDateTimeFunctionType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.CURRENT_DATE:
            return CurrentTime.Function.DATE;
        case SqlBaseLexer.CURRENT_TIME:
            return CurrentTime.Function.TIME;
        case SqlBaseLexer.CURRENT_TIMESTAMP:
            return CurrentTime.Function.TIMESTAMP;
        case SqlBaseLexer.LOCALTIME:
            return CurrentTime.Function.LOCALTIME;
        case SqlBaseLexer.LOCALTIMESTAMP:
            return CurrentTime.Function.LOCALTIMESTAMP;
    }
    throw new IllegalArgumentException("Unsupported special function: " + token.getText());
}

// Maps an interval field keyword (YEAR..SECOND) onto the AST enum.
private static IntervalLiteral.IntervalField getIntervalFieldType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.YEAR:
            return IntervalLiteral.IntervalField.YEAR;
        case SqlBaseLexer.MONTH:
            return IntervalLiteral.IntervalField.MONTH;
        case SqlBaseLexer.DAY:
            return IntervalLiteral.IntervalField.DAY;
        case SqlBaseLexer.HOUR:
            return IntervalLiteral.IntervalField.HOUR;
        case SqlBaseLexer.MINUTE:
            return IntervalLiteral.IntervalField.MINUTE;
        case SqlBaseLexer.SECOND:
            return IntervalLiteral.IntervalField.SECOND;
    }
    throw new IllegalArgumentException("Unsupported interval field: " + token.getText());
}

// Maps a leading +/- token of an interval onto its sign.
private static IntervalLiteral.Sign getIntervalSign(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.MINUS:
            return IntervalLiteral.Sign.NEGATIVE;
        case SqlBaseLexer.PLUS:
            return IntervalLiteral.Sign.POSITIVE;
    }
    throw new IllegalArgumentException("Unsupported sign: " + token.getText());
}

// Maps the RANGE/ROWS keyword of a window frame onto the frame type.
private static WindowFrame.Type getFrameType(Token type)
{
    switch (type.getType()) {
        case SqlBaseLexer.RANGE:
            return WindowFrame.Type.RANGE;
        case SqlBaseLexer.ROWS:
            return WindowFrame.Type.ROWS;
    }
    throw new IllegalArgumentException("Unsupported frame type: " + type.getText());
}

// Maps PRECEDING/FOLLOWING for an expression-bounded frame bound.
private static FrameBound.Type getBoundedFrameBoundType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.PRECEDING:
            return FrameBound.Type.PRECEDING;
        case SqlBaseLexer.FOLLOWING:
            return FrameBound.Type.FOLLOWING;
    }
    throw new IllegalArgumentException("Unsupported bound type: " + token.getText());
}

// Maps PRECEDING/FOLLOWING for an UNBOUNDED frame bound.
private static FrameBound.Type getUnboundedFrameBoundType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.PRECEDING:
            return FrameBound.Type.UNBOUNDED_PRECEDING;
        case SqlBaseLexer.FOLLOWING:
            return FrameBound.Type.UNBOUNDED_FOLLOWING;
    }
    throw new IllegalArgumentException("Unsupported bound type: " + token.getText());
}

// Maps the BERNOULLI/SYSTEM keyword of TABLESAMPLE onto the sampling method.
private static SampledRelation.Type getSamplingMethod(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.BERNOULLI:
            return SampledRelation.Type.BERNOULLI;
        case SqlBaseLexer.SYSTEM:
            return SampledRelation.Type.SYSTEM;
    }
    throw new IllegalArgumentException("Unsupported sampling method: " + token.getText());
}

// Maps AND/OR onto the logical binary operator enum.
private static LogicalBinaryExpression.Operator getLogicalBinaryOperator(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.AND:
            return LogicalBinaryExpression.Operator.AND;
        case SqlBaseLexer.OR:
            return LogicalBinaryExpression.Operator.OR;
    }
    throw new IllegalArgumentException("Unsupported operator: " + token.getText());
}

// Maps NULLS FIRST/LAST onto the null-ordering enum.
private static SortItem.NullOrdering getNullOrderingType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.FIRST:
            return SortItem.NullOrdering.FIRST;
        case SqlBaseLexer.LAST:
            return SortItem.NullOrdering.LAST;
    }
    throw new IllegalArgumentException("Unsupported ordering: " + token.getText());
}

// Maps ASC/DESC onto the ordering enum.
private static SortItem.Ordering getOrderingType(Token token)
{
    switch (token.getType()) {
        case SqlBaseLexer.ASC:
            return SortItem.Ordering.ASCENDING;
        case SqlBaseLexer.DESC:
            return SortItem.Ordering.DESCENDING;
    }
    throw new IllegalArgumentException("Unsupported ordering: " + token.getText());
}

// Maps ALL/ANY/SOME of a quantified comparison onto the quantifier enum.
private static QuantifiedComparisonExpression.Quantifier getComparisonQuantifier(Token symbol)
{
    switch (symbol.getType()) {
        case SqlBaseLexer.ALL:
            return QuantifiedComparisonExpression.Quantifier.ALL;
        case SqlBaseLexer.ANY:
            return QuantifiedComparisonExpression.Quantifier.ANY;
        case SqlBaseLexer.SOME:
            return QuantifiedComparisonExpression.Quantifier.SOME;
    }
    throw new IllegalArgumentException("Unsupported quantifier: " + symbol.getText());
}
// Renders a parsed type rule as its textual signature, e.g. "VARCHAR(10)",
// "ARRAY(BIGINT)", "MAP(VARCHAR,BIGINT)", "ROW(a BIGINT,b VARCHAR)".
private String getType(SqlBaseParser.TypeContext type)
{
    if (type.baseType() != null) {
        String signature = type.baseType().getText();
        if (type.baseType().DOUBLE_PRECISION() != null) {
            // TODO: Temporary hack that should be removed with new planner.
            signature = "DOUBLE";
        }
        if (!type.typeParameter().isEmpty()) {
            // Append the parenthesized, comma-separated type parameters.
            String typeParameterSignature = type
                    .typeParameter()
                    .stream()
                    .map(this::typeParameterToString)
                    .collect(Collectors.joining(","));
            signature += "(" + typeParameterSignature + ")";
        }
        return signature;
    }
    if (type.ARRAY() != null) {
        return "ARRAY(" + getType(type.type(0)) + ")";
    }
    if (type.MAP() != null) {
        return "MAP(" + getType(type.type(0)) + "," + getType(type.type(1)) + ")";
    }
    if (type.ROW() != null) {
        // Field names are paired positionally with their types.
        // NOTE(review): relies on the visited identifier node's toString()
        // producing the field name — confirm Identifier.toString() semantics.
        StringBuilder builder = new StringBuilder("(");
        for (int i = 0; i < type.identifier().size(); i++) {
            if (i != 0) {
                builder.append(",");
            }
            builder.append(visit(type.identifier(i)))
                    .append(" ")
                    .append(getType(type.type(i)));
        }
        builder.append(")");
        return "ROW" + builder.toString();
    }
    if (type.INTERVAL() != null) {
        return "INTERVAL " + getIntervalFieldType((Token) type.from.getChild(0).getPayload()) +
                " TO " + getIntervalFieldType((Token) type.to.getChild(0).getPayload());
    }
    throw new IllegalArgumentException("Unsupported type specification: " + type.getText());
}

// Renders one type parameter: either a bare integer (e.g. VARCHAR(10)) or a nested type.
private String typeParameterToString(SqlBaseParser.TypeParameterContext typeParameter)
{
    if (typeParameter.INTEGER_VALUE() != null) {
        return typeParameter.INTEGER_VALUE().toString();
    }
    if (typeParameter.type() != null) {
        return getType(typeParameter.type());
    }
    throw new IllegalArgumentException("Unsupported typeParameter: " + typeParameter.getText());
}
// Fails with a ParsingException anchored at the given context when the condition is false.
private static void check(boolean condition, String message, ParserRuleContext context)
{
    if (!condition) {
        throw parseError(message, context);
    }
}

// Location of a terminal node, taken from its underlying token.
public static NodeLocation getLocation(TerminalNode terminalNode)
{
    requireNonNull(terminalNode, "terminalNode is null");
    return getLocation(terminalNode.getSymbol());
}

// Location of a rule context, taken from its first token.
public static NodeLocation getLocation(ParserRuleContext parserRuleContext)
{
    requireNonNull(parserRuleContext, "parserRuleContext is null");
    return getLocation(parserRuleContext.getStart());
}

// Location of a single token (1-based line, 0-based column per ANTLR convention).
public static NodeLocation getLocation(Token token)
{
    requireNonNull(token, "token is null");
    return new NodeLocation(token.getLine(), token.getCharPositionInLine());
}

// Builds a ParsingException carrying the line/column of the context's start token.
private static ParsingException parseError(String message, ParserRuleContext context)
{
    return new ParsingException(message, null, context.getStart().getLine(), context.getStart().getCharPositionInLine());
}
}
| |
package com.hello.hegberg.warondemand;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
/**
* Activity called when a user clicks on one of the items in their pending bids
* They can accept or reject of the bids
*/
public class AcceptOrRejectBid extends AppCompatActivity {
    // All bids currently placed on the item being viewed; refreshed by search().
    private ArrayList<Bid> bids = new ArrayList<Bid>();
    private static final int PERMISSION_LOCATION_REQUEST_CODE = 1;
    // NOTE(review): cross-activity state shared statically with ViewSpecificUser;
    // consider passing the user through Intent extras instead.
    public static User specificUser;

    /**
     * Displays the clicked bid in detail and wires up the accept/decline buttons.
     *
     * @param savedInstanceState standard Android saved-state bundle
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_accept_or_reject_bid);
        TextView bidderNameText = (TextView) findViewById(R.id.bidder_info);
        TextView itemNameText = (TextView) findViewById(R.id.name_info);
        TextView bidAmountText = (TextView) findViewById(R.id.bidding_info);
        Button acceptBid = (Button) findViewById(R.id.accept_bid);
        Button declineBid = (Button) findViewById(R.id.decline_bid);
        // Populate the detail fields from the statically shared clicked bid.
        bidderNameText.setText(BidChooseBid.bidClicked.getBidder().getUsername());
        bidAmountText.setText(BidChooseBid.bidClicked.getBidAmount());
        itemNameText.setText(BidChooseBid.bidClicked.getItemBidOn().getName());
        ImageView imageView = (ImageView) findViewById(R.id.picView);
        if(BidChooseBid.bidClicked.getItemBidOn().getThumbnail() != null){
            imageView.setImageBitmap(BidChooseBid.bidClicked.getItemBidOn().getThumbnail());
        }
        /**
         * Accepts a bid, records the owner's location, and removes all other bids on the item.
         */
        acceptBid.setOnClickListener(new View.OnClickListener() {
            /**
             * On click, the item is marked as borrowed and assigned to the bidder,
             * every bid on the item is deleted, the owner's last known location is
             * stored on the item, and the item is updated in the database.
             *
             * @param v the clicked view
             */
            @Override
            public void onClick(View v) {
                // Refresh the list of all bids on this item before deleting them.
                search();
                WarItem temp = BiddingChooseItem.bidItemClicked;
                // NOTE(review): status 2 appears to mean "borrowed" — confirm against WarItem.
                BiddingChooseItem.bidItemClicked.setStatus(2);
                BiddingChooseItem.bidItemClicked.setBorrower(BidChooseBid.bidClicked.getBidder());
                Log.i("size->", String.valueOf(bids.size()));
                // Delete every bid on the item, iterating backwards to tolerate removal.
                for(int i = bids.size()-1; i >= 0; i--){
                    DatabaseController.deleteBids(bids.get(i));
                }
                if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
                    // NOTE(review): execution continues below even if the permission
                    // request has not been granted yet; the location read then relies
                    // on the SecurityException-free path or a prior grant — confirm.
                    // TODO: Consider calling
                    //    ActivityCompat#requestPermissions
                    // here to request the missing permissions, and then overriding
                    //   public void onRequestPermissionsResult(int requestCode, String[] permissions,
                    //                                          int[] grantResults)
                    // to handle the case where the user grants the permission. See the documentation
                    // for ActivityCompat#requestPermissions for more details.
                    ActivityCompat.requestPermissions(
                            getActivity(),
                            new String[]{Manifest.permission.ACCESS_COARSE_LOCATION, Manifest.permission.ACCESS_FINE_LOCATION},
                            PERMISSION_LOCATION_REQUEST_CODE);
                }
                // NOTE(review): empty branch — permission is confirmed but nothing is done here;
                // likely leftover scaffolding.
                if (ContextCompat.checkSelfPermission(getContext(), android.Manifest.permission.ACCESS_FINE_LOCATION) ==
                        PackageManager.PERMISSION_GRANTED &&
                        ContextCompat.checkSelfPermission(getContext(), android.Manifest.permission.ACCESS_COARSE_LOCATION) ==
                                PackageManager.PERMISSION_GRANTED) {
                }
                Log.d("working", "hello");
                // Read the owner's last known location from the best available provider.
                Criteria criteria = new Criteria();
                LocationManager locationManager = (LocationManager) getContext().getSystemService(Context.LOCATION_SERVICE);
                String bestProvider = String.valueOf(locationManager.getBestProvider(criteria, true)).toString();
                Location location1 = locationManager.getLastKnownLocation(bestProvider);
                // Fall back to (0.0, 0.0) when no last known location is available.
                Double latitude=0.0;
                Double longitude=0.0;
                if (location1 != null) {
                    Log.e("TAG", "GPS is on");
                    latitude = location1.getLatitude();
                    longitude = location1.getLongitude();
                }
                try {
                    BiddingChooseItem.bidItemClicked.setLocation(latitude, longitude);
                    BiddingChooseItem.bidItemClicked.setborrowedtrue();
                }catch (NullPointerException e){
                    // Best-effort: a missing item reference should not crash the activity.
                    e.printStackTrace();
                }
                DatabaseController.updateItem(temp, BiddingChooseItem.bidItemClicked);
                BiddingChooseItem.bidAccepted = true;
                finish();
            }
        });
        /**
         * Deletes the declined bid and checks whether any bids remain on the item.
         */
        declineBid.setOnClickListener(new View.OnClickListener() {
            /**
             * On click, the selected bid is deleted from the database and removed
             * from the local list; if only one bid remains the item is reset to
             * available with no borrower.
             *
             * @param v the clicked view
             */
            @Override
            public void onClick(View v) {
                search();
                Log.i("id->", BidChooseBid.bidClicked.getId());
                DatabaseController.deleteBids(BidChooseBid.bidClicked);
                // NOTE(review): relies on Bid.equals() matching the refreshed copy
                // returned by search() — confirm Bid overrides equals.
                bids.remove(BidChooseBid.bidClicked);
                Log.i("size->", String.valueOf(bids.size()));
                // NOTE(review): checks for exactly one REMAINING bid after removal;
                // if the intent is "no bids remain", this should compare against 0 — confirm.
                if (bids.size() == 1){
                    BiddingChooseItem.bidAccepted = true;
                    WarItem temp = BiddingChooseItem.bidItemClicked;
                    BiddingChooseItem.bidItemClicked.setStatus(0);
                    BiddingChooseItem.bidItemClicked.setBorrower(null);
                    DatabaseController.updateItem(temp, BiddingChooseItem.bidItemClicked);
                } else {
                    BiddingChooseItem.bidAccepted = false;
                }
                finish();
            }
        });
        bidderNameText.setOnClickListener(new View.OnClickListener() {
            /**
             * On click of the bidder's name, opens that user's profile.
             *
             * @param v the clicked view
             */
            @Override
            public void onClick(View v) {
                //TODO Need to make it so ViewSpecificUser grabs the specific user from the previous activity.
                specificUser = BidChooseBid.bidClicked.getBidder();
                Intent intent = new Intent(AcceptOrRejectBid.this, ViewSpecificUser.class).putExtra("from", "AcceptOrRejectBid");
                startActivity(intent);
            }
        });
    }

    /**
     * Closes this activity on restart if the bid was already accepted elsewhere.
     */
    protected void onStart(){
        super.onStart();
        if (BiddingChooseItem.bidAccepted == true){
            finish();
        }
    }

    /**
     * Repopulates {@code bids} with every bid placed on the currently viewed item,
     * by fetching all bids from the database and filtering by item name.
     */
    public void search(){
        DatabaseController.GetBids getBidsTask = new DatabaseController.GetBids();
        try {
            // Clear the previous results before re-filtering.
            for (int i=bids.size() - 1; i>=0; i--) {
                bids.remove(i);
            }
            ArrayList<Bid> bidsPreSearch = new ArrayList<Bid>();
            WarItem temp;
            getBidsTask.execute("");
            // Blocks until the async task completes.
            bidsPreSearch = getBidsTask.get();
            Log.i("size-> ", "" + bidsPreSearch.size());
            for (int i=0; i<bidsPreSearch.size(); i++){
                temp = BiddingChooseItem.bidItemClicked;
                Log.i("item->",""+bidsPreSearch.get(i).getItemBidOn().getName() );
                // NOTE(review): items are matched by name only — confirm names are unique.
                if (temp.getName().equals(bidsPreSearch.get(i).getItemBidOn().getName())) {
                    bids.add(bidsPreSearch.get(i));
                }
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns this activity as a Context (for use inside anonymous listeners).
     *
     * @return this
     */
    public Context getContext(){
        return this;
    }

    /**
     * Returns this activity (for use inside anonymous listeners).
     *
     * @return this
     */
    public Activity getActivity(){
        return this;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap.handlers;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.message.AbandonListener;
import org.apache.directory.api.ldap.model.message.AbandonableRequest;
import org.apache.directory.api.ldap.model.message.SearchRequest;
import org.apache.directory.api.ldap.model.message.SearchResultEntry;
import org.apache.directory.api.ldap.model.message.SearchResultEntryImpl;
import org.apache.directory.api.ldap.model.message.controls.ChangeType;
import org.apache.directory.api.ldap.model.message.controls.EntryChange;
import org.apache.directory.api.ldap.model.message.controls.EntryChangeImpl;
import org.apache.directory.api.ldap.model.message.controls.PersistentSearch;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.util.Strings;
import org.apache.directory.server.core.api.entry.ClonedServerEntry;
import org.apache.directory.server.core.api.entry.ServerEntryUtils;
import org.apache.directory.server.core.api.event.DirectoryListener;
import org.apache.directory.server.core.api.interceptor.context.AddOperationContext;
import org.apache.directory.server.core.api.interceptor.context.ChangeOperationContext;
import org.apache.directory.server.core.api.interceptor.context.DeleteOperationContext;
import org.apache.directory.server.core.api.interceptor.context.LookupOperationContext;
import org.apache.directory.server.core.api.interceptor.context.ModifyOperationContext;
import org.apache.directory.server.core.api.interceptor.context.MoveAndRenameOperationContext;
import org.apache.directory.server.core.api.interceptor.context.MoveOperationContext;
import org.apache.directory.server.core.api.interceptor.context.RenameOperationContext;
import org.apache.directory.server.ldap.LdapSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A DirectoryListener implementation which sends back added, deleted, modified or
* renamed entries to a client that created this listener. This class is part of the
* persistent search implementation which uses the event notification scheme built into
* the server core.
*
* This listener is disabled only when a session closes or when an abandon request
* cancels it. Hence time and size limits in normal search operations do not apply
* here.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class PersistentSearchListener implements DirectoryListener, AbandonListener
{
    private static final Logger LOG = LoggerFactory.getLogger( PersistentSearchListener.class );

    /** The client session that issued the persistent search. */
    final LdapSession session;

    /** The originating search request; used for message IDs and attribute selection. */
    final SearchRequest req;

    /** The persistent search control extracted from the request's controls. */
    final PersistentSearch psearchControl;

    /** Lookup context used to re-read changed entries with the requested attributes. */
    private LookupOperationContext filterCtx;

    private SchemaManager schemaManager;

    /**
     * Creates a listener bound to the given session and search request, registers
     * itself as the request's abandon listener, and prepares the lookup context
     * from the request's attribute list.
     */
    public PersistentSearchListener( LdapSession session, SearchRequest req )
    {
        this.session = session;
        this.req = req;
        req.addAbandonListener( this );
        this.psearchControl = ( PersistentSearch ) req.getControls().get( PersistentSearch.OID );
        filterCtx = new LookupOperationContext( session.getCoreSession(), req.getAttributes().toArray( Strings.EMPTY_STRING_ARRAY ) );
        schemaManager = session.getCoreSession().getDirectoryService().getSchemaManager();
    }

    /**
     * Persistent-search notifications are always delivered asynchronously.
     */
    @Override
    public boolean isSynchronous()
    {
        return false; // always asynchronous
    }

    /**
     * Deregisters this listener from the event service; per RFC 2251 section 4.11
     * no further responses (not even a SearchResultDone) are sent to the client.
     */
    public void abandon()
    {
        // must abandon the operation
        session.getCoreSession().getDirectoryService().getEventService().removeListener( this );
        /*
         * From RFC 2251 Section 4.11:
         *
         * In the event that a server receives an Abandon Request on a Search
         * operation in the midst of transmitting responses to the Search, that
         * server MUST cease transmitting entry responses to the abandoned
         * request immediately, and MUST NOT send the SearchResultDone. Of
         * course, the server MUST ensure that only properly encoded LDAPMessage
         * PDUs are transmitted.
         *
         * SO DON'T SEND BACK ANYTHING!!!!!
         */
    }

    /**
     * AbandonListener callback: delegates to {@link #abandon()}.
     */
    public void requestAbandoned( AbandonableRequest req )
    {
        abandon();
    }

    /**
     * Attaches an EntryChange response control to the outgoing entry when the
     * client asked for them (returnECs), carrying the change type, the changelog
     * revision when available, and — for rename/move operations — the previous DN.
     */
    private void setECResponseControl( SearchResultEntry response, ChangeOperationContext opContext, ChangeType type )
    {
        if ( psearchControl.isReturnECs() )
        {
            EntryChange ecControl = new EntryChangeImpl();
            ecControl.setChangeType( type );

            if ( opContext.getChangeLogEvent() != null )
            {
                ecControl.setChangeNumber( opContext.getChangeLogEvent().getRevision() );
            }

            // NOTE(review): MoveAndRenameOperationContext only triggers this branch
            // if it extends one of these two context types — confirm the hierarchy,
            // otherwise moved-and-renamed entries lose their previous DN.
            if ( opContext instanceof RenameOperationContext || opContext instanceof MoveOperationContext )
            {
                ecControl.setPreviousDn( opContext.getDn() );
            }

            response.addControl( ecControl );
        }
    }
public void entryAdded( AddOperationContext addContext )
{
if ( !psearchControl.isNotificationEnabled( ChangeType.ADD ) )
{
return;
}
SearchResultEntry respEntry = new SearchResultEntryImpl( req.getMessageId() );
respEntry.setObjectName( addContext.getDn() );
// the entry needs to be cloned cause addContext.getEntry() will only contain
// the user provided values and all the operational attributes added during
// Partition.add() will be applied in the cloned entry present inside it
// if we don't clone then the attributes will not be filtered
// e.x the operational attributes will also be sent even when a user requests
// user attributes only
Entry entry = new ClonedServerEntry( addContext.getEntry() );
filterEntry( entry );
respEntry.setEntry( entry );
setECResponseControl( respEntry, addContext, ChangeType.ADD );
session.getIoSession().write( respEntry );
}
public void entryDeleted( DeleteOperationContext deleteContext )
{
if ( !psearchControl.isNotificationEnabled( ChangeType.DELETE ) )
{
return;
}
SearchResultEntry respEntry = new SearchResultEntryImpl( req.getMessageId() );
respEntry.setObjectName( deleteContext.getDn() );
filterEntry( deleteContext.getEntry() );
respEntry.setEntry( deleteContext.getEntry() );
setECResponseControl( respEntry, deleteContext, ChangeType.DELETE );
session.getIoSession().write( respEntry );
}
public void entryModified( ModifyOperationContext modifyContext )
{
if ( !psearchControl.isNotificationEnabled( ChangeType.MODIFY ) )
{
return;
}
SearchResultEntry respEntry = new SearchResultEntryImpl( req.getMessageId() );
respEntry.setObjectName( modifyContext.getDn() );
Entry entry = new ClonedServerEntry( modifyContext.getAlteredEntry() );
filterEntry( entry );
respEntry.setEntry( entry );
setECResponseControl( respEntry, modifyContext, ChangeType.MODIFY );
session.getIoSession().write( respEntry );
}
public void entryMoved( MoveOperationContext moveContext )
{
if ( !psearchControl.isNotificationEnabled( ChangeType.MODDN ) )
{
return;
}
SearchResultEntry respEntry = new SearchResultEntryImpl( req.getMessageId() );
respEntry.setObjectName( moveContext.getNewDn() );
Entry entry = new ClonedServerEntry( moveContext.getModifiedEntry() );
filterEntry( entry );
respEntry.setEntry( entry );
setECResponseControl( respEntry, moveContext, ChangeType.MODDN );
session.getIoSession().write( respEntry );
}
public void entryMovedAndRenamed( MoveAndRenameOperationContext moveAndRenameContext )
{
entryRenamed( moveAndRenameContext );
}
public void entryRenamed( RenameOperationContext renameContext )
{
if ( !psearchControl.isNotificationEnabled( ChangeType.MODDN ) )
{
return;
}
SearchResultEntry respEntry = new SearchResultEntryImpl( req.getMessageId() );
respEntry.setObjectName( renameContext.getModifiedEntry().getDn() );
Entry entry = new ClonedServerEntry( renameContext.getModifiedEntry() );
filterEntry( entry );
respEntry.setEntry( entry );
setECResponseControl( respEntry, renameContext, ChangeType.MODDN );
session.getIoSession().write( respEntry );
}
/**
* A convenient method to filter the contents of an entry
*
* @see ServerEntryUtils#filterContents(SchemaManager, org.apache.directory.server.core.api.interceptor.context.FilteringOperationContext, Entry)
*
* @param entry
*/
private void filterEntry( Entry entry )
{
try
{
ServerEntryUtils.filterContents( schemaManager, filterCtx, entry );
}
catch ( LdapException e )
{
// shouldn't happen, if it does then blow up
throw new RuntimeException( e );
}
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1p1beta1/securitycenter_service.proto
package com.google.cloud.securitycenter.v1p1beta1;
/**
*
*
* <pre>
* Request message for getting organization settings.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest}
*/
public final class GetOrganizationSettingsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)
    GetOrganizationSettingsRequestOrBuilder {
  // NOTE: protoc-generated code — regenerate from the .proto instead of hand-editing.
  private static final long serialVersionUID = 0L;

  // Use GetOrganizationSettingsRequest.newBuilder() to construct.
  private GetOrganizationSettingsRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private GetOrganizationSettingsRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetOrganizationSettingsRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of stream,
  // collecting any unrecognized fields into unknownFields.
  private GetOrganizationSettingsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1 (name), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_GetOrganizationSettingsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_GetOrganizationSettingsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest.class,
            com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; decoded lazily by getName().
  private volatile java.lang.Object name_;
  /**
   * <pre>
   * Required. Name of the organization to get organization settings for. Its format is
   * "organizations/[organization_id]/organizationSettings".
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String; the benign data race is safe because decoding is idempotent.
      name_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. Name of the organization to get organization settings for. Its format is
   * "organizations/[organization_id]/organizationSettings".
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls (same benign race as getName()).
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Empty strings are skipped per proto3 default-value semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest other =
        (com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for getting organization settings.
   * </pre>
   *
   * Protobuf type {@code google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)
      com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_GetOrganizationSettingsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_GetOrganizationSettingsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest.class,
              com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_GetOrganizationSettingsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest build() {
      com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest buildPartial() {
      com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest result =
          new com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest) {
        return mergeFrom(
            (com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest other) {
      if (other
          == com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
              .getDefaultInstance()) return this;
      // proto3 merge semantics: only non-default (non-empty) fields overwrite.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage =
            (com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     * <pre>
     * Required. Name of the organization to get organization settings for. Its format is
     * "organizations/[organization_id]/organizationSettings".
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String as in the message's getName().
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. Name of the organization to get organization settings for. Its format is
     * "organizations/[organization_id]/organizationSettings".
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. Name of the organization to get organization settings for. Its format is
     * "organizations/[organization_id]/organizationSettings".
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. Name of the organization to get organization settings for. Its format is
     * "organizations/[organization_id]/organizationSettings".
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. Name of the organization to get organization settings for. Its format is
     * "organizations/[organization_id]/organizationSettings".
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest)
  private static final com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest();
  }

  public static com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetOrganizationSettingsRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetOrganizationSettingsRequest>() {
        @java.lang.Override
        public GetOrganizationSettingsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new GetOrganizationSettingsRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<GetOrganizationSettingsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetOrganizationSettingsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.GetOrganizationSettingsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package brooklyn.entity.brooklynnode;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import brooklyn.config.ConfigKey;
import brooklyn.config.render.RendererHints;
import brooklyn.entity.Effector;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.BrooklynTaskTags;
import brooklyn.entity.basic.SoftwareProcessImpl;
import brooklyn.entity.effector.EffectorBody;
import brooklyn.entity.effector.Effectors;
import brooklyn.event.feed.ConfigToAttributes;
import brooklyn.event.feed.http.HttpFeed;
import brooklyn.event.feed.http.HttpPollConfig;
import brooklyn.event.feed.http.HttpValueFunctions;
import brooklyn.util.collections.Jsonya;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.config.ConfigBag;
import brooklyn.util.exceptions.Exceptions;
import brooklyn.util.http.HttpTool;
import brooklyn.util.http.HttpTool.HttpClientBuilder;
import brooklyn.util.http.HttpToolResponse;
import brooklyn.util.javalang.JavaClassNames;
import brooklyn.util.net.Urls;
import brooklyn.util.stream.Streams;
import brooklyn.util.task.Tasks;
import brooklyn.util.text.Strings;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.gson.Gson;
public class BrooklynNodeImpl extends SoftwareProcessImpl implements BrooklynNode {
private static final Logger log = LoggerFactory.getLogger(BrooklynNodeImpl.class);
static {
RendererHints.register(WEB_CONSOLE_URI, new RendererHints.NamedActionWithUrl("Open"));
}
private HttpFeed httpFeed;
public BrooklynNodeImpl() {
super();
}
public BrooklynNodeImpl(Entity parent) {
super(parent);
}
@Override
public Class getDriverInterface() {
return BrooklynNodeDriver.class;
}
@Override
public void init() {
super.init();
getMutableEntityType().addEffector(DeployBlueprintEffectorBody.DEPLOY_BLUEPRINT);
}
public static class DeployBlueprintEffectorBody extends EffectorBody<String> implements DeployBlueprintEffector {
public static final Effector<String> DEPLOY_BLUEPRINT = Effectors.effector(BrooklynNode.DEPLOY_BLUEPRINT).impl(new DeployBlueprintEffectorBody()).build();
// TODO support YAML parsing
// TODO define a new type YamlMap for the config key which supports coercing from string and from map
public static Map<String,Object> asMap(ConfigBag parameters, ConfigKey<?> key) {
Object v = parameters.getStringKey(key.getName());
if (v==null || (v instanceof String && Strings.isBlank((String)v)))
return null;
if (v instanceof Map)
return (Map<String, Object>) v;
if (v instanceof String) {
// TODO ideally, parse YAML
return new Gson().fromJson((String)v, Map.class);
}
throw new IllegalArgumentException("Invalid "+JavaClassNames.simpleClassName(v)+" value for "+key+": "+v);
}
@Override
public String call(ConfigBag parameters) {
String plan = extractPlanYamlString(parameters);
return submitPlan(plan);
}
protected String extractPlanYamlString(ConfigBag parameters) {
Object planRaw = parameters.getStringKey(BLUEPRINT_CAMP_PLAN.getName());
if (planRaw instanceof String && Strings.isBlank((String)planRaw)) planRaw = null;
String url = parameters.get(BLUEPRINT_TYPE);
if (url!=null && planRaw!=null)
throw new IllegalArgumentException("Cannot supply both plan and url");
if (url==null && planRaw==null)
throw new IllegalArgumentException("Must supply plan or url");
Map<String, Object> config = asMap(parameters, BLUEPRINT_CONFIG);
if (planRaw==null) {
planRaw = Jsonya.at("services").list().put("serviceType", url).put(config).getRootMap();
} else {
if (config!=null)
throw new IllegalArgumentException("Cannot supply plan with config");
}
// planRaw might be a yaml string, or a map; if a map, convert to string
if (planRaw instanceof Map)
planRaw = Jsonya.of((Map)planRaw).toString();
if (!(planRaw instanceof String))
throw new IllegalArgumentException("Invalid "+JavaClassNames.simpleClassName(planRaw)+" value for CAMP plan: "+planRaw);
return (String)planRaw;
}
protected String submitPlan(String plan) {
// now all the data is in planRaw, submit that
HttpClientBuilder builder = HttpTool.httpClientBuilder()
.trustAll()
.laxRedirect(true);
if (entity().getConfig(MANAGEMENT_USER)!=null)
builder.credentials(new UsernamePasswordCredentials(entity().getConfig(MANAGEMENT_USER), entity().getConfig(MANAGEMENT_PASSWORD)));
HttpClient client = builder.build();
URI baseUri = Preconditions.checkNotNull(entity().getAttribute(WEB_CONSOLE_URI), "Cannot be invoked until the web console URL is available");
return submitPlan(client, baseUri, plan);
}
@VisibleForTesting
// Integration test for this in BrooklynNodeIntegrationTest in this project doesn't use this method,
// but a Unit test for this does, in DeployBlueprintTest -- but in the REST server project (since it runs against local)
public static String submitPlan(HttpClient client, URI baseUri, String plan) {
URI uri = URI.create(Urls.mergePaths(baseUri.toString(), "/v1/applications"));
HttpToolResponse result = null;
byte[] content;
try {
result = HttpTool.httpPost(client, uri, MutableMap.of(com.google.common.net.HttpHeaders.CONTENT_TYPE, "application/yaml"), plan.getBytes());
content = result.getContent();
} catch (Exception e) {
Exceptions.propagateIfFatal(e);
throw new IllegalStateException("Invalid response invoking "+uri+": "+e, e);
}
Tasks.addTagDynamically(BrooklynTaskTags.tagForStream("http_response", Streams.byteArray(content)));
if (!HttpTool.isStatusCodeHealthy(result.getResponseCode())) {
log.warn("Invalid response invoking "+uri+": response code "+result.getResponseCode()+"\n"+result+": "+new String(content));
throw new IllegalStateException("Invalid response invoking "+uri+": response code "+result.getResponseCode());
}
return (String)new Gson().fromJson(new String(content), Map.class).get("entityId");
}
}
public List<String> getClasspath() {
List<String> classpath = getConfig(CLASSPATH);
if (classpath == null || classpath.isEmpty()) {
classpath = getManagementContext().getConfig().getConfig(CLASSPATH);
}
return classpath;
}
protected List<String> getEnabledHttpProtocols() {
return getAttribute(ENABLED_HTTP_PROTOCOLS);
}
protected boolean isHttpProtocolEnabled(String protocol) {
List<String> protocols = getAttribute(ENABLED_HTTP_PROTOCOLS);
for (String contender : protocols) {
if (protocol.equalsIgnoreCase(contender)) {
return true;
}
}
return false;
}
    /**
     * Wires up sensor feeds once the entity starts: derives the web-console URI from the
     * enabled protocol (http checked before https), publishes it as {@code WEB_CONSOLE_URI},
     * and polls the console to compute {@code SERVICE_UP}.
     */
    @Override
    protected void connectSensors() {
        super.connectSensors();
        // TODO what sensors should we poll?
        ConfigToAttributes.apply(this);
        // Prefer the explicit bind address; otherwise choose localhost (when the console
        // has no authentication and is bound locally) or the public hostname.
        String host = getAttribute(WEB_CONSOLE_BIND_ADDRESS);
        if (Strings.isEmpty(host)) {
            if (getAttribute(NO_WEB_CONSOLE_AUTHENTICATION)) {
                host = "localhost"; // Because of --noConsoleSecurity option
            } else {
                host = getAttribute(HOSTNAME);
            }
        }
        URI webConsoleUri;
        if (isHttpProtocolEnabled("http")) {
            // PORT_MAPPER translates the internal port to an externally reachable one.
            int port = getConfig(PORT_MAPPER).apply(getAttribute(HTTP_PORT));
            webConsoleUri = URI.create(String.format("http://%s:%s", host, port));
            setAttribute(WEB_CONSOLE_URI, webConsoleUri);
        } else if (isHttpProtocolEnabled("https")) {
            int port = getConfig(PORT_MAPPER).apply(getAttribute(HTTPS_PORT));
            webConsoleUri = URI.create(String.format("https://%s:%s", host, port));
            setAttribute(WEB_CONSOLE_URI, webConsoleUri);
        } else {
            // web-console is not enabled
            setAttribute(WEB_CONSOLE_URI, null);
            webConsoleUri = null;
        }
        if (webConsoleUri != null) {
            // Poll the console every 200ms; SERVICE_UP is true iff the response code is 200.
            httpFeed = HttpFeed.builder()
                    .entity(this)
                    .period(200)
                    .baseUri(webConsoleUri)
                    .credentialsIfNotNull(getConfig(MANAGEMENT_USER), getConfig(MANAGEMENT_PASSWORD))
                    .poll(new HttpPollConfig<Boolean>(SERVICE_UP)
                            .onSuccess(HttpValueFunctions.responseCodeEquals(200))
                            .setOnFailureOrException(false))
                    .build();
        } else {
            // No console to poll: optimistically report the service as up.
            // NOTE(review): this can mask failures when the console is disabled — confirm intended.
            setAttribute(SERVICE_UP, true);
        }
    }
@Override
protected void disconnectSensors() {
super.disconnectSensors();
if (httpFeed != null) httpFeed.stop();
}
}
| |
/*
* Copyright 2014 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.cache;
import java.io.*;
import java.util.*;
import org.teavm.javascript.MethodNodeCache;
import org.teavm.javascript.ast.*;
import org.teavm.model.MethodReference;
import org.teavm.parsing.ClassDateProvider;
/**
 * A {@link MethodNodeCache} that persists decompiled method ASTs on disk, one file per
 * method (separate files for the regular and the async variant). Each cache file starts
 * with the list of class names the AST depends on; a cached entry is reused only when
 * none of those classes has been modified after the cache file was written, as reported
 * by {@link ClassDateProvider}.
 *
 * <p>Not thread-safe: callers are expected to use one instance per build.
 *
 * @author Alexey Andreev
 */
public class DiskRegularMethodNodeCache implements MethodNodeCache {
    private final File directory;
    private final AstIO astIO;
    private final ClassDateProvider classDateProvider;
    private final Map<MethodReference, Item> cache = new HashMap<>();
    private final Map<MethodReference, AsyncItem> asyncCache = new HashMap<>();
    private final Set<MethodReference> newMethods = new HashSet<>();
    private final Set<MethodReference> newAsyncMethods = new HashSet<>();

    public DiskRegularMethodNodeCache(File directory, SymbolTable symbolTable, SymbolTable fileTable,
            ClassDateProvider classDateProvider) {
        this.directory = directory;
        astIO = new AstIO(symbolTable, fileTable);
        this.classDateProvider = classDateProvider;
    }

    @Override
    public RegularMethodNode get(MethodReference methodReference) {
        Item item = cache.get(methodReference);
        if (item == null) {
            item = new Item();
            cache.put(methodReference, item);
            File file = getMethodFile(methodReference, false);
            if (file.exists()) {
                try (InputStream stream = new BufferedInputStream(new FileInputStream(file))) {
                    DataInput input = new DataInputStream(stream);
                    if (!dependenciesChanged(input, file)) {
                        item.node = astIO.read(input, methodReference);
                    }
                } catch (IOException e) {
                    // we could not read the cached AST; just leave the entry empty
                }
            }
        }
        return item.node;
    }

    @Override
    public void store(MethodReference methodReference, RegularMethodNode node) {
        Item item = new Item();
        item.node = node;
        cache.put(methodReference, item);
        // remember it so flush() writes it to disk
        newMethods.add(methodReference);
    }

    @Override
    public AsyncMethodNode getAsync(MethodReference methodReference) {
        AsyncItem item = asyncCache.get(methodReference);
        if (item == null) {
            item = new AsyncItem();
            asyncCache.put(methodReference, item);
            File file = getMethodFile(methodReference, true);
            if (file.exists()) {
                try (InputStream stream = new BufferedInputStream(new FileInputStream(file))) {
                    DataInput input = new DataInputStream(stream);
                    if (!dependenciesChanged(input, file)) {
                        item.node = astIO.readAsync(input, methodReference);
                    }
                } catch (IOException e) {
                    // we could not read the cached AST; just leave the entry empty
                }
            }
        }
        return item.node;
    }

    @Override
    public void storeAsync(MethodReference methodReference, AsyncMethodNode node) {
        AsyncItem item = new AsyncItem();
        item.node = node;
        asyncCache.put(methodReference, item);
        newAsyncMethods.add(methodReference);
    }

    /**
     * Reads the dependency header of a cache file and reports whether any dependency
     * class is unknown or newer than the cache file itself (i.e. the cached AST is stale).
     * On a stale result the stream may be left mid-header; callers must then skip reading
     * the AST body.
     */
    private boolean dependenciesChanged(DataInput input, File file) throws IOException {
        int depCount = input.readShort();
        Date fileDate = new Date(file.lastModified());
        for (int i = 0; i < depCount; ++i) {
            String depClass = input.readUTF();
            Date depDate = classDateProvider.getModificationDate(depClass);
            if (depDate == null || depDate.after(fileDate)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Writes all newly stored methods (regular and async) to their cache files,
     * prefixing each with the set of classes its AST references.
     */
    public void flush() throws IOException {
        for (MethodReference method : newMethods) {
            // BUG FIX: regular methods must go to the non-async file. Previously this
            // passed async=true, so get() (which reads with async=false) never found
            // entries written by flush().
            File file = getMethodFile(method, false);
            AstDependencyAnalyzer analyzer = new AstDependencyAnalyzer();
            RegularMethodNode node = cache.get(method).node;
            node.getBody().acceptVisitor(analyzer);
            analyzer.dependencies.add(method.getClassName());
            try (DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)))) {
                writeDependencies(output, analyzer.dependencies);
                astIO.write(output, node);
            }
        }
        for (MethodReference method : newAsyncMethods) {
            File file = getMethodFile(method, true);
            AstDependencyAnalyzer analyzer = new AstDependencyAnalyzer();
            AsyncMethodNode node = asyncCache.get(method).node;
            for (AsyncMethodPart part : node.getBody()) {
                part.getStatement().acceptVisitor(analyzer);
            }
            analyzer.dependencies.add(method.getClassName());
            try (DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)))) {
                writeDependencies(output, analyzer.dependencies);
                astIO.writeAsync(output, node);
            }
        }
    }

    /** Writes the dependency header: a count followed by one UTF string per class name. */
    private void writeDependencies(DataOutputStream output, Set<String> dependencies) throws IOException {
        output.writeShort(dependencies.size());
        for (String dependency : dependencies) {
            output.writeUTF(dependency);
        }
    }

    /** Maps a method to its cache file: {@code directory/<class-as-path>/<descriptor>.teavm-ast[-async]}. */
    private File getMethodFile(MethodReference method, boolean async) {
        File dir = new File(directory, method.getClassName().replace('.', '/'));
        return new File(dir, FileNameEncoder.encodeFileName(method.getDescriptor().toString()) + ".teavm-ast"
                + (async ? "-async" : ""));
    }

    /**
     * Visitor that collects the class names a method AST references (invocation targets,
     * field owners) so they can be recorded as cache dependencies.
     */
    static class AstDependencyAnalyzer implements StatementVisitor, ExprVisitor {
        Set<String> dependencies = new HashSet<>();

        private void visitSequence(List<Statement> statements) {
            for (Statement stmt : statements) {
                stmt.acceptVisitor(this);
            }
        }

        @Override
        public void visit(AssignmentStatement statement) {
            if (statement.getLeftValue() != null) {
                statement.getLeftValue().acceptVisitor(this);
            }
            statement.getRightValue().acceptVisitor(this);
        }

        @Override
        public void visit(SequentialStatement statement) {
            visitSequence(statement.getSequence());
        }

        @Override
        public void visit(ConditionalStatement statement) {
            statement.getCondition().acceptVisitor(this);
            visitSequence(statement.getConsequent());
            visitSequence(statement.getAlternative());
        }

        @Override
        public void visit(SwitchStatement statement) {
            statement.getValue().acceptVisitor(this);
            for (SwitchClause clause : statement.getClauses()) {
                visitSequence(clause.getBody());
            }
            visitSequence(statement.getDefaultClause());
        }

        @Override
        public void visit(WhileStatement statement) {
            if (statement.getCondition() != null) {
                statement.getCondition().acceptVisitor(this);
            }
            visitSequence(statement.getBody());
        }

        @Override
        public void visit(BlockStatement statement) {
            visitSequence(statement.getBody());
        }

        @Override
        public void visit(BreakStatement statement) {
        }

        @Override
        public void visit(ContinueStatement statement) {
        }

        @Override
        public void visit(ReturnStatement statement) {
            if (statement.getResult() != null) {
                statement.getResult().acceptVisitor(this);
            }
        }

        @Override
        public void visit(ThrowStatement statement) {
            statement.getException().acceptVisitor(this);
        }

        @Override
        public void visit(InitClassStatement statement) {
        }

        @Override
        public void visit(TryCatchStatement statement) {
            visitSequence(statement.getProtectedBody());
            visitSequence(statement.getHandler());
        }

        @Override
        public void visit(BinaryExpr expr) {
            expr.getFirstOperand().acceptVisitor(this);
            expr.getSecondOperand().acceptVisitor(this);
        }

        @Override
        public void visit(UnaryExpr expr) {
            expr.getOperand().acceptVisitor(this);
        }

        @Override
        public void visit(ConditionalExpr expr) {
            expr.getCondition().acceptVisitor(this);
            expr.getConsequent().acceptVisitor(this);
            expr.getAlternative().acceptVisitor(this);
        }

        @Override
        public void visit(ConstantExpr expr) {
        }

        @Override
        public void visit(VariableExpr expr) {
        }

        @Override
        public void visit(SubscriptExpr expr) {
            expr.getArray().acceptVisitor(this);
            expr.getIndex().acceptVisitor(this);
        }

        @Override
        public void visit(UnwrapArrayExpr expr) {
            expr.getArray().acceptVisitor(this);
        }

        @Override
        public void visit(InvocationExpr expr) {
            // the invoked method's declaring class is a dependency
            dependencies.add(expr.getMethod().getClassName());
            for (Expr argument : expr.getArguments()) {
                argument.acceptVisitor(this);
            }
        }

        @Override
        public void visit(QualificationExpr expr) {
            // the accessed field's declaring class is a dependency
            dependencies.add(expr.getField().getClassName());
            expr.getQualified().acceptVisitor(this);
        }

        @Override
        public void visit(NewExpr expr) {
        }

        @Override
        public void visit(NewArrayExpr expr) {
            expr.getLength().acceptVisitor(this);
        }

        @Override
        public void visit(NewMultiArrayExpr expr) {
            for (Expr dimension : expr.getDimensions()) {
                dimension.acceptVisitor(this);
            }
        }

        @Override
        public void visit(InstanceOfExpr expr) {
            expr.getExpr().acceptVisitor(this);
        }

        @Override
        public void visit(StaticClassExpr expr) {
        }

        @Override
        public void visit(GotoPartStatement statement) {
        }

        @Override
        public void visit(MonitorEnterStatement statement) {
        }

        @Override
        public void visit(MonitorExitStatement statement) {
        }
    }

    /** Cache slot for a regular method AST (null node == cache miss). */
    static class Item {
        RegularMethodNode node;
    }

    /** Cache slot for an async method AST (null node == cache miss). */
    static class AsyncItem {
        AsyncMethodNode node;
    }
}
| |
package com.cordovaplugincamerapreview;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.util.Base64;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.os.Bundle;
import android.util.Log;
import android.util.DisplayMetrics;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import org.apache.cordova.LOG;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.Exception;
import java.lang.Integer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Arrays;
/**
 * Fragment that hosts a live {@link Camera} preview for the Cordova camera-preview
 * plugin. Supports tap-to-focus, tap-to-take-picture and dragging of the preview
 * frame; results are delivered through {@link CameraPreviewListener}.
 */
public class CameraActivity extends Fragment {

  /** Callbacks delivered back to the plugin layer. */
  public interface CameraPreviewListener {
    void onPictureTaken(String originalPicture);
    void onPictureTakenError(String message);
    void onFocusSet(int pointX, int pointY);
    void onFocusSetError(String message);
    void onCameraStarted();
  }

  private CameraPreviewListener eventListener;
  private static final String TAG = "CameraActivity";
  public FrameLayout mainLayout;
  public FrameLayout frameContainerLayout;

  private Preview mPreview;
  private boolean canTakePicture = true;

  private View view;
  private Camera.Parameters cameraParameters;
  private Camera mCamera;
  private int numberOfCameras;
  private int cameraCurrentlyLocked;
  private int currentQuality;

  // Index of the camera selected by defaultCamera ("front"/"back")
  private int defaultCameraId;
  public String defaultCamera;
  public boolean tapToTakePicture;
  public boolean dragEnabled;
  public boolean tapToFocus;

  // Preview frame rectangle, in pixels, set by the plugin before onCreateView.
  public int width;
  public int height;
  public int x;
  public int y;

  public void setEventListener(CameraPreviewListener listener){
    eventListener = listener;
  }

  private String appResourcesPackage;

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    appResourcesPackage = getActivity().getPackageName();

    // Inflate the layout for this fragment
    view = inflater.inflate(getResources().getIdentifier("camera_activity", "layout", appResourcesPackage), container, false);
    createCameraPreview();
    return view;
  }

  /** Sets the position and size (pixels) of the preview frame; call before the view is created. */
  public void setRect(int x, int y, int width, int height){
    this.x = x;
    this.y = y;
    this.width = width;
    this.height = height;
  }

  /** Builds the preview surface and installs the touch handler for tap/drag gestures. */
  private void createCameraPreview(){
    if(mPreview == null) {
      setDefaultCameraId();

      //set box position and size
      FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(width, height);
      layoutParams.setMargins(x, y, 0, 0);
      frameContainerLayout = (FrameLayout) view.findViewById(getResources().getIdentifier("frame_container", "id", appResourcesPackage));
      frameContainerLayout.setLayoutParams(layoutParams);

      //video view
      mPreview = new Preview(getActivity());
      mainLayout = (FrameLayout) view.findViewById(getResources().getIdentifier("video_view", "id", appResourcesPackage));
      mainLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
      mainLayout.addView(mPreview);
      mainLayout.setEnabled(false);

      final GestureDetector gestureDetector = new GestureDetector(getActivity().getApplicationContext(), new TapGestureDetector());

      getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
          frameContainerLayout.setClickable(true);
          frameContainerLayout.setOnTouchListener(new View.OnTouchListener() {
            private int mLastTouchX;
            private int mLastTouchY;
            private int mPosX = 0;
            private int mPosY = 0;

            @Override
            public boolean onTouch(View v, MotionEvent event) {
              FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) frameContainerLayout.getLayoutParams();

              boolean isSingleTapTouch = gestureDetector.onTouchEvent(event);
              if (event.getAction() != MotionEvent.ACTION_MOVE && isSingleTapTouch) {
                // Single tap: focus and/or capture depending on configuration.
                if (tapToTakePicture && tapToFocus) {
                  setFocusArea((int)event.getX(0), (int)event.getY(0), new Camera.AutoFocusCallback() {
                    public void onAutoFocus(boolean success, Camera camera) {
                      if (success) {
                        takePicture(0, 0, 85);
                      } else {
                        Log.d(TAG, "onTouch:" + " setFocusArea() did not suceed");
                      }
                    }
                  });

                } else if(tapToTakePicture){
                  takePicture(0, 0, 85);

                } else if(tapToFocus){
                  setFocusArea((int)event.getX(0), (int)event.getY(0), new Camera.AutoFocusCallback() {
                    public void onAutoFocus(boolean success, Camera camera) {
                      if (success) {
                        // A callback to JS might make sense here.
                      } else {
                        Log.d(TAG, "onTouch:" + " setFocusArea() did not suceed");
                      }
                    }
                  });
                }
                return true;
              } else {
                // Not a tap: optionally drag the preview frame around.
                if (dragEnabled) {
                  int x;
                  int y;

                  switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                      if(mLastTouchX == 0 || mLastTouchY == 0) {
                        mLastTouchX = (int)event.getRawX() - layoutParams.leftMargin;
                        mLastTouchY = (int)event.getRawY() - layoutParams.topMargin;
                      }
                      else{
                        mLastTouchX = (int)event.getRawX();
                        mLastTouchY = (int)event.getRawY();
                      }
                      break;
                    case MotionEvent.ACTION_MOVE:

                      x = (int) event.getRawX();
                      y = (int) event.getRawY();

                      final float dx = x - mLastTouchX;
                      final float dy = y - mLastTouchY;

                      mPosX += dx;
                      mPosY += dy;

                      layoutParams.leftMargin = mPosX;
                      layoutParams.topMargin = mPosY;

                      frameContainerLayout.setLayoutParams(layoutParams);

                      // Remember this touch position for the next move event
                      mLastTouchX = x;
                      mLastTouchY = y;

                      break;
                    default:
                      break;
                  }
                }
              }
              return true;
            }
          });
        }
      });
    }
  }

  /** Selects the camera index matching {@link #defaultCamera} ("front" or anything else = back). */
  private void setDefaultCameraId(){
    // Find the total number of cameras available
    numberOfCameras = Camera.getNumberOfCameras();

    int facing = defaultCamera.equals("front") ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;

    // Find the ID of the first camera with the requested facing
    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    for (int i = 0; i < numberOfCameras; i++) {
      Camera.getCameraInfo(i, cameraInfo);
      if (cameraInfo.facing == facing) {
        // BUG FIX: use the camera index i, not the facing constant. The two happen
        // to coincide on many devices but are not guaranteed to be equal.
        defaultCameraId = i;
        break;
      }
    }
  }

  @Override
  public void onResume() {
    super.onResume();

    mCamera = Camera.open(defaultCameraId);

    if (cameraParameters != null) {
      mCamera.setParameters(cameraParameters);
    }

    cameraCurrentlyLocked = defaultCameraId;

    if(mPreview.mPreviewSize == null){
      mPreview.setCamera(mCamera, cameraCurrentlyLocked);
      eventListener.onCameraStarted();
    } else {
      mPreview.switchCamera(mCamera, cameraCurrentlyLocked);
      mCamera.startPreview();
    }

    Log.d(TAG, "cameraCurrentlyLocked:" + cameraCurrentlyLocked);

    final FrameLayout frameContainerLayout = (FrameLayout) view.findViewById(getResources().getIdentifier("frame_container", "id", appResourcesPackage));

    // Once laid out, size the camera container to exactly fill the frame container.
    ViewTreeObserver viewTreeObserver = frameContainerLayout.getViewTreeObserver();

    if (viewTreeObserver.isAlive()) {
      viewTreeObserver.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
        @Override
        public void onGlobalLayout() {
          frameContainerLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
          frameContainerLayout.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED);

          final RelativeLayout frameCamContainerLayout = (RelativeLayout) view.findViewById(getResources().getIdentifier("frame_camera_cont", "id", appResourcesPackage));

          FrameLayout.LayoutParams camViewLayout = new FrameLayout.LayoutParams(frameContainerLayout.getWidth(), frameContainerLayout.getHeight());
          camViewLayout.gravity = Gravity.CENTER_HORIZONTAL | Gravity.CENTER_VERTICAL;
          frameCamContainerLayout.setLayoutParams(camViewLayout);
        }
      });
    }
  }

  @Override
  public void onPause() {
    super.onPause();

    // Because the Camera object is a shared resource, it's very important to release it when the activity is paused.
    if (mCamera != null) {
      // NOTE(review): resetting the default camera id here looks intentional (so onResume
      // reopens the configured default), but verify it's not meant to keep the last camera.
      setDefaultCameraId();
      mPreview.setCamera(null, -1);
      mCamera.setPreviewCallback(null);
      mCamera.release();
      mCamera = null;
    }
  }

  public Camera getCamera() {
    return mCamera;
  }

  /** Cycles to the next physical camera (no-op when the device has only one). */
  public void switchCamera() {
    // check for availability of multiple cameras
    if (numberOfCameras == 1) {
      //There is only one camera available
    }else{
      Log.d(TAG, "numberOfCameras: " + numberOfCameras);

      // OK, we have multiple cameras. Release this camera -> cameraCurrentlyLocked
      if (mCamera != null) {
        mCamera.stopPreview();
        mPreview.setCamera(null, -1);
        mCamera.release();
        mCamera = null;
      }

      Log.d(TAG, "cameraCurrentlyLocked := " + Integer.toString(cameraCurrentlyLocked));
      try {
        cameraCurrentlyLocked = (cameraCurrentlyLocked + 1) % numberOfCameras;
        Log.d(TAG, "cameraCurrentlyLocked new: " + cameraCurrentlyLocked);
      } catch (Exception exception) {
        Log.d(TAG, exception.getMessage());
      }

      // Acquire the next camera and request Preview to reconfigure parameters.
      mCamera = Camera.open(cameraCurrentlyLocked);

      if (cameraParameters != null) {
        Log.d(TAG, "camera parameter not null");

        // Check for flashMode as well to prevent error on frontward facing camera.
        List<String> supportedFlashModesNewCamera = mCamera.getParameters().getSupportedFlashModes();
        String currentFlashModePreviousCamera = cameraParameters.getFlashMode();
        if (supportedFlashModesNewCamera != null && supportedFlashModesNewCamera.contains(currentFlashModePreviousCamera)) {
          Log.d(TAG, "current flash mode supported on new camera. setting params");
         /* mCamera.setParameters(cameraParameters);
            The line above is disabled because parameters that can actually be changed are different from one device to another. Makes less sense trying to reconfigure them when changing camera device while those settings gan be changed using plugin methods.
         */
        } else {
          Log.d(TAG, "current flash mode NOT supported on new camera");
        }

      } else {
        Log.d(TAG, "camera parameter NULL");
      }

      mPreview.switchCamera(mCamera, cameraCurrentlyLocked);

      mCamera.startPreview();
    }
  }

  public void setCameraParameters(Camera.Parameters params) {
    cameraParameters = params;

    if (mCamera != null && cameraParameters != null) {
      mCamera.setParameters(cameraParameters);
    }
  }

  public boolean hasFrontCamera(){
    return getActivity().getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT);
  }

  /** Returns a vertically mirrored copy of the bitmap (used for front-camera shots). */
  public static Bitmap flipBitmap(Bitmap source) {
    Matrix matrix = new Matrix();
    matrix.preScale(1.0f, -1.0f);
    return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
  }

  ShutterCallback shutterCallback = new ShutterCallback(){
    public void onShutter(){
      // do nothing, availabilty of this callback causes default system shutter sound to work
    }
  };

  PictureCallback jpegPictureCallback = new PictureCallback(){
    public void onPictureTaken(byte[] data, Camera arg1){
      Log.d(TAG, "CameraPreview jpegPictureCallback");

      try {
        // NOTE(review): this compares the camera *id* to the facing constant; it works
        // only when the front camera's id equals CAMERA_FACING_FRONT — confirm per-device.
        if(cameraCurrentlyLocked == Camera.CameraInfo.CAMERA_FACING_FRONT) {
          // Front camera output is mirrored: flip, then re-compress at the requested quality.
          Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
          bitmap = flipBitmap(bitmap);
          ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
          bitmap.compress(Bitmap.CompressFormat.JPEG, currentQuality, outputStream);
          data = outputStream.toByteArray();
        }

        String encodedImage = Base64.encodeToString(data, Base64.NO_WRAP);
        eventListener.onPictureTaken(encodedImage);
        Log.d(TAG, "CameraPreview pictureTakenHandler called back");
      } catch (OutOfMemoryError e) {
        // most likely failed to allocate memory for rotateBitmap
        Log.d(TAG, "CameraPreview OutOfMemoryError");
        // failed to allocate memory
        eventListener.onPictureTakenError("Picture too large (memory)");
      } catch (Exception e) {
        Log.d(TAG, "CameraPreview onPictureTaken general exception");
      } finally {
        canTakePicture = true;
        mCamera.startPreview();
      }
    }
  };

  private Camera.Size getOptimalPictureSize(final int width, final int height, final Camera.Size previewSize, final List<Camera.Size> supportedSizes){
    /*
      get the supportedPictureSize that:
      - matches exactly width and height
      - has the closest aspect ratio to the preview aspect ratio
      - has picture.width and picture.height closest to width and height
      - has the highest supported picture width and height up to 2 Megapixel if width == 0 || height == 0
    */
    Camera.Size size = mCamera.new Size(width, height);

    // convert to landscape if necessary
    if (size.width < size.height) {
      int temp = size.width;
      size.width = size.height;
      size.height = temp;
    }

    double previewAspectRatio  = (double)previewSize.width / (double)previewSize.height;

    if (previewAspectRatio < 1.0) {
      // reset ratio to landscape
      previewAspectRatio = 1.0 / previewAspectRatio;
    }

    Log.d(TAG, "CameraPreview previewAspectRatio " + previewAspectRatio);

    double aspectTolerance = 0.1;
    double bestDifference = Double.MAX_VALUE;

    for (int i = 0; i < supportedSizes.size(); i++) {
      Camera.Size supportedSize = supportedSizes.get(i);

      // Perfect match
      if (supportedSize.equals(size)) {
        Log.d(TAG, "CameraPreview optimalPictureSize " + supportedSize.width + 'x' + supportedSize.height);
        return supportedSize;
      }

      double difference = Math.abs(previewAspectRatio - ((double)supportedSize.width / (double)supportedSize.height));

      if (difference < bestDifference - aspectTolerance) {
        // better aspectRatio found
        if ((width != 0 && height != 0) || (supportedSize.width * supportedSize.height < 2048 * 1024)) {
          size.width = supportedSize.width;
          size.height = supportedSize.height;
          bestDifference = difference;
        }
      } else if (difference < bestDifference + aspectTolerance) {
        // same aspectRatio found (within tolerance)
        if (width == 0 || height == 0) {
          // set highest supported resolution below 2 Megapixel
          if ((size.width < supportedSize.width) && (supportedSize.width * supportedSize.height < 2048 * 1024)) {
            size.width = supportedSize.width;
            size.height = supportedSize.height;
          }
        } else {
          // check if this pictureSize closer to requested width and height
          if (Math.abs(width * height - supportedSize.width * supportedSize.height) < Math.abs(width * height - size.width * size.height)) {
            size.width = supportedSize.width;
            size.height = supportedSize.height;
          }
        }
      }
    }
    Log.d(TAG, "CameraPreview optimalPictureSize " + size.width + 'x' + size.height);
    return size;
  }

  /**
   * Captures a JPEG on a background thread and delivers it, base64-encoded, through
   * {@link CameraPreviewListener#onPictureTaken}. Re-entrant calls while a capture is
   * in flight are ignored.
   */
  public void takePicture(final int width, final int height, final int quality){
    Log.d(TAG, "CameraPreview takePicture width: " + width + ", height: " + height + ", quality: " + quality);

    if(mPreview != null) {
      if(!canTakePicture){
        return;
      }

      canTakePicture = false;

      new Thread() {
        public void run() {
          Camera.Parameters params = mCamera.getParameters();

          Camera.Size size = getOptimalPictureSize(width, height, params.getPreviewSize(), params.getSupportedPictureSizes());
          params.setPictureSize(size.width, size.height);
          currentQuality = quality;

          if(cameraCurrentlyLocked == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            // The image will be recompressed in the callback
            params.setJpegQuality(99);
          } else {
            params.setJpegQuality(quality);
          }

          params.setRotation(mPreview.getDisplayOrientation());

          mCamera.setParameters(params);
          mCamera.takePicture(shutterCallback, null, jpegPictureCallback);
        }
      }.start();
    } else {
      canTakePicture = true;
    }
  }

  /** Focuses (and meters, when supported) on the tapped point, then invokes the callback. */
  public void setFocusArea(final int pointX, final int pointY, final Camera.AutoFocusCallback callback) {
    if (mCamera != null) {

      mCamera.cancelAutoFocus();

      Camera.Parameters parameters = mCamera.getParameters();

      Rect focusRect = calculateTapArea(pointX, pointY, 1f);
      parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
      parameters.setFocusAreas(Arrays.asList(new Camera.Area(focusRect, 1000)));

      if (parameters.getMaxNumMeteringAreas() > 0) {
        Rect meteringRect = calculateTapArea(pointX, pointY, 1.5f);
        parameters.setMeteringAreas(Arrays.asList(new Camera.Area(meteringRect, 1000)));
      }

      try {
        setCameraParameters(parameters);
        mCamera.autoFocus(callback);
      } catch (Exception e) {
        Log.d(TAG, e.getMessage());
        callback.onAutoFocus(false, this.mCamera);
      }
    }
  }

  /**
   * Maps a tap at view coordinates (x, y) to a focus/metering rectangle in the camera
   * driver's coordinate system, where the preview spans [-1000, 1000] on both axes.
   * Values are clamped to that range, as Camera.Area rejects out-of-range rectangles
   * (the previous implementation could produce values outside it for taps near the edges).
   * NOTE(review): the coefficient parameter is unused, as in the original; kept for
   * signature compatibility.
   */
  private Rect calculateTapArea(float x, float y, float coefficient) {
    int left = Math.max(Math.round((x - 100) * 2000 / width - 1000), -1000);
    int top = Math.max(Math.round((y - 100) * 2000 / height - 1000), -1000);
    int right = Math.min(Math.round((x + 100) * 2000 / width - 1000), 1000);
    int bottom = Math.min(Math.round((y + 100) * 2000 / height - 1000), 1000);
    return new Rect(left, top, right, bottom);
  }
}
| |
package org.sunflow.core.photonmap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.sunflow.core.GlobalPhotonMapInterface;
import org.sunflow.core.Options;
import org.sunflow.core.ShadingState;
import org.sunflow.image.Color;
import org.sunflow.math.BoundingBox;
import org.sunflow.math.MathUtils;
import org.sunflow.math.Point3;
import org.sunflow.math.Vector3;
import org.sunflow.system.UI;
import org.sunflow.system.UI.Module;
public class GridPhotonMap implements GlobalPhotonMapInterface {
private int numGather;
private float gatherRadius;
private int numStoredPhotons;
private int nx, ny, nz;
private BoundingBox bounds;
private PhotonGroup[] cellHash;
private int hashSize;
private int hashPrime;
private ReentrantReadWriteLock rwl;
private int numEmit;
private static final float NORMAL_THRESHOLD = (float) Math.cos(10.0 * Math.PI / 180.0);
private static final int[] PRIMES = { 11, 19, 37, 109, 163, 251, 367, 557,
823, 1237, 1861, 2777, 4177, 6247, 9371, 21089, 31627, 47431,
71143, 106721, 160073, 240101, 360163, 540217, 810343, 1215497,
1823231, 2734867, 4102283, 6153409, 9230113, 13845163 };
public GridPhotonMap() {
numStoredPhotons = 0;
hashSize = 0; // number of unique IDs in the hash
rwl = new ReentrantReadWriteLock();
numEmit = 100000;
}
    /**
     * Sizes the grid from the scene bounds and reads tuning options
     * (emission count, gather count, gather radius), then allocates the
     * initial cell hash table.
     */
    public void prepare(Options options, BoundingBox sceneBounds) {
        // get settings
        numEmit = options.getInt("gi.irr-cache.gmap.emit", 100000);
        numGather = options.getInt("gi.irr-cache.gmap.gather", 50);
        gatherRadius = options.getFloat("gi.irr-cache.gmap.radius", 0.5f);
        bounds = new BoundingBox(sceneBounds);
        bounds.enlargeUlps();
        Vector3 w = bounds.getExtents();
        // roughly one cell per gather radius along each axis, at least 1
        nx = (int) Math.max(((w.x / gatherRadius) + 0.5f), 1);
        ny = (int) Math.max(((w.y / gatherRadius) + 0.5f), 1);
        nz = (int) Math.max(((w.z / gatherRadius) + 0.5f), 1);
        int numCells = nx * ny * nz;
        UI.printInfo(Module.LIGHT, "Initializing grid photon map:");
        UI.printInfo(Module.LIGHT, " * Resolution: %dx%dx%d", nx, ny, nz);
        UI.printInfo(Module.LIGHT, " * Total cells: %d", numCells);
        // pick the first precomputed prime larger than numCells / 5 as the hash size
        for (hashPrime = 0; hashPrime < PRIMES.length; hashPrime++)
            if (PRIMES[hashPrime] > (numCells / 5))
                break;
        cellHash = new PhotonGroup[PRIMES[hashPrime]];
        UI.printInfo(Module.LIGHT, " * Initial hash size: %d", cellHash.length);
    }
    /** Returns the number of photons stored so far. */
    public int size() {
        return numStoredPhotons;
    }
    /**
     * Stores a photon hit. The photon is binned into the grid cell containing the
     * shading point; within a cell, photons are grouped by surface normal (normals
     * differing by more than ~10 degrees go into separate groups). The update is
     * synchronized on this map so multiple tracing threads may store concurrently.
     *
     * @param state   shading state at the hit point
     * @param dir     incoming photon direction
     * @param power   photon flux to accumulate
     * @param diffuse surface diffuse color to accumulate
     */
    public void store(ShadingState state, Vector3 dir, Color power, Color diffuse) {
        // don't store on the wrong side of a surface
        if (Vector3.dot(state.getNormal(), dir) > 0)
            return;
        Point3 pt = state.getPoint();
        // outside grid bounds ?
        if (!bounds.contains(pt))
            return;
        Vector3 ext = bounds.getExtents();
        int ix = (int) (((pt.x - bounds.getMinimum().x) * nx) / ext.x);
        int iy = (int) (((pt.y - bounds.getMinimum().y) * ny) / ext.y);
        int iz = (int) (((pt.z - bounds.getMinimum().z) * nz) / ext.z);
        ix = MathUtils.clamp(ix, 0, nx - 1);
        iy = MathUtils.clamp(iy, 0, ny - 1);
        iz = MathUtils.clamp(iz, 0, nz - 1);
        // linearized cell id; also used to detect hash collisions between cells
        int id = ix + iy * nx + iz * nx * ny;
        synchronized (this) {
            int hid = id % cellHash.length;
            PhotonGroup g = cellHash[hid];
            PhotonGroup last = null;
            boolean hasID = false;
            while (g != null) {
                if (g.id == id) {
                    hasID = true;
                    // same cell and compatible normal: accumulate into this group
                    if (Vector3.dot(state.getNormal(), g.normal) > NORMAL_THRESHOLD)
                        break;
                }
                last = g;
                g = g.next;
            }
            if (g == null) {
                // no compatible group found: append a new one to this bucket's chain
                g = new PhotonGroup(id, state.getNormal());
                if (last == null)
                    cellHash[hid] = g;
                else
                    last.next = g;
                if (!hasID) {
                    hashSize++; // we have not seen this ID before
                    // resize hash if we have grown too large
                    if (hashSize > cellHash.length)
                        growPhotonHash();
                }
            }
            g.count++;
            g.flux.add(power);
            g.diffuse.add(diffuse);
            numStoredPhotons++;
        }
    }
public void init() {
UI.printInfo(Module.LIGHT, "Initializing photon grid ...");
UI.printInfo(Module.LIGHT, " * Photon hits: %d", numStoredPhotons);
UI.printInfo(Module.LIGHT, " * Final hash size: %d", cellHash.length);
int cells = 0;
for (int i = 0; i < cellHash.length; i++) {
for (PhotonGroup g = cellHash[i]; g != null; g = g.next) {
g.diffuse.mul(1.0f / g.count);
cells++;
}
}
UI.printInfo(Module.LIGHT, " * Num photon cells: %d", cells);
}
    /**
     * No-op for this implementation: radiance estimates are computed lazily and
     * cached per cell in {@link #getRadiance} rather than precomputed.
     */
    public void precomputeRadiance(boolean includeDirect, boolean includeCaustics) {
    }
    /**
     * Grows the cell hash table to the next precomputed prime size and re-hashes all
     * existing photon groups. Only called from the synchronized block in {@link #store},
     * so no extra locking is needed here. Does nothing when the largest prime is reached.
     */
    private void growPhotonHash() {
        // enlarge the hash size:
        if (hashPrime >= PRIMES.length - 1)
            return;
        PhotonGroup[] temp = new PhotonGroup[PRIMES[++hashPrime]];
        for (int i = 0; i < cellHash.length; i++) {
            PhotonGroup g = cellHash[i];
            while (g != null) {
                // re-hash into the new table
                int hid = g.id % temp.length;
                PhotonGroup last = null;
                // walk to the tail of the destination bucket so relative order is preserved
                for (PhotonGroup gn = temp[hid]; gn != null; gn = gn.next)
                    last = gn;
                if (last == null)
                    temp[hid] = g;
                else
                    last.next = g;
                // detach g from its old chain before moving to the next node
                PhotonGroup next = g.next;
                g.next = null;
                g = next;
            }
        }
        cellHash = temp;
    }
    /**
     * Returns an irradiance estimate at point p with surface normal n, or black when p
     * is outside the grid. The estimate is cached per photon group: a cached value is
     * returned under the read lock; otherwise neighboring cells are gathered in growing
     * shells until enough photons are found (or a 5x5x5 neighborhood is reached), and
     * the result is written back under the write lock.
     * NOTE(review): the method is declared {@code synchronized} AND uses the read/write
     * lock; the combination looks redundant — confirm whether both are required.
     */
    public synchronized Color getRadiance(Point3 p, Vector3 n) {
        if (!bounds.contains(p))
            return Color.BLACK;
        Vector3 ext = bounds.getExtents();
        int ix = (int) (((p.x - bounds.getMinimum().x) * nx) / ext.x);
        int iy = (int) (((p.y - bounds.getMinimum().y) * ny) / ext.y);
        int iz = (int) (((p.z - bounds.getMinimum().z) * nz) / ext.z);
        ix = MathUtils.clamp(ix, 0, nx - 1);
        iy = MathUtils.clamp(iy, 0, ny - 1);
        iz = MathUtils.clamp(iz, 0, nz - 1);
        int id = ix + iy * nx + iz * nx * ny;
        rwl.readLock().lock();
        PhotonGroup center = null;
        // look for a matching group in this cell; return its cached radiance if present
        for (PhotonGroup g = get(ix, iy, iz); g != null; g = g.next) {
            if (g.id == id && Vector3.dot(n, g.normal) > NORMAL_THRESHOLD) {
                if (g.radiance == null) {
                    center = g;
                    break;
                }
                Color r = g.radiance.copy();
                rwl.readLock().unlock();
                return r;
            }
        }
        // no cached value: gather photons from a growing cube of cells around (ix, iy, iz)
        int vol = 1;
        while (true) {
            int numPhotons = 0;
            int ndiff = 0;
            Color irr = Color.black();
            // diffuse only needs accumulating when there is no center group to take it from
            Color diff = (center == null) ? Color.black() : null;
            for (int z = iz - (vol - 1); z <= iz + (vol - 1); z++) {
                for (int y = iy - (vol - 1); y <= iy + (vol - 1); y++) {
                    for (int x = ix - (vol - 1); x <= ix + (vol - 1); x++) {
                        int vid = x + y * nx + z * nx * ny;
                        for (PhotonGroup g = get(x, y, z); g != null; g = g.next) {
                            if (g.id == vid && Vector3.dot(n, g.normal) > NORMAL_THRESHOLD) {
                                numPhotons += g.count;
                                irr.add(g.flux);
                                if (diff != null) {
                                    diff.add(g.diffuse);
                                    ndiff++;
                                }
                                break; // only one valid group can be found,
                                // skip the others
                            }
                        }
                    }
                }
            }
            if (numPhotons >= numGather || vol >= 3) {
                // we have found enough photons
                // cache irradiance and return
                float area = (2 * vol - 1) / 3.0f * ((ext.x / nx) + (ext.y / ny) + (ext.z / nz));
                area *= area;
                area *= Math.PI;
                irr.mul(1.0f / area);
                // upgrade lock manually
                rwl.readLock().unlock();
                rwl.writeLock().lock();
                if (center == null) {
                    // create a group for this cell so the estimate can be cached
                    if (ndiff > 0)
                        diff.mul(1.0f / ndiff);
                    center = new PhotonGroup(id, n);
                    center.diffuse.set(diff);
                    center.next = cellHash[id % cellHash.length];
                    cellHash[id % cellHash.length] = center;
                }
                irr.mul(center.diffuse);
                center.radiance = irr.copy();
                rwl.writeLock().unlock(); // unlock write - done
                return irr;
            }
            vol++;
        }
    }
private PhotonGroup get(int x, int y, int z) {
// returns the list associated with the specified location
if (x < 0 || x >= nx)
return null;
if (y < 0 || y >= ny)
return null;
if (z < 0 || z >= nz)
return null;
return cellHash[(x + y * nx + z * nx * ny) % cellHash.length];
}
private class PhotonGroup {
int id;
int count;
Vector3 normal;
Color flux;
Color radiance;
Color diffuse;
PhotonGroup next;
PhotonGroup(int id, Vector3 n) {
normal = new Vector3(n);
flux = Color.black();
diffuse = Color.black();
radiance = null;
count = 0;
this.id = id;
next = null;
}
}
    /** @return always {@code true} — diffuse-bounced photons are accepted by this map. */
    public boolean allowDiffuseBounced() {
        return true;
    }
    /** @return always {@code true} — specularly reflected photons are accepted by this map. */
    public boolean allowReflectionBounced() {
        return true;
    }
    /** @return always {@code true} — refracted photons are accepted by this map. */
    public boolean allowRefractionBounced() {
        return true;
    }
    /** @return the number of photons requested for emission into this map. */
    public int numEmit() {
        return numEmit;
    }
}
| |
/*
* Copyright 2008-2011 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package ae3.service.experiment;
import com.google.common.base.Supplier;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import org.junit.Test;
import uk.ac.ebi.gxa.properties.AtlasProperties;
import java.util.*;
import static org.junit.Assert.*;
/**
* @author Olga Melnichuk
*/
public class AtlasExperimentQueryParserTest {

    /** Default page size returned by the stubbed {@link AtlasProperties}. */
    private static final int DEFAULT_ROWS = 17;

    /** Maximum page size returned by the stubbed {@link AtlasProperties}. */
    private static final int MAX_ROWS = 35;

    /** Factor names the parser is configured with. */
    private static final String[] FACTORS = new String[]{"cell_type", "disease_state", "organism_part"};

    @Test
    public void experimentApiQueryTest1() {
        AtlasExperimentQuery query = parse("experiment=E-AFMX-1");
        assertTrue(query.isValid());
        assertCollectionEqualsTo(query.getExperimentKeywords(), "E-AFMX-1");
        assertFalse(query.isListAll());
        assertTrue(query.getFactors().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertTrue(query.getFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(DEFAULT_ROWS, query.getRows());
    }

    @Test
    public void experimentApiQueryTest2() {
        AtlasExperimentQuery query = parse("experimentHasOrganism_part=lung");
        assertTrue(query.isValid());
        assertMultimapsEquals(createMultimap("organism_part=lung"), query.getFactorValues());
        assertFalse(query.isListAll());
        assertTrue(query.getExperimentKeywords().isEmpty());
        assertTrue(query.getFactors().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(DEFAULT_ROWS, query.getRows());
    }

    @Test
    public void experimentApiQueryTest3() {
        AtlasExperimentQuery query = parse("experimentHasDisease_state=normal&experiment=cancer&start=10&rows=1");
        assertTrue(query.isValid());
        assertMultimapsEquals(createMultimap("disease_state=normal"), query.getFactorValues());
        assertCollectionEqualsTo(query.getExperimentKeywords(), "cancer");
        assertFalse(query.isListAll());
        assertTrue(query.getFactors().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(10, query.getStart());
        assertEquals(1, query.getRows());
    }

    @Test
    public void experimentApiQueryTest4() {
        AtlasExperimentQuery query = parse("experimentHasFactor=cell_type&experiment=cycle");
        assertTrue(query.isValid());
        assertCollectionEqualsTo(query.getExperimentKeywords(), "cycle");
        assertCollectionEqualsTo(query.getFactors(), "cell_type");
        assertFalse(query.isListAll());
        assertTrue(query.getFactorValues().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(DEFAULT_ROWS, query.getRows());
    }

    @Test
    public void experimentApiQueryTest5() {
        AtlasExperimentQuery query = parse("experiment=E-AFMX-5&geneIs=ENSG00000160766&geneIs=ENSG00000166337&format=xml");
        assertTrue(query.isValid());
        assertCollectionEqualsTo(query.getExperimentKeywords(), "E-AFMX-5");
        assertCollectionEqualsTo(query.getGeneIdentifiers(), "ENSG00000160766", "ENSG00000166337");
        assertFalse(query.isListAll());
        assertTrue(query.getFactors().isEmpty());
        assertTrue(query.getFactorValues().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(DEFAULT_ROWS, query.getRows());
    }

    @Test
    public void experimentApiQueryTest6() {
        AtlasExperimentQuery query = parse("experiment=listAll&experimentInfoOnly");
        assertTrue(query.isValid());
        assertTrue(query.isListAll());
        assertTrue(query.getExperimentKeywords().isEmpty());
        assertTrue(query.getFactors().isEmpty());
        assertTrue(query.getFactorValues().isEmpty());
        assertTrue(query.getAnyFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(Integer.MAX_VALUE, query.getRows());
    }

    @Test
    public void experimentApiQueryTest7() {
        AtlasExperimentQuery query = parse("experimentHasAnyFactor=factorValue");
        assertTrue(query.isValid());
        assertCollectionEqualsTo(query.getAnyFactorValues(), "factorValue");
        assertFalse(query.isListAll());
        assertTrue(query.getExperimentKeywords().isEmpty());
        assertTrue(query.getFactors().isEmpty());
        // FIX: this assertion was missing here although every sibling test checks it
        assertTrue(query.getFactorValues().isEmpty());
        assertTrue(query.getGeneIdentifiers().isEmpty());
        assertEquals(0, query.getStart());
        assertEquals(DEFAULT_ROWS, query.getRows());
    }

    /** Asserts that two multimaps hold the same keys and, per key, the same values (order-insensitive). */
    private void assertMultimapsEquals(Multimap<String, String> m1, Multimap<String, String> m2) {
        assertEquals(m1.size(), m2.size());
        for (String k : m1.keySet()) {
            assertTrue(m2.containsKey(k));
            List<String> c1 = new ArrayList<String>(m1.get(k));
            List<String> c2 = new ArrayList<String>(m2.get(k));
            assertUnsortedListsEquals(c1, c2);
        }
    }

    /** Asserts that the collection contains exactly the given strings, ignoring order. */
    private void assertCollectionEqualsTo(Collection<String> co, String... s) {
        // Copy the varargs into a fresh list: Arrays.asList() is a view over the
        // array, so sorting it in assertUnsortedListsEquals would mutate the
        // caller's array.
        List<String> list1 = new ArrayList<String>(Arrays.asList(s));
        List<String> list2 = new ArrayList<String>(co);
        assertUnsortedListsEquals(list1, list2);
    }

    /** Asserts list equality ignoring element order. Both lists are sorted in place. */
    private void assertUnsortedListsEquals(List<String> list1, List<String> list2) {
        Collections.sort(list1);
        Collections.sort(list2);
        assertEquals(list1, list2);
    }

    /** Builds a list-backed multimap from {@code key=value} pair strings. */
    private Multimap<String, String> createMultimap(String... pairs) {
        Multimap<String, String> map = Multimaps.newListMultimap(
                new HashMap<String, Collection<String>>(),
                new Supplier<List<String>>() {
                    @Override
                    public List<String> get() {
                        return new ArrayList<String>();
                    }
                }
        );
        for (String p : pairs) {
            String[] parts = p.split("=");
            map.put(parts[0], parts[1]);
        }
        return map;
    }

    /**
     * Simulates a servlet parameter map from a query string and runs it through
     * {@link AtlasExperimentQueryParser} configured with the stubbed properties.
     * NOTE(review): parameters without a value (e.g. "experimentInfoOnly") are
     * silently dropped here — confirm the parser never needs them as flags.
     */
    private AtlasExperimentQuery parse(String str) {
        Map<String, String[]> map = new HashMap<String, String[]>();
        String[] parts = str.split("&");
        for (String p : parts) {
            String[] nameValue = p.split("=");
            if (nameValue.length < 2) {
                continue;
            }
            String name = nameValue[0];
            String value = nameValue[1];
            String[] v = map.get(name);
            if (v == null) {
                v = new String[]{value};
            } else {
                // append the value to the existing array for repeated parameters
                v = Arrays.copyOf(v, v.length + 1);
                v[v.length - 1] = value;
            }
            map.put(name, v);
        }
        AtlasExperimentQueryParser parser = new AtlasExperimentQueryParser(new AtlasProperties() {
            @Override
            public int getQueryDefaultPageSize() {
                return DEFAULT_ROWS;
            }
            @Override
            public int getAPIQueryMaximumPageSize() {
                return MAX_ROWS;
            }
        }, Arrays.asList(FACTORS));
        return parser.parse(map);
    }
}
| |
package com.gentics.mesh.graphdb.index;
import static com.gentics.mesh.graphdb.FieldTypeMapper.toSubType;
import static com.gentics.mesh.graphdb.FieldTypeMapper.toType;
import java.util.stream.Stream;
import javax.inject.Inject;
import javax.inject.Singleton;
import com.gentics.madl.type.TypeHandler;
import com.gentics.mesh.core.db.GraphDBTx;
import com.gentics.mesh.graphdb.OrientDBDatabase;
import com.gentics.mesh.madl.field.FieldMap;
import com.gentics.mesh.madl.field.FieldType;
import com.gentics.mesh.madl.frame.VertexFrame;
import com.gentics.mesh.madl.type.EdgeTypeDefinition;
import com.gentics.mesh.madl.type.ElementTypeDefinition;
import com.gentics.mesh.madl.type.VertexTypeDefinition;
import com.gentics.mesh.util.StreamUtil;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.syncleus.ferma.FramedGraph;
import com.syncleus.ferma.ext.orientdb.DelegatingFramedOrientGraph;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
import com.tinkerpop.blueprints.impls.orient.OrientEdgeType;
import com.tinkerpop.blueprints.impls.orient.OrientGraphNoTx;
import com.tinkerpop.blueprints.impls.orient.OrientVertex;
import com.tinkerpop.blueprints.impls.orient.OrientVertexType;
import com.tinkerpop.blueprints.util.wrappers.wrapped.WrappedVertex;
import dagger.Lazy;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
/**
 * The type handler is used to manage OrientDB type system and create, update, removed types.
 */
@Singleton
public class OrientDBTypeHandler implements TypeHandler {

    private static final Logger log = LoggerFactory.getLogger(OrientDBTypeHandler.class);

    // Database reference; resolved on first use via dagger.Lazy
    private Lazy<OrientDBDatabase> db;

    @Inject
    public OrientDBTypeHandler(Lazy<OrientDBDatabase> db) {
        this.db = db;
    }

    /**
     * Creates the vertex type with the given name if it does not exist yet;
     * otherwise updates its superclass. Defaults to the OrientDB root vertex
     * class "V" when no superclass is given.
     */
    @Override
    public void addVertexType(String clazzOfVertex, String superClazzOfVertex) {
        if (log.isDebugEnabled()) {
            log.debug("Adding vertex type for class {" + clazzOfVertex + "}");
        }
        OrientGraphNoTx noTx = db.get().getTxProvider().rawNoTx();
        try {
            OrientVertexType vertexType = noTx.getVertexType(clazzOfVertex);
            if (vertexType == null) {
                String superClazz = "V";
                if (superClazzOfVertex != null) {
                    superClazz = superClazzOfVertex;
                }
                vertexType = noTx.createVertexType(clazzOfVertex, superClazz);
            } else {
                // Update the existing vertex type and set the super class
                if (superClazzOfVertex != null) {
                    OrientVertexType superType = noTx.getVertexType(superClazzOfVertex);
                    if (superType == null) {
                        throw new RuntimeException("The supertype for vertices of type {" + clazzOfVertex + "} can't be set since the supertype {"
                            + superClazzOfVertex + "} was not yet added to orientdb.");
                    }
                    vertexType.setSuperClass(superType);
                }
            }
        } finally {
            noTx.shutdown();
        }
    }

    /** Convenience wrapper that registers a vertex type using the classes' simple names. */
    @Override
    public void createVertexType(Class<?> clazzOfVertex, Class<?> superClazzOfVertex) {
        String superClazz = superClazzOfVertex == null ? null : superClazzOfVertex.getSimpleName();
        addVertexType(clazzOfVertex.getSimpleName(), superClazz);
    }

    /** Drops the edge type with the given name, if present. */
    @Override
    public void removeEdgeType(String typeName) {
        if (log.isDebugEnabled()) {
            // FIX: message previously (incorrectly) said "vertex type"
            log.debug("Removing edge type with name {" + typeName + "}");
        }
        OrientGraphNoTx noTx = db.get().getTxProvider().rawNoTx();
        try {
            // Guard against a missing type, mirroring removeVertexType()
            OrientEdgeType type = noTx.getEdgeType(typeName);
            if (type != null) {
                noTx.dropEdgeType(typeName);
            }
        } finally {
            noTx.shutdown();
        }
    }

    /** Drops the vertex type with the given name, if present. */
    @Override
    public void removeVertexType(String typeName) {
        if (log.isDebugEnabled()) {
            log.debug("Removing vertex type with name {" + typeName + "}");
        }
        OrientGraphNoTx noTx = db.get().getTxProvider().rawNoTx();
        try {
            OrientVertexType type = noTx.getVertexType(typeName);
            if (type != null) {
                noTx.dropVertexType(typeName);
            }
        } finally {
            noTx.shutdown();
        }
    }

    /**
     * Moves the vertex to a different OrientDB class and returns the migrated
     * vertex, re-fetched via its new record id.
     */
    @Override
    public Vertex changeType(Vertex vertex, String newType, Graph tx) {
        OrientVertex v = (OrientVertex) vertex;
        ORID newId = v.moveToClass(newType);
        return tx.getVertex(newId);
    }

    /** Moves the (possibly wrapped) vertex element to the class named after {@code classOfVertex}. */
    @Override
    public void setVertexType(Element element, Class<?> classOfVertex) {
        if (element instanceof WrappedVertex) {
            element = ((WrappedVertex) element).getBaseElement();
        }
        ((OrientVertex) element).moveToClass(classOfVertex.getSimpleName());
    }

    /** Dispatches the definition to vertex- or edge-type creation. */
    @Override
    public void createType(ElementTypeDefinition def) {
        if (def instanceof VertexTypeDefinition) {
            VertexTypeDefinition vertexType = (VertexTypeDefinition) def;
            createVertexType(vertexType.getClazz(), vertexType.getSuperClazz());
        } else if (def instanceof EdgeTypeDefinition) {
            EdgeTypeDefinition edgeType = (EdgeTypeDefinition) def;
            createEdgeType(edgeType);
        }
    }

    /**
     * Creates or updates the edge type for the definition's label, defaulting
     * the superclass to the OrientDB root edge class "E", and adds any declared
     * properties that do not yet exist on the type.
     */
    private void createEdgeType(EdgeTypeDefinition def) {
        String label = def.getLabel();
        Class<?> superClazzOfEdge = def.getSuperClazz();
        FieldMap fields = def.getFields();
        if (log.isDebugEnabled()) {
            log.debug("Adding edge type for label {" + label + "}");
        }
        OrientGraphNoTx noTx = db.get().getTxProvider().rawNoTx();
        try {
            OrientEdgeType e = noTx.getEdgeType(label);
            if (e == null) {
                String superClazz = "E";
                if (superClazzOfEdge != null) {
                    superClazz = superClazzOfEdge.getSimpleName();
                }
                e = noTx.createEdgeType(label, superClazz);
            } else {
                // Update the existing edge type and set the super class
                if (superClazzOfEdge != null) {
                    OrientEdgeType superType = noTx.getEdgeType(superClazzOfEdge.getSimpleName());
                    if (superType == null) {
                        throw new RuntimeException("The supertype for edges with label {" + label + "} can't be set since the supertype {"
                            + superClazzOfEdge.getSimpleName() + "} was not yet added to orientdb.");
                    }
                    e.setSuperClass(superType);
                }
            }
            if (fields != null) {
                for (String key : fields.keySet()) {
                    // only add properties that are not yet known to the schema
                    if (e.getProperty(key) == null) {
                        FieldType fieldType = fields.get(key);
                        OType type = toType(fieldType);
                        OType subType = toSubType(fieldType);
                        if (subType != null) {
                            e.createProperty(key, type, subType);
                        } else {
                            e.createProperty(key, type);
                        }
                    }
                }
            }
        } finally {
            noTx.shutdown();
        }
    }

    /** Counts the vertices of the OrientDB class named after the persistence class. */
    @Override
    public <T extends VertexFrame> long count(Class<? extends T> persistanceClass) {
        FramedGraph graph = GraphDBTx.getGraphTx().getGraph();
        Graph baseGraph = ((DelegatingFramedOrientGraph) graph).getBaseGraph();
        OrientBaseGraph orientBaseGraph = ((OrientBaseGraph) baseGraph);
        return orientBaseGraph.countVertices(persistanceClass.getSimpleName());
    }

    /** Streams all vertices of the OrientDB class named after {@code classOfT}, framed as T. */
    @Override
    @SuppressWarnings("unchecked")
    public <T extends VertexFrame> Stream<T> findAll(Class<? extends T> classOfT) {
        FramedGraph graph = GraphDBTx.getGraphTx().getGraph();
        Graph baseGraph = ((DelegatingFramedOrientGraph) graph).getBaseGraph();
        OrientBaseGraph orientBaseGraph = ((OrientBaseGraph) baseGraph);
        Iterable<Vertex> it = orientBaseGraph.getVerticesOfClass(classOfT.getSimpleName());
        return StreamUtil.toStream(it).map(v -> {
            return (T) graph.getFramedVertexExplicit(classOfT, v.getId());
        });
    }
}
| |
package water.api;
import water.Key;
import water.MemoryManager;
import water.api.KeyV3.FrameKeyV3;
import water.api.KeyV3.VecKeyV3;
import water.fvec.*;
import water.fvec.Frame.VecSpecifier;
import water.parser.ValueString;
import water.util.*;
import water.util.DocGen.HTML;
/**
* All the details on a Frame. Note that inside ColV3 there are fields which won't be
* populated if we don't compute rollups, e.g. via
* the REST API endpoint /Frames/<frameid>/columns/<colname>/summary.
*/
public class FrameV3 extends FrameBase<Frame, FrameV3> {
// Input fields
@API(help="Row offset to display",direction=API.Direction.INPUT)
public long row_offset;
@API(help="Number of rows to display",direction=API.Direction.INOUT)
public int row_count;
@API(help="Column offset to return", direction=API.Direction.INOUT)
public int column_offset;
@API(help="Number of columns to return", direction=API.Direction.INOUT)
public int column_count;
@API(help="Total number of columns in the Frame", direction=API.Direction.INOUT)
public int total_column_count;
// Output fields
@API(help="checksum", direction=API.Direction.OUTPUT)
public long checksum;
@API(help="Number of rows in the Frame", direction=API.Direction.OUTPUT)
public long rows;
@API(help="Default percentiles, from 0 to 1", direction=API.Direction.OUTPUT)
public double[] default_percentiles;
@API(help="Columns in the Frame", direction=API.Direction.OUTPUT)
public ColV3[] columns;
@API(help="Compatible models, if requested", direction=API.Direction.OUTPUT)
public String[] compatible_models;
@API(help="The set of IDs of vectors in the Frame", direction=API.Direction.OUTPUT)
public VecKeyV3[] vec_ids;
@API(help="Chunk summary", direction=API.Direction.OUTPUT)
public TwoDimTableBase chunk_summary;
@API(help="Distribution summary", direction=API.Direction.OUTPUT)
public TwoDimTableBase distribution_summary;
public static class ColSpecifierV3 extends Schema<VecSpecifier, ColSpecifierV3> {
public ColSpecifierV3() { }
public ColSpecifierV3(String column_name) {
this.column_name = column_name;
}
@API(help="Name of the column", direction= API.Direction.INOUT)
public String column_name;
@API(help="List of fields which specify columns that must contain this column", direction= API.Direction.INOUT)
public String[] is_member_of_frames;
}
public static class ColV3 extends Schema<Vec, ColV3> {
static final boolean FORCE_SUMMARY = true;
static final boolean NO_SUMMARY = false;
public ColV3() {}
@API(help="label", direction=API.Direction.OUTPUT)
public String label;
@API(help="missing", direction=API.Direction.OUTPUT)
public long missing_count;
@API(help="zeros", direction=API.Direction.OUTPUT)
public long zero_count;
@API(help="positive infinities", direction=API.Direction.OUTPUT)
public long positive_infinity_count;
@API(help="negative infinities", direction=API.Direction.OUTPUT)
public long negative_infinity_count;
@API(help="mins", direction=API.Direction.OUTPUT)
public double[] mins;
@API(help="maxs", direction=API.Direction.OUTPUT)
public double[] maxs;
@API(help="mean", direction=API.Direction.OUTPUT)
public double mean;
@API(help="sigma", direction=API.Direction.OUTPUT)
public double sigma;
@API(help="datatype: {enum, string, int, real, time, uuid}", direction=API.Direction.OUTPUT)
public String type;
@API(help="domain; not-null for enum columns only", direction=API.Direction.OUTPUT)
public String[] domain;
@API(help="cardinality of this column's domain; not-null for enum columns only", direction=API.Direction.OUTPUT)
public int domain_cardinality;
@API(help="data", direction=API.Direction.OUTPUT)
public double[] data;
@API(help="string data", direction=API.Direction.OUTPUT)
public String[] string_data;
@API(help="decimal precision, -1 for all digits", direction=API.Direction.OUTPUT)
public byte precision;
@API(help="Histogram bins; null if not computed", direction=API.Direction.OUTPUT)
public long[] histogram_bins;
@API(help="Start of histogram bin zero", direction=API.Direction.OUTPUT)
public double histogram_base;
@API(help="Stride per bin", direction=API.Direction.OUTPUT)
public double histogram_stride;
@API(help="Percentile values, matching the default percentiles", direction=API.Direction.OUTPUT)
public double[] percentiles;
transient Vec _vec;
ColV3(String name, Vec vec, long off, int len) {
this(name, vec, off, len, NO_SUMMARY);
}
ColV3(String name, Vec vec, long off, int len, boolean force_summary) {
label=name;
if (force_summary) {
missing_count = vec.naCnt();
zero_count = vec.length() - vec.nzCnt() - missing_count;
positive_infinity_count = vec.pinfs();
negative_infinity_count = vec.ninfs();
mins = vec.mins();
maxs = vec.maxs();
mean = vec.mean();
sigma = vec.sigma();
// Histogram data is only computed on-demand. By default here we do NOT
// compute it, but will return any prior computed & cached histogram.
histogram_bins = vec.lazy_bins();
histogram_base = histogram_bins ==null ? 0 : vec.base();
histogram_stride = histogram_bins ==null ? 0 : vec.stride();
percentiles = histogram_bins ==null ? null : vec.pctiles();
}
type = vec.isEnum() ? "enum" : vec.isUUID() ? "uuid" : vec.isString() ? "string" : (vec.isInt() ? (vec.isTime() ? "time" : "int") : "real");
domain = vec.domain();
if (vec.isEnum()) {
domain_cardinality = domain.length;
} else {
domain_cardinality = 0;
}
len = (int)Math.min(len,vec.length()-off);
if( vec.isUUID() ) {
string_data = new String[len];
for (int i = 0; i < len; i++)
string_data[i] = vec.isNA(off + i) ? null : PrettyPrint.UUID(vec.at16l(off + i), vec.at16h(off + i));
data = null;
} else if ( vec.isString() ) {
string_data = new String[len];
ValueString vstr = new ValueString();
for (int i = 0; i < len; i++)
string_data[i] = vec.isNA(off + i) ? null : vec.atStr(vstr,off + i).toString();
data = null;
} else {
data = MemoryManager.malloc8d(len);
for( int i=0; i<len; i++ )
data[i] = vec.at(off+i);
string_data = null;
}
_vec = vec; // Better HTML display, not in the JSON
if (len > 0) // len == 0 is presumed to be a header file
precision = vec.chunkForRow(0).precision();
}
public void clearBinsField() {
this.histogram_bins = null;
}
}
public FrameV3() { super(); }
/* Key-only constructor, for the times we only want to return the key. */
FrameV3(Key frame_id) { this.frame_id = new FrameKeyV3(frame_id); }
FrameV3(Frame fr) {
this(fr, 1, (int)fr.numRows(), 0, 0); // NOTE: possible row len truncation
}
FrameV3(Frame f, long row_offset, int row_count) {
this(f, row_offset, row_count, 0, 0);
}
FrameV3(Frame f, long row_offset, int row_count, int column_offset, int column_count) {
this.fillFromImpl(f, row_offset, row_count, column_offset, column_count, ColV3.NO_SUMMARY);
}
@Override public FrameV3 fillFromImpl(Frame f) {
return fillFromImpl(f, 1, (int)f.numRows(), 0, 0, ColV3.NO_SUMMARY);
}
public FrameV3 fillFromImpl(Frame f, long row_offset, int row_count, int column_offset, int column_count, boolean force_summary) {
if( row_count == 0 ) row_count = 100; // 100 rows by default
if( column_count == 0 ) column_count = f.numCols() - column_offset; // full width by default
row_count = (int)Math.min(row_count, row_offset + f.numRows());
column_count = (int) Math.min(column_count, column_offset + f.numCols());
this.frame_id = new FrameKeyV3(f._key);
this.checksum = f.checksum();
this.byte_size = f.byteSize();
this.row_offset = row_offset;
this.rows = f.numRows();
this.row_count = row_count;
this.total_column_count = f.numCols();
this.column_offset = column_offset;
this.column_count = column_count;
this.columns = new ColV3[column_count];
Key[] keys = f.keys();
if(keys != null && keys.length > 0) {
vec_ids = new VecKeyV3[column_count];
for (int i = 0; i < column_count; i++)
vec_ids[i] = new VecKeyV3(keys[column_offset + i]);
}
Vec[] vecs = f.vecs();
for( int i = 0; i < column_count; i++ ) {
try {
columns[i] = new ColV3(f._names[column_offset + i], vecs[column_offset + i], this.row_offset, this.row_count, force_summary);
}
catch (Exception e) {
Log.err("Caught exception processing FrameV2(", f._key.toString(), "): Vec: " + f._names[column_offset + i], e);
throw e;
}
}
this.is_text = f.numCols()==1 && vecs[0] instanceof ByteVec;
this.default_percentiles = Vec.PERCENTILES;
ChunkSummary cs = FrameUtils.chunkSummary(f);
TwoDimTable chunk_summary_table = cs.toTwoDimTableChunkTypes();
this.chunk_summary = (TwoDimTableBase)Schema.schema(this.getSchemaVersion(), chunk_summary_table).fillFromImpl(chunk_summary_table);
TwoDimTable distribution_summary_table = cs.toTwoDimTableDistribution();
distribution_summary = (TwoDimTableBase)Schema.schema(this.getSchemaVersion(), distribution_summary_table).fillFromImpl(distribution_summary_table);
this._fr = f;
return this;
}
public void clearBinsField() {
for (ColV3 col: columns)
col.clearBinsField();
}
@Override public HTML writeHTML_impl( HTML ab ) {
String[] urls = RequestServer.frameChoices(getSchemaVersion(),_fr);
for( String url : urls )
ab.href("hex",url,url);
// Main data display
// Column names
String titles[] = new String[_fr._names.length+1];
titles[0]="";
System.arraycopy(_fr._names,0,titles,1,_fr._names.length);
ab.arrayHead(titles);
// Rollup data
final long nrows = _fr.numRows();
formatRow(ab,"","type" ,new ColOp() { String op(ColV3 c) { return c.type; } } );
formatRow(ab,"","min" ,new ColOp() { String op(ColV3 c) { return rollUpStr(c, c.missing_count ==nrows ? Double.NaN : c.mins[0]); } } );
formatRow(ab,"","max" ,new ColOp() { String op(ColV3 c) { return rollUpStr(c, c.missing_count ==nrows ? Double.NaN : c.maxs[0]); } } );
formatRow(ab,"","mean" ,new ColOp() { String op(ColV3 c) { return rollUpStr(c, c.missing_count ==nrows ? Double.NaN : c.mean ); } } );
formatRow(ab,"","sigma",new ColOp() { String op(ColV3 c) { return rollUpStr(c, c.missing_count ==nrows ? Double.NaN : c.sigma ); } } );
// Optional rows: missing elements, zeros, positive & negative infinities, levels
for( ColV3 c : columns ) if( c.missing_count > 0 )
{ formatRow(ab,"class='warning'","missing",new ColOp() { String op(ColV3 c) { return c.missing_count == 0 ?"":Long.toString(c.missing_count);}}); break; }
for( ColV3 c : columns ) if( c.zero_count > 0 )
{ formatRow(ab,"class='warning'","zeros" ,new ColOp() { String op(ColV3 c) { return c.zero_count == 0 ?"":Long.toString(c.zero_count);}}); break; }
for( ColV3 c : columns ) if( c.positive_infinity_count > 0 )
{ formatRow(ab,"class='warning'","+infins",new ColOp() { String op(ColV3 c) { return c.positive_infinity_count == 0 ?"":Long.toString(c.positive_infinity_count);}}); break; }
for( ColV3 c : columns ) if( c.negative_infinity_count > 0 )
{ formatRow(ab,"class='warning'","-infins",new ColOp() { String op(ColV3 c) { return c.negative_infinity_count == 0 ?"":Long.toString(c.negative_infinity_count);}}); break; }
for( ColV3 c : columns ) if( c.domain!=null)
{ formatRow(ab,"class='warning'","levels" ,new ColOp() { String op(ColV3 c) { return c.domain==null?"":Long.toString(c.domain.length);}}); break; }
// Frame data
final int len = columns.length > 0 ? columns[0].data.length : 0;
for( int i=0; i<len; i++ ) {
final int row = i;
formatRow(ab,"",Long.toString(row_offset +row+1),new ColOp() {
String op(ColV3 c) {
return formatCell(c.data==null?0:c.data[row],c.string_data ==null?null:c.string_data[row],c,0); }
} );
}
ab.arrayTail();
return ab.bodyTail();
}
private abstract static class ColOp { abstract String op(ColV3 v); }
private String rollUpStr(ColV3 c, double d) {
return formatCell(c.domain!=null || "uuid".equals(c.type) || "string".equals(c.type) ? Double.NaN : d,null,c,4);
}
private void formatRow( HTML ab, String color, String msg, ColOp vop ) {
ab.p("<tr").p(color).p(">");
ab.cell(msg);
for( ColV3 c : columns ) ab.cell(vop.op(c));
ab.p("</tr>");
}
private String formatCell( double d, String str, ColV3 c, int precision ) {
if( Double.isNaN(d) ) return "-";
if( c.domain!=null ) return c.domain[(int)d];
if( "uuid".equals(c.type) || "string".equals(c.type)) {
// UUID and String handling
if( str==null ) return "-";
return "<b style=\"font-family:monospace;\">"+str+"</b>";
}
long l = (long)d;
if( (double)l == d ) return Long.toString(l);
if( precision > 0 ) return x2(d,PrettyPrint.pow10(-precision));
Chunk chk = c._vec.chunkForRow(row_offset);
Class Cc = chk.getClass();
if( Cc == C1SChunk.class ) return x2(d,((C1SChunk)chk).scale());
if( Cc == C2SChunk.class ) return x2(d,((C2SChunk)chk).scale());
if( Cc == C4SChunk.class ) return x2(d,((C4SChunk)chk).scale());
return Double.toString(d);
}
private static String x2( double d, double scale ) {
String s = Double.toString(d);
// Double math roundoff error means sometimes we get very long trailing
// strings of junk 0's with 1 digit at the end... when we *know* the data
// has only "scale" digits. Chop back to actual digits
int ex = (int)Math.log10(scale);
int x = s.indexOf('.');
int y = x+1+(-ex);
if( x != -1 && y < s.length() ) s = s.substring(0,x+1+(-ex));
while( s.charAt(s.length()-1)=='0' )
s = s.substring(0,s.length()-1);
return s;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.visualizers;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.jmeter.extractor.XPath2Extractor;
import org.apache.jmeter.gui.util.JSyntaxTextArea;
import org.apache.jmeter.gui.util.JTextScrollPane;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jmeter.util.XPathUtil;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implement ResultsRender for XPath tester
*/
public class RenderAsXPath2 implements ResultRenderer, ActionListener {
    private static final Logger log = LoggerFactory.getLogger(RenderAsXPath2.class);

    // Swing action commands for the two buttons on the XPath tester tab
    private static final String XPATH_TESTER_COMMAND = "xpath_tester"; // $NON-NLS-1$

    private static final String XPATH_NAMESPACES_COMMAND = "xpath_namespaces"; // $NON-NLS-1$

    // Panel holding the whole XPath tester UI; created in init()
    private JPanel xmlWithXPathPane;

    // Editable copy of the sampler response the expression is evaluated against
    private JSyntaxTextArea xmlDataField;

    private JTextField xpathExpressionField;

    // Read-only area showing match results or the namespace listing
    private JSyntaxTextArea xpathResultField;

    private JTabbedPane rightSide;

    // Should we return fragment as text, rather than text of fragment?
    private final JCheckBox getFragment =
        new JCheckBox(JMeterUtils.getResString("xpath_tester_fragment"));//$NON-NLS-1$

    // Namespace declarations passed to the Saxon XPath engine
    private JSyntaxTextArea namespacesTA;
    /** {@inheritDoc} */
    @Override
    public void clearData() {
        // N.B. don't set xpathExpressionField to empty to keep xpath
        // between samples; only the data and result panes are reset
        this.xmlDataField.setText(""); // $NON-NLS-1$
        this.xpathResultField.setText(""); // $NON-NLS-1$
    }
    /** {@inheritDoc} */
    @Override
    public void init() {
        // Create the panels for the xpath tab (fields are wired up there)
        xmlWithXPathPane = createXpathExtractorPanel();
    }
/**
* Display the response as text or as rendered HTML. Change the text on the
* button appropriate to the current display.
*
* @param e the ActionEvent being processed
*/
@Override
public void actionPerformed(ActionEvent e) {
String command = e.getActionCommand();
String xmlDataFieldText = xmlDataField.getText();
if (StringUtils.isEmpty(xmlDataFieldText)) {
return;
}
if (XPATH_TESTER_COMMAND.equals(command)) {
XPath2Extractor extractor = new XPath2Extractor();
extractor.setFragment(getFragment.isSelected());
executeAndShowXPathTester(xmlDataFieldText, extractor);
}
else if (XPATH_NAMESPACES_COMMAND.equals(command)) {
this.xpathResultField.setText(getDocumentNamespaces(xmlDataFieldText));
}
}
/**
* Launch xpath engine to parse a input text
* @param textToParse
*/
private void executeAndShowXPathTester(String textToParse, XPath2Extractor extractor) {
if (textToParse != null && textToParse.length() > 0
&& this.xpathExpressionField.getText().length() > 0) {
this.xpathResultField.setText(process(textToParse, extractor));
this.xpathResultField.setCaretPosition(0); // go to first line
}
}
private String process(String textToParse, XPath2Extractor extractor) {
try {
List<String> matchStrings = new ArrayList<>();
XPathUtil.putValuesForXPathInListUsingSaxon(textToParse, xpathExpressionField.getText(),
matchStrings, extractor.getFragment(), -1, namespacesTA.getText());
StringBuilder builder = new StringBuilder();
int nbFound = matchStrings.size();
builder.append("Match count: ").append(nbFound).append("\n");
for (int i = 0; i < nbFound; i++) {
builder.append("Match[").append(i+1).append("]=").append(matchStrings.get(i)).append("\n");
}
return builder.toString();
} catch (Exception e) {
return "Exception:"+ ExceptionUtils.getStackTrace(e);
}
}
private String getDocumentNamespaces(String textToParse) {
StringBuilder result = new StringBuilder();
try {
List<String[]> namespaces = XPathUtil.getNamespaces(textToParse);
for (int i = 0;i<namespaces.size();i++) {
result.append(namespaces.get(i)[0])
.append('=') // $NON-NLS-1$
.append(namespaces.get(i)[1])
.append('\n'); // $NON-NLS-1$
}
return result.toString();
} catch (Exception e) {
return "Exception:"+ ExceptionUtils.getStackTrace(e);
}
}
/*================= internal business =================*/
/** {@inheritDoc} */
@Override
public void renderResult(SampleResult sampleResult) {
String response = ViewResultsFullVisualizer.getResponseAsString(sampleResult);
try {
xmlDataField.setText(response == null ? "" : response);
xmlDataField.setCaretPosition(0);
} catch (Exception e) {
log.error("Exception converting to XML: {}, message: {}", response, e.getMessage(), e);
xmlDataField.setText("Exception converting to XML:"+response+ ", message:"+e.getMessage());
xmlDataField.setCaretPosition(0);
}
}
/** {@inheritDoc} */
@Override
public String toString() {
return JMeterUtils.getResString("xpath2_tester"); // $NON-NLS-1$
}
/** {@inheritDoc} */
@Override
public void setupTabPane() {
// Add xpath tester pane
if (rightSide.indexOfTab(JMeterUtils.getResString("xpath_tester_title")) < 0) { // $NON-NLS-1$
rightSide.addTab(JMeterUtils.getResString("xpath_tester_title"), xmlWithXPathPane); // $NON-NLS-1$
}
clearData();
}
/**
* @return XPath Tester panel
*/
private JPanel createXpathExtractorPanel() {
xmlDataField = JSyntaxTextArea.getInstance(50, 80, true);
xmlDataField.setCodeFoldingEnabled(true);
xmlDataField.setEditable(true);
xmlDataField.setBracketMatchingEnabled(false);
xmlDataField.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_XML);
xmlDataField.setLanguage(SyntaxConstants.SYNTAX_STYLE_XML);
xmlDataField.setLineWrap(true);
xmlDataField.setWrapStyleWord(true);
JScrollPane xmlDataPane = JTextScrollPane.getInstance(xmlDataField, true);
xmlDataPane.setMinimumSize(new Dimension(0, 100));
xmlDataPane.setPreferredSize(new Dimension(0, 200));
JPanel highPane = new JPanel(new GridLayout(1, 1));
highPane.add(xmlDataPane);
JPanel pane = new JPanel(new GridLayout(1, 1));
JSplitPane mainSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT,
highPane, createXpathExtractorTasksPanel());
mainSplit.setDividerLocation(0.5d);
mainSplit.setOneTouchExpandable(true);
pane.add(mainSplit);
return pane;
}
/**
* Create the XPath task pane
*
* @return XPath task pane
*/
private JPanel createXpathExtractorTasksPanel() {
xpathExpressionField = new JTextField(20);
xpathExpressionField.setMinimumSize(new Dimension(50, 20));
JLabel label = new JLabel(JMeterUtils.getResString("xpath_tester_field")); // $NON-NLS-1$
JButton xpathTester = new JButton(JMeterUtils.getResString("xpath_tester_button_test")); // $NON-NLS-1$
xpathTester.setActionCommand(XPATH_TESTER_COMMAND);
xpathTester.addActionListener(this);
JButton xpathTesterNamespaces = new JButton(JMeterUtils.getResString("xpath_namespaces")); // $NON-NLS-1$
xpathTesterNamespaces.setActionCommand(XPATH_NAMESPACES_COMMAND);
xpathTesterNamespaces.addActionListener(this);
JPanel panel = new JPanel(new GridBagLayout());
GridBagConstraints gbc = new GridBagConstraints();
initConstraints(gbc);
panel.add(label, gbc.clone());
gbc.gridx++;
gbc.gridwidth = 2;
panel.add(xpathExpressionField, gbc.clone());
gbc.gridx+=2;
gbc.gridwidth = 2;
panel.add(xpathTester, gbc.clone());
gbc.gridx+=2;
gbc.gridwidth = 2;
panel.add(xpathTesterNamespaces, gbc.clone());
gbc.weighty = 1;
gbc.gridx = 0;
gbc.gridy++;
gbc.gridwidth = 2;
panel.add(new JLabel(JMeterUtils.getResString("xpath_extractor_user_namespaces")), gbc.clone());
gbc.gridx++;
gbc.gridwidth = 6;
namespacesTA = JSyntaxTextArea.getInstance(5, 40);
panel.add(JTextScrollPane.getInstance(namespacesTA, true), gbc.clone());
gbc.gridx = 0;
gbc.gridy++;
gbc.gridwidth = 8;
panel.add(getFragment, gbc.clone());
gbc.gridx = 0;
gbc.gridy++;
gbc.gridwidth = 8;
xpathResultField = JSyntaxTextArea.getInstance(10, 70, true);
xpathResultField.setEditable(false);
xpathResultField.setLineWrap(true);
xpathResultField.setWrapStyleWord(true);
gbc.fill = GridBagConstraints.HORIZONTAL;
panel.add(JTextScrollPane.getInstance(xpathResultField, true), gbc.clone());
return panel;
}
private void initConstraints(GridBagConstraints gbc) {
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.gridheight = 1;
gbc.gridwidth = 1;
gbc.gridx = 0;
gbc.gridy = 0;
gbc.weightx = 1;
gbc.weighty = 1;
}
/** {@inheritDoc} */
@Override
public synchronized void setRightSide(JTabbedPane side) {
rightSide = side;
}
/** {@inheritDoc} */
@Override
public void setSamplerResult(Object userObject) {
// NOOP
}
/** {@inheritDoc} */
@Override
public void setLastSelectedTab(int index) {
// nothing to do
}
/** {@inheritDoc} */
@Override
public void renderImage(SampleResult sampleResult) {
clearData();
xmlDataField.setText(JMeterUtils.getResString("xpath_tester_no_text")); // $NON-NLS-1$
}
/** {@inheritDoc} */
@Override
public void setBackgroundColor(Color backGround) {
// NOOP
}
}
| |
//
// Translated by CS2J (http://www.cs2j.com): 12.11.2016 09:40:46
//
package Reasoner.IntervalPropagation;
import AutoDiff.Abs;
import AutoDiff.And;
import AutoDiff.Atan2;
import AutoDiff.ConstPower;
import AutoDiff.Constant;
import AutoDiff.ConstraintUtility;
import AutoDiff.Cos;
import AutoDiff.Exp;
import AutoDiff.ITermVisitor;
import AutoDiff.LTConstraint;
import AutoDiff.LTEConstraint;
import AutoDiff.LinSigmoid;
import AutoDiff.Log;
import AutoDiff.Max;
import AutoDiff.Min;
import AutoDiff.Negation;
import AutoDiff.Or;
import AutoDiff.Product;
import AutoDiff.Reification;
import AutoDiff.Sigmoid;
import AutoDiff.Sin;
import AutoDiff.Sum;
import AutoDiff.Term;
import AutoDiff.TermPower;
import AutoDiff.Zero;
import org.apache.commons.lang3.NotImplementedException;
//#define DEBUG_DP
//using Alica.Reasoner;
//using Al=Alica;
/**
 * Downward (top-down) interval propagation over an AutoDiff term graph.
 *
 * Each {@code visit} method takes a composite term whose interval
 * [{@code Min}, {@code Max}] is already known and tries to tighten the
 * intervals of its operand term(s) by inverting the operation. A visit
 * returns {@code true} iff at least one operand interval was narrowed;
 * every narrowed operand is queued on {@link #Changed} for further
 * propagation. An empty intersection raises {@link UnsolveableException}
 * (see {@link #updateInterval}).
 *
 * Machine-translated from C# (CS2J); the commented-out C# code is kept as
 * translation provenance.
 *
 * NOTE(review): several visitors (And, Max, ConstraintUtility, Reification)
 * pin operands to [1,1] / treat "&gt; 0" as logical truth — this assumes
 * boolean-valued terms are encoded in [0,1]; TODO confirm against the
 * AutoDiff term definitions.
 */
public class DownwardPropagator implements ITermVisitor<Boolean>
{
    // Work queue of terms whose intervals changed and still need processing.
    public TermList Changed;
    //internal Queue<Term> Changed;

    /** Enqueues {@code t} for further propagation unless already queued. */
    private void addChanged(Term t) throws Exception {
        if (!Changed.contains(t))
            Changed.enqueue(t);
    }

    public DownwardPropagator() throws Exception {
    }

    /** Leaf term — nothing below it to tighten. */
    public Boolean visit(Constant constant) throws Exception {
        return false;
    }

    /** Leaf term — nothing below it to tighten. */
    public Boolean visit(Zero zero) throws Exception {
        return false;
    }

    /**
     * Inverts base^exponent: bounds the base by the exponent-th root of the
     * power's bounds. For non-integer exponents the base must be non-negative;
     * for integer exponents the sign of the base interval is used to pick the
     * tightest of the +/- root candidates.
     */
    public Boolean visit(ConstPower intPower) throws Exception {
        // Unbounded power gives no information about the base.
        if (intPower.Max == Double.POSITIVE_INFINITY || intPower.Min == Double.NEGATIVE_INFINITY)
            return false;
        // Candidate roots of the two bounds (may be NaN for negative bases;
        // NaN bounds are ignored by updateInterval).
        double a = Math.pow(intPower.Min, 1 / intPower.getExponent());
        double b = Math.pow(intPower.Max, 1 / intPower.getExponent());
        // True when the exponent is not an integer (name is historical).
        Boolean isRational = intPower.getExponent() != Math.round(intPower.getExponent());
        if (isRational)
        {
            // Non-integer exponent: base must be >= 0.
            if (updateInterval(intPower.getBase(), Math.max(0, Math.min(a, b)), Math.max(a, Math.max(-a, Math.max(b, -b)))))
            {
                addChanged(intPower.getBase());
                return true;
            }
        }
        else
        {
            double min;
            double max;
            if (intPower.getExponent() >= 0)
            {
                // Use the known sign of the base interval to discard
                // impossible root branches.
                if (intPower.getBase().Max <= 0)
                {
                    max = Math.max(-Math.abs(a), -Math.abs(b));
                }
                else
                    max = Math.max(a, Math.max(-a, Math.max(b, -b)));
                if (intPower.getBase().Min >= 0)
                {
                    min = Math.min(Math.abs(a), Math.abs(b));
                }
                else
                    min = Math.min(a, Math.min(-a, Math.min(b, -b)));
            }
            else
            {
                //this case can be improved
                max = Math.max(a, Math.max(-a, Math.max(b, -b)));
                min = Math.min(a, Math.min(-a, Math.min(b, -b)));
            }
            if (updateInterval(intPower.getBase(), min, max))
            {
                addChanged(intPower.getBase());
                return true;
            }
        }
        return false;
    }

    public Boolean visit(TermPower tp) throws Exception {
        throw new NotImplementedException("Propagation for TermPower not implemented");
    }

    /**
     * Inverts a*b = c: each factor is bounded by c divided by the other
     * factor (only safe when the divisor interval does not straddle zero);
     * when both factors are the same term, inverts the square via sqrt.
     */
    public Boolean visit(Product product) throws Exception {
        /*
         * a*b = c
         * ==> a = c/b
         * */
        if (product.getLeft() == product.getRight())
        {
            // x*x = c: candidate magnitudes are the square roots of the bounds.
            double a = Math.sqrt(product.Min);
            double b = Math.sqrt(product.Max);
            double min;
            double max;
            // Pick the +/- branch consistent with the known sign of x.
            if (product.getLeft().Max <= 0)
            {
                max = Math.max(-a, -b);
            }
            else
                max = Math.max(a, b);
            if (product.getLeft().Min >= 0)
            {
                min = Math.min(a, b);
            }
            else
                min = Math.min(-a, -b);
            if (updateInterval(product.getLeft(), min, max))
            {
                addChanged(product.getLeft());
                return true;
            }
        }
        else
        {
            Boolean c = false, d = false;
            // Min*Max > 0 <=> the divisor interval excludes zero.
            if (product.getRight().Min * product.getRight().Max > 0)
            {
                //Left:
                double aa = product.Min / product.getRight().Min;
                double ab = product.Min / product.getRight().Max;
                double ba = product.Max / product.getRight().Min;
                double bb = product.Max / product.getRight().Max;
                double min = Math.min(aa, Math.min(ab, Math.min(ba, bb)));
                double max = Math.max(aa, Math.max(ab, Math.max(ba, bb)));
                c = updateInterval(product.getLeft(), min, max);
                if (c)
                    addChanged(product.getLeft());
            }
            if (product.getLeft().Min * product.getLeft().Max > 0)
            {
                //Right:
                double aa = product.Min / product.getLeft().Min;
                double ab = product.Min / product.getLeft().Max;
                double ba = product.Max / product.getLeft().Min;
                double bb = product.Max / product.getLeft().Max;
                double min = Math.min(aa, Math.min(ab, Math.min(ba, bb)));
                double max = Math.max(aa, Math.max(ab, Math.max(ba, bb)));
                d = updateInterval(product.getRight(), min, max);
                if (d)
                    addChanged(product.getRight());
            }
            return c || d;
        }
        return false;
    }

    public Boolean visit(Sigmoid sigmoid) throws Exception {
        throw new NotImplementedException("Sigmoidal propagation not implemented");
    }

    public Boolean visit(LinSigmoid sigmoid) throws Exception {
        throw new NotImplementedException("LinSigmoid propagation not implemented");
    }

    /**
     * When the constraint value is known true (Min &gt; 0) enforces
     * left &lt;= right on the operands; when known false (Max &lt;= 0)
     * enforces right &lt;= left. Otherwise no information.
     */
    public Boolean visit(LTConstraint constraint) throws Exception {
        Boolean changed = false;
        if (constraint.Min > 0)
        {
            // Constraint holds: right >= left.Min, left <= right.Max.
            if (updateInterval(constraint.getRight(), constraint.getLeft().Min, Double.POSITIVE_INFINITY))
            {
                addChanged(constraint.getRight());
                changed = true;
            }
            if (updateInterval(constraint.getLeft(), Double.NEGATIVE_INFINITY, constraint.getRight().Max))
            {
                addChanged(constraint.getLeft());
                changed = true;
            }
        }
        else if (constraint.Max <= 0)
        {
            // Constraint violated: the opposite ordering must hold.
            if (updateInterval(constraint.getRight(), Double.NEGATIVE_INFINITY, constraint.getLeft().Max))
            {
                addChanged(constraint.getRight());
                changed = true;
            }
            if (updateInterval(constraint.getLeft(), constraint.getRight().Min, Double.POSITIVE_INFINITY))
            {
                addChanged(constraint.getLeft());
                changed = true;
            }
        }
        return changed;
    }

    /**
     * Same propagation as {@link #visit(LTConstraint)} — over real intervals
     * the strict and non-strict orderings tighten bounds identically.
     */
    public Boolean visit(LTEConstraint constraint) throws Exception {
        Boolean changed = false;
        if (constraint.Min > 0)
        {
            if (updateInterval(constraint.getRight(), constraint.getLeft().Min, Double.POSITIVE_INFINITY))
            {
                addChanged(constraint.getRight());
                changed = true;
            }
            if (updateInterval(constraint.getLeft(), Double.NEGATIVE_INFINITY, constraint.getRight().Max))
            {
                addChanged(constraint.getLeft());
                changed = true;
            }
        }
        else if (constraint.Max <= 0)
        {
            if (updateInterval(constraint.getRight(), Double.NEGATIVE_INFINITY, constraint.getLeft().Max))
            {
                addChanged(constraint.getRight());
                changed = true;
            }
            if (updateInterval(constraint.getLeft(), constraint.getRight().Min, Double.POSITIVE_INFINITY))
            {
                addChanged(constraint.getLeft());
                changed = true;
            }
        }
        return changed;
    }

    /** min(a,b) >= m implies both operands are >= m. */
    public Boolean visit(Min min) throws Exception {
        Boolean c1 = updateInterval(min.getLeft(), min.Min, Double.POSITIVE_INFINITY);
        Boolean c2 = updateInterval(min.getRight(), min.Min, Double.POSITIVE_INFINITY);
        if (c1)
            addChanged(min.getLeft());
        if (c2)
            addChanged(min.getRight());
        return c1 || c2;
    }

    /**
     * If the max is known positive and one operand is known non-positive,
     * the other operand must supply the positive value: it is pinned to
     * [1,1] (boolean-style encoding — see class note).
     */
    public Boolean visit(Max max) throws Exception {
        if (max.Min > 0)
        {
            Boolean c = false;
            if (max.getLeft().Max <= 0)
            {
                Boolean c1 = updateInterval(max.getRight(), 1, 1);
                if (c1)
                    addChanged(max.getRight());
                c |= c1;
            }
            if (max.getRight().Max <= 0)
            {
                Boolean c2 = updateInterval(max.getLeft(), 1, 1);
                if (c2)
                    addChanged(max.getLeft());
                c |= c2;
            }
            return c;
        }
        return false;
    }

    /** A true conjunction (Min &gt; 0) forces both operands to true ([1,1]). */
    public Boolean visit(And and) throws Exception {
        Boolean changed = false;
        if (and.Min > 0)
        {
            if (updateInterval(and.getLeft(), 1, 1))
            {
                addChanged(and.getLeft());
                changed = true;
            }
            if (updateInterval(and.getRight(), 1, 1))
            {
                addChanged(and.getRight());
                changed = true;
            }
        }
        return changed;
    }

    public Boolean visit(Or or) throws Exception {
        throw new NotImplementedException("Or operator progation not implemented (max is used)");
    }
    //return false;

    /**
     * A utility known to be at least 1 implies its constraint part holds
     * ([1,1]) and its utility part lies in [1, Max].
     */
    public Boolean visit(ConstraintUtility cu) throws Exception {
        Boolean c = false;
        if (cu.Min >= 1)
        {
            if (updateInterval(cu.getConstraint(), 1, 1))
            {
                addChanged(cu.getConstraint());
                c = true;
            }
            if (updateInterval(cu.getUtility(), 1, cu.Max))
            {
                addChanged(cu.getUtility());
                c = true;
            }
        }
        return c;
    }

    /**
     * Inverts a sum: each addend is bounded by the sum's bounds minus the
     * sum of the other addends' opposite bounds. Iterates to a local fixed
     * point because tightening one addend can re-tighten the others.
     */
    public Boolean visit(Sum sum) throws Exception {
        //a+b= c
        // a= b-c
        //a:
        Boolean changed = false;
        Boolean anychange = false;
        do
        {
            changed = false;
            for (int i = 0;i < sum.getTerms().length;++i)
            {
                // Aggregate bounds of all addends except terms[i].
                double minother = 0;
                double maxother = 0;
                for (int j = 0;j < sum.getTerms().length;++j)
                {
                    if (i == j)
                        continue;
                    minother += sum.getTerms()[j].Min;
                    maxother += sum.getTerms()[j].Max;
                }
                /*Console.WriteLine("-S({0} {1})",sum.Min,sum.Max);
                Console.WriteLine("-O({0} {1})",maxother,minother);
                Console.WriteLine("->DW {0} to {1} {2} I am {3}",t,sum.Min-maxother,sum.Max-minother,sum);
                */
                if (updateInterval(sum.getTerms()[i], sum.Min - maxother, sum.Max - minother))
                {
                    addChanged(sum.getTerms()[i]);
                    changed = true;
                    anychange = true;
                }
            }
        }
        while (changed);
        return anychange;
    }

    /** Leaf term — nothing below it to tighten. */
    public Boolean visit(AutoDiff.Variable variable) throws Exception {
        return false;
    }

    /**
     * A reified value pinned away from one of its two possible values
     * collapses to the other value and fixes the truth of its condition
     * accordingly.
     */
    public Boolean visit(Reification reif) throws Exception {
        Boolean c = false;
        if (reif.Max < reif.getMaxVal())
        {
            // Cannot take the "true" value: collapse to minVal, condition false.
            c = updateInterval(reif, reif.getMinVal(), reif.getMinVal());
            if (c)
                addChanged(reif);
            if (updateInterval(reif.getCondition(), Double.NEGATIVE_INFINITY, 0))
            {
                addChanged(reif.getCondition());
                c = true;
            }
        }
        else if (reif.Min > reif.getMinVal())
        {
            // Cannot take the "false" value: collapse to maxVal, condition true.
            c = updateInterval(reif, reif.getMaxVal(), reif.getMaxVal());
            if (c)
                addChanged(reif);
            if (updateInterval(reif.getCondition(), 1, 1))
            {
                addChanged(reif.getCondition());
                c = true;
            }
        }
        return c;
    }

    @Override
    public Boolean visit(final Negation r) throws Exception
    {
        throw new NotImplementedException("Negation");
    }

    /** Inverts log: the argument lies in [exp(Min), exp(Max)]. */
    public Boolean visit(Log log) throws Exception {
        double a = Math.exp(log.Min);
        double b = Math.exp(log.Max);
        if (updateInterval(log.getArg(), a, b))
        {
            addChanged(log.getArg());
            return true;
        }
        return false;
    }

    /**
     * Inverts sin on the branch containing the argument interval. Only
     * applicable when the argument interval is narrower than pi (otherwise
     * multiple branches overlap it). Throws {@link UnsolveableException}
     * when neither candidate preimage branch intersects the argument interval.
     */
    public Boolean visit(Sin sin) throws Exception {
        // Full range [-1,1] carries no information.
        if (sin.Min == -1.0 && sin.Max == 1.0)
            return false;
        double cdist = sin.getArg().Max - sin.getArg().Min;
        if (cdist >= Math.PI)
            return false;
        //Console.WriteLine("Sine Prop Sine interval: [{0}, {1}]",sin.Min,sin.Max);
        //Console.WriteLine("getArg() interval: [{0}, {1}]",sin.getArg().Min,sin.getArg().Max);
        // Principal preimages of the bounds.
        double a = Math.asin(sin.Min);
        double b = Math.asin(sin.Max);
        //-pi/2..pi/2
        double t;
        if (a > b)
        {
            t = b;
            b = a;
            a = t;
        }
        //now a<= b;
        // Mirror branch of the preimage.
        double c = Math.PI - b;
        double d = Math.PI - a;
        // Shift each branch by whole periods towards the argument interval.
        double n1 = Math.ceil((sin.getArg().Min - a) / (2 * Math.PI));
        //double n1a = Math.Floor((sin.getArg().Max - a) / (2*Math.PI));
        double n2 = Math.floor((sin.getArg().Max - b) / (2 * Math.PI));
        //double n2a = Math.Ceiling((sin.getArg().Min - b) / (2*Math.PI));
        double n3 = Math.ceil((sin.getArg().Min - c) / (2 * Math.PI));
        //double n3a = Math.Floor((sin.getArg().Max - c) / (2*Math.PI));
        double n4 = Math.floor((sin.getArg().Max - d) / (2 * Math.PI));
        //double n4a = Math.Ceiling((sin.getArg().Min - d) / (2*Math.PI));
        //Console.WriteLine("N: {0} {1} {2} {3}",n1,n2,n3,n4);
        //Console.WriteLine("P: {0} {1} {2} {3}",n1*2*Math.PI+a,n2*2*Math.PI+b,n3*2*Math.PI+c,n4*2*Math.PI+d);
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        double n1a = n1 * 2 * Math.PI + a;
        double n2b = n2 * 2 * Math.PI + b;
        Boolean faulty = true;
        if (n1a <= sin.getArg().Max && n2b >= sin.getArg().Min)
        {
            //interval 1 completely enclosed
            min = Math.min(min, n1a);
            max = Math.max(max, n2b);
            faulty = false;
        }
        else
        {
        }
        //no bound is inside as adding interval is smaller than pi
        double n3c = n3 * 2 * Math.PI + c;
        double n4d = n4 * 2 * Math.PI + d;
        if (n3c <= sin.getArg().Max && n4d >= sin.getArg().Min)
        {
            //interval 2 completely enclosed
            min = Math.min(min, n3c);
            max = Math.max(max, n4d);
            faulty = false;
        }
        else
        {
        }
        //no bound is inside as adding interval is smaller than pi
        if (faulty)
        {
            // Neither branch intersects the argument interval: contradiction.
            throw new UnsolveableException();
        }
        //return false; //updateInterval(sin.getArg(),sin.getArg().Max,sin.getArg().Min); //no solution possible
        /*if (n1 == n2) { //bound within interval
        min = Math.min(min,n1*2*Math.PI+a);
        max = Math.max(max,n2*2*Math.PI+b);
        } else {
        if (n1 > n2) { //lower bound cut
        min = Math.min(min,sin.getArg().Min);
        max = Math.max(max,n2*2*Math.PI+b);
        double k =
        }
        }
        //if (n1 == n2 && n3 == n4) { //bind to rectangle:
        double min = Math.min(n1*2*Math.PI+a,n3*2*Math.PI+c);
        double max = Math.max(n2*2*Math.PI+b,n4*2*Math.PI+d);
        */
        //}
        if (min == Double.MAX_VALUE)
            min = Double.NEGATIVE_INFINITY;
        if (max == -Double.MAX_VALUE)
            max = Double.POSITIVE_INFINITY;
        if (updateInterval(sin.getArg(), min, max))
        {
            addChanged(sin.getArg());
            return true;
        }
        return false;
    }

    /**
     * Inverts cos; same branch-selection scheme as {@link #visit(Sin)} with
     * acos preimages in [0, pi] and mirror branch [-pi, 0].
     * NOTE(review): unbounded results use +/-Double.MAX_VALUE here whereas
     * the Sin visitor uses the infinities — looks inconsistent; TODO confirm.
     */
    public Boolean visit(Cos cos) throws Exception {
        if (cos.Min == -1.0 && cos.Max == 1.0)
            return false;
        double cdist = cos.getArg().Max - cos.getArg().Min;
        if (cdist >= Math.PI)
            return false;
        //Console.WriteLine("Cos Prop Sine interval: [{0}, {1}]",cos.Min,cos.Max);
        //Console.WriteLine("getArg() interval: [{0}, {1}]",cos.getArg().Min,cos.getArg().Max);
        double a = Math.acos(cos.Min);
        double b = Math.acos(cos.Max);
        //0..pi
        double t;
        if (a > b)
        {
            t = b;
            b = a;
            a = t;
        }
        //now a<= b;
        double c = -b;
        double d = -a;
        double n1 = Math.ceil((cos.getArg().Min - a) / (2 * Math.PI));
        //double n1a = Math.Floor((sin.getArg().Max - a) / (2*Math.PI));
        double n2 = Math.floor((cos.getArg().Max - b) / (2 * Math.PI));
        //double n2a = Math.Ceiling((sin.getArg().Min - b) / (2*Math.PI));
        double n3 = Math.ceil((cos.getArg().Min - c) / (2 * Math.PI));
        //double n3a = Math.Floor((sin.getArg().Max - c) / (2*Math.PI));
        double n4 = Math.floor((cos.getArg().Max - d) / (2 * Math.PI));
        //double n4a = Math.Ceiling((sin.getArg().Min - d) / (2*Math.PI));
        //Console.WriteLine("N: {0} {1} {2} {3}",n1,n2,n3,n4);
        //Console.WriteLine("P: {0} {1} {2} {3}",n1*2*Math.PI+a,n2*2*Math.PI+b,n3*2*Math.PI+c,n4*2*Math.PI+d);
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        double n1a = n1 * 2 * Math.PI + a;
        double n2b = n2 * 2 * Math.PI + b;
        Boolean faulty = true;
        if (n1a <= cos.getArg().Max && n2b >= cos.getArg().Min)
        {
            //interval 1 completely enclosed
            min = Math.min(min, n1a);
            max = Math.max(max, n2b);
            faulty = false;
        }
        else
        {
        }
        //no bound is inside as adding interval is smaller than pi
        double n3c = n3 * 2 * Math.PI + c;
        double n4d = n4 * 2 * Math.PI + d;
        if (n3c <= cos.getArg().Max && n4d >= cos.getArg().Min)
        {
            //interval 2 completely enclosed
            min = Math.min(min, n3c);
            max = Math.max(max, n4d);
            faulty = false;
        }
        else
        {
        }
        //no bound is inside as adding interval is smaller than pi
        if (faulty)
        {
            throw new UnsolveableException();
        }
        //return false;//return updateInterval(cos.getArg(),cos.getArg().Max,cos.getArg().Min); //no solution possible
        if (min == Double.MAX_VALUE)
            min = -Double.MAX_VALUE;
        if (max == -Double.MAX_VALUE)
            max = Double.MAX_VALUE;
        if (updateInterval(cos.getArg(), min, max))
        {
            addChanged(cos.getArg());
            return true;
        }
        return false;
    }

    /** |x| <= Max implies x in [-Max, Max]. */
    public Boolean visit(Abs abs) throws Exception {
        if (updateInterval(abs.getArg(), -abs.Max, abs.Max))
        {
            addChanged(abs.getArg());
            return true;
        }
        return false;
    }

    /** Inverts exp: the argument lies in [log(Min), log(Max)]. */
    public Boolean visit(Exp exp) throws Exception {
        // log of a non-positive bound yields -Infinity/NaN; NaN bounds are
        // ignored by updateInterval.
        double a = Math.log(exp.Min);
        double b = Math.log(exp.Max);
        if (updateInterval(exp.getArg(), a, b))
        {
            addChanged(exp.getArg());
            return true;
        }
        return false;
    }

    public Boolean visit(Atan2 atan2) throws Exception {
        throw new NotImplementedException("Atan2 propagation not implemented");
    }
    //return false;

    /**
     * Debug hook for reporting interval shrinkage; the original C# output
     * is commented out and only a placeholder message remains.
     */
    protected void outputChange(Term t, double oldmin, double oldmax) throws Exception {
        //Console.WriteLine("DW: Interval of {0} is now [{1}, {2}]",t,t.Min,t.Max);
        double oldwidth = oldmax - oldmin;
        double newwidth = t.Max - t.Min;
        System.out.println("not yet implemented");
        // if (t instanceof AutoDiff.Variable)
        // Console.WriteLine("DW shrinking [{0}..{1}] to [{2}..{3}] by {4} ({5}%)", oldmin, oldmax, t.Min, t.Max, oldwidth - newwidth, (oldwidth - newwidth) / oldwidth * 100);
    }

    /**
     * Intersects {@code t}'s interval with [{@code min}, {@code max}].
     * NaN bounds are ignored. Returns {@code true} iff the interval shrank;
     * throws {@link UnsolveableException} when the intersection is empty.
     * Also maintains the global visit/update statistics.
     */
    protected Boolean updateInterval(Term t, double min, double max) throws Exception {
        Boolean ret = t.Min < min || t.Max > max;
        if (!Double.isNaN(min))
            t.Min = Math.max(t.Min, min);
        if (!Double.isNaN(max))
            t.Max = Math.min(t.Max, max);
        if (ret)
            IntervalPropagator.updates++;
        IntervalPropagator.visits++;
        if (t.Min > t.Max)
            throw new UnsolveableException();
        return ret;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tx;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.geode.DataSerializer;
import org.apache.geode.cache.Operation;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.DistributedPutAllOperation;
import org.apache.geode.internal.cache.DistributedPutAllOperation.EntryVersionsList;
import org.apache.geode.internal.cache.DistributedPutAllOperation.PutAllEntryData;
import org.apache.geode.internal.cache.DistributedRemoveAllOperation;
import org.apache.geode.internal.cache.DistributedRemoveAllOperation.RemoveAllEntryData;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.serialization.ByteArrayDataInput;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.internal.serialization.StaticSerialization;
/**
 * An {@link EntryEventImpl} that can be serialized for distributed
 * transactions (DSFID {@code DIST_TX_OP}), optionally carrying an embedded
 * putAll or removeAll operation whose entries (and version tags) are
 * written/read by the {@code *ToData}/{@code *FromData} helpers below.
 */
public class DistTxEntryEvent extends EntryEventImpl {

    // Flag bits written after the event header to indicate embedded bulk ops.
    protected static final byte HAS_PUTALL_OP = 0x1;
    protected static final byte HAS_REMOVEALL_OP = 0x2;

    // Full path of the region; only populated on the receiving side (fromData).
    private String regionName;

    /**
     * TODO DISTTX: callers of this constructor need to make sure that release is called. In general
     * the distributed tx code needs to be reviewed to see if it correctly handles off-heap.
     */
    @Retained
    public DistTxEntryEvent(EntryEventImpl entry) {
        super(entry);
    }

    // For Serialization
    public DistTxEntryEvent() {}

    /**
     * @return the region's full path as read by {@link #fromData}; null on
     *         the sending side, where the region object itself is available
     */
    public String getRegionName() {
        return this.regionName;
    }

    @Override
    public KnownVersion[] getSerializationVersions() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public int getDSFID() {
        return DIST_TX_OP;
    }

    /**
     * Writes: eventID, region full path, op ordinal, key, bucket id, new
     * value, a flags byte, then the optional putAll/removeAll payloads in
     * that order. Must stay in sync with {@link #fromData}.
     */
    @Override
    public void toData(DataOutput out,
        SerializationContext context) throws IOException {
        DataSerializer.writeObject(this.eventID, out);
        DataSerializer.writeObject(this.getRegion().getFullPath(), out);
        out.writeByte(this.op.ordinal);
        DataSerializer.writeObject(this.getKey(), out);
        DataSerializer.writeInteger(this.keyInfo.getBucketId(), out);
        DataSerializer.writeObject(this.basicGetNewValue(), out);
        byte flags = 0;
        if (this.putAllOp != null) {
            flags |= HAS_PUTALL_OP;
        }
        if (this.removeAllOp != null) {
            flags |= HAS_REMOVEALL_OP;
        }
        DataSerializer.writeByte(flags, out);
        // handle putAll
        if (this.putAllOp != null) {
            putAllToData(out, context);
        }
        // handle removeAll
        if (this.removeAllOp != null) {
            removeAllToData(out, context);
        }
    }

    /**
     * Mirror of {@link #toData}: restores the event header fields and any
     * embedded bulk operations.
     */
    @Override
    public void fromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
        this.eventID = (EventID) DataSerializer.readObject(in);
        this.regionName = DataSerializer.readString(in);
        this.op = Operation.fromOrdinal(in.readByte());
        Object key = DataSerializer.readObject(in);
        Integer bucketId = DataSerializer.readInteger(in);
        this.keyInfo = new DistTxKeyInfo(key, null/*
                                                   * value [DISTTX} TODO see if required
                                                   */, null/*
                                                           * callbackarg [DISTTX] TODO
                                                           */, bucketId);
        basicSetNewValue(DataSerializer.readObject(in), true);
        byte flags = DataSerializer.readByte(in);
        if ((flags & HAS_PUTALL_OP) != 0) {
            putAllFromData(in, context);
        }
        if ((flags & HAS_REMOVEALL_OP) != 0) {
            removeAllFromData(in, context);
        }
    }

    /**
     * Writes the putAll entry count, each entry (with its version tag
     * temporarily detached so tags can be streamed as one batch), then a
     * hasTags flag and, if set, the version-tag list.
     */
    private void putAllToData(DataOutput out,
        SerializationContext context) throws IOException {
        DataSerializer.writeInteger(this.putAllOp.putAllDataSize, out);
        EntryVersionsList versionTags = new EntryVersionsList(this.putAllOp.putAllDataSize);
        boolean hasTags = false;
        final PutAllEntryData[] putAllData = this.putAllOp.getPutAllEntryData();
        for (int i = 0; i < this.putAllOp.putAllDataSize; i++) {
            if (!hasTags && putAllData[i].versionTag != null) {
                hasTags = true;
            }
            // Detach the tag while serializing the entry; tags travel in a
            // single EntryVersionsList after all entries.
            VersionTag<?> tag = putAllData[i].versionTag;
            versionTags.add(tag);
            putAllData[i].versionTag = null;
            putAllData[i].toData(out, context);
            putAllData[i].versionTag = tag;
        }
        out.writeBoolean(hasTags);
        if (hasTags) {
            InternalDataSerializer.invokeToData(versionTags, out);
        }
    }

    /**
     * Mirror of {@link #putAllToData}: reads the entries, the hasTags flag,
     * and reattaches version tags; then reconstructs the putAll operation.
     */
    private void putAllFromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
        int putAllSize = DataSerializer.readInteger(in);
        PutAllEntryData[] putAllEntries = new PutAllEntryData[putAllSize];
        for (int i = 0; i < putAllSize; i++) {
            putAllEntries[i] = new PutAllEntryData(in, context, this.eventID, i);
        }
        // putAllToData writes the hasTags flag unconditionally (even for an
        // empty batch), so it must be read unconditionally too — previously
        // it was only read when putAllSize > 0, desynchronizing the stream
        // for empty batches.
        boolean hasTags = in.readBoolean();
        if (hasTags) {
            EntryVersionsList versionTags = EntryVersionsList.create(in);
            for (int i = 0; i < putAllSize; i++) {
                putAllEntries[i].versionTag = versionTags.get(i);
            }
        }
        this.op = Operation.PUTALL_CREATE;
        this.setOriginRemote(true);
        this.setGenerateCallbacks(true);
        this.putAllOp = new DistributedPutAllOperation(this, putAllSize, false /* [DISTTX] TODO */);
        this.putAllOp.setPutAllEntryData(putAllEntries);
    }

    /**
     * Writes the removeAll entry count, each entry (version tag detached as
     * in {@link #putAllToData}), then the hasTags flag and optional tag list.
     */
    private void removeAllToData(DataOutput out,
        SerializationContext context) throws IOException {
        DataSerializer.writeInteger(this.removeAllOp.removeAllDataSize, out);
        EntryVersionsList versionTags = new EntryVersionsList(this.removeAllOp.removeAllDataSize);
        boolean hasTags = false;
        final RemoveAllEntryData[] removeAllData = this.removeAllOp.getRemoveAllEntryData();
        for (int i = 0; i < this.removeAllOp.removeAllDataSize; i++) {
            if (!hasTags && removeAllData[i].versionTag != null) {
                hasTags = true;
            }
            VersionTag<?> tag = removeAllData[i].versionTag;
            versionTags.add(tag);
            removeAllData[i].versionTag = null;
            removeAllData[i].serializeTo(out, context);
            removeAllData[i].versionTag = tag;
        }
        out.writeBoolean(hasTags);
        if (hasTags) {
            InternalDataSerializer.invokeToData(versionTags, out);
        }
    }

    /**
     * Mirror of {@link #removeAllToData}: reads the entries, reattaches
     * version tags, and reconstructs the removeAll operation.
     */
    private void removeAllFromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
        int removeAllSize = DataSerializer.readInteger(in);
        final RemoveAllEntryData[] removeAllData = new RemoveAllEntryData[removeAllSize];
        for (int i = 0; i < removeAllSize; i++) {
            removeAllData[i] = new RemoveAllEntryData(in, this.eventID, i, context);
        }
        boolean hasTags = in.readBoolean();
        if (hasTags) {
            EntryVersionsList versionTags = EntryVersionsList.create(in);
            for (int i = 0; i < removeAllSize; i++) {
                removeAllData[i].versionTag = versionTags.get(i);
            }
        }
        this.op = Operation.REMOVEALL_DESTROY;
        this.setOriginRemote(true);
        this.setGenerateCallbacks(true);
        this.removeAllOp =
            new DistributedRemoveAllOperation(this, removeAllSize, false /* [DISTTX] TODO */);
        this.removeAllOp.setRemoveAllEntryData(removeAllData);
    }

    public void setDistributedMember(DistributedMember sender) {
        this.distributedMember = sender;
    }

    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append(getShortClassName());
        buf.append("[");
        buf.append("eventID=");
        buf.append(this.eventID);
        if (this.getRegion() != null) {
            buf.append(";r=").append(this.getRegion().getName());
        }
        buf.append(";op=");
        buf.append(getOperation());
        buf.append(";key=");
        buf.append(this.getKey());
        buf.append(";bucket=");
        buf.append(this.getKeyInfo().getBucketId());
        // NOTE(review): no old value is ever appended after this label — looks
        // like a dropped statement; preserved as-is to keep the output format.
        buf.append(";oldValue=");
        if (this.putAllOp != null) {
            buf.append(";putAllDataSize :").append(this.putAllOp.putAllDataSize);
        }
        if (this.removeAllOp != null) {
            buf.append(";removeAllDataSize :").append(this.removeAllOp.removeAllDataSize);
        }
        buf.append("]");
        return buf.toString();
    }
}
| |
package uk.co.chrisjenx.calligraphy;
import android.os.Build;
import android.text.TextUtils;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.MultiAutoCompleteTextView;
import android.widget.RadioButton;
import android.widget.TextView;
import android.widget.ToggleButton;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by chris on 20/12/2013
 * Project: Calligraphy
 *
 * Immutable global configuration for Calligraphy font injection. Build an instance
 * with {@link Builder}, install it process-wide via {@link #initDefault(CalligraphyConfig)}
 * and read it back with {@link #get()}.
 */
public class CalligraphyConfig {

    /**
     * The default styles for the factory to lookup. The builder builds an extended immutable
     * map of this with any additional custom styles.
     */
    private static final Map<Class<? extends TextView>, Integer> DEFAULT_STYLES = new HashMap<>();

    static {
        // Map each stock widget class to the theme attribute that resolves its default style.
        // (The original wrapped these puts in a redundant nested brace block; removed.)
        DEFAULT_STYLES.put(TextView.class, android.R.attr.textViewStyle);
        DEFAULT_STYLES.put(Button.class, android.R.attr.buttonStyle);
        DEFAULT_STYLES.put(EditText.class, android.R.attr.editTextStyle);
        DEFAULT_STYLES.put(AutoCompleteTextView.class, android.R.attr.autoCompleteTextViewStyle);
        DEFAULT_STYLES.put(MultiAutoCompleteTextView.class, android.R.attr.autoCompleteTextViewStyle);
        DEFAULT_STYLES.put(CheckBox.class, android.R.attr.checkboxStyle);
        DEFAULT_STYLES.put(RadioButton.class, android.R.attr.radioButtonStyle);
        DEFAULT_STYLES.put(ToggleButton.class, android.R.attr.buttonStyleToggle);
    }

    // Process-wide singleton; lazily created by get() if never explicitly initialized.
    private static CalligraphyConfig sInstance;

    /**
     * Set the default Calligraphy Config
     *
     * @param calligraphyConfig the config build using the builder.
     * @see uk.co.chrisjenx.calligraphy.CalligraphyConfig.Builder
     */
    public static void initDefault(CalligraphyConfig calligraphyConfig) {
        sInstance = calligraphyConfig;
    }

    /**
     * The current Calligraphy Config.
     * If not set it will create a default config.
     *
     * NOTE(review): lazy init here is not synchronized — presumably only ever called from
     * the main thread during inflation; confirm before relying on it off the UI thread.
     */
    public static CalligraphyConfig get() {
        if (sInstance == null)
            sInstance = new CalligraphyConfig(new Builder());
        return sInstance;
    }

    /**
     * Is a default font set?
     */
    private final boolean mIsFontSet;
    /**
     * The default Font Path if nothing else is setup.
     */
    private final String mFontPath;
    /**
     * Default Font Path Attr Id to lookup
     */
    private final int mAttrId;
    /**
     * Use Reflection to inject the private factory.
     */
    private final boolean mReflection;
    /**
     * Use Reflection to intercept CustomView inflation with the correct Context.
     */
    private final boolean mCustomViewCreation;
    /**
     * Class Styles. Build from DEFAULT_STYLES and the builder.
     */
    private final Map<Class<? extends TextView>, Integer> mClassStyleAttributeMap;

    /**
     * Copies the builder's state into immutable fields; the style map is the defaults
     * overlaid with any custom styles, wrapped unmodifiable so it can be shared safely.
     */
    protected CalligraphyConfig(Builder builder) {
        mIsFontSet = builder.isFontSet;
        mFontPath = builder.fontAssetPath;
        mAttrId = builder.attrId;
        mReflection = builder.reflection;
        mCustomViewCreation = builder.customViewCreation;
        final Map<Class<? extends TextView>, Integer> tempMap = new HashMap<>(DEFAULT_STYLES);
        tempMap.putAll(builder.mStyleClassMap);
        mClassStyleAttributeMap = Collections.unmodifiableMap(tempMap);
    }

    /**
     * @return mFontPath for text views might be null
     */
    public String getFontPath() {
        return mFontPath;
    }

    /**
     * @return true if set, false if null|empty
     */
    boolean isFontSet() {
        return mIsFontSet;
    }

    /** @return true when the private LayoutInflater factory should be injected via reflection. */
    public boolean isReflection() {
        return mReflection;
    }

    /** @return true when custom views should be created reflectively with the wrapped Context. */
    public boolean isCustomViewCreation() {
        return mCustomViewCreation;
    }

    /** @return unmodifiable widget-class to style-attribute map (defaults + custom entries). */
    /* default */ Map<Class<? extends TextView>, Integer> getClassStyles() {
        return mClassStyleAttributeMap;
    }

    /**
     * @return the custom attrId to look for, -1 if not set.
     */
    public int getAttrId() {
        return mAttrId;
    }

    public static class Builder {
        /**
         * Default AttrID if not set.
         */
        public static final int INVALID_ATTR_ID = -1;
        /**
         * Use Reflection to inject the private factory. Doesn't exist pre HC. so defaults to false.
         */
        private boolean reflection = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
        /**
         * Use Reflection to intercept CustomView inflation with the correct Context.
         */
        private boolean customViewCreation = true;
        /**
         * The fontAttrId to look up the font path from.
         */
        private int attrId = R.attr.fontPath;
        /**
         * Has the user set the default font path.
         */
        private boolean isFontSet = false;
        /**
         * The default fontPath
         */
        private String fontAssetPath = null;
        /**
         * Additional Class Styles. Can be empty.
         */
        private Map<Class<? extends TextView>, Integer> mStyleClassMap = new HashMap<>();

        /**
         * This defaults to R.attr.fontPath. So only override if you want to use your own attrId.
         *
         * @param fontAssetAttrId the custom attribute to look for fonts in assets.
         * @return this builder.
         */
        public Builder setFontAttrId(int fontAssetAttrId) {
            // The original ternary (x != INVALID_ATTR_ID ? x : INVALID_ATTR_ID) always
            // assigned x regardless; a direct assignment is equivalent and clearer.
            this.attrId = fontAssetAttrId;
            return this;
        }

        /**
         * Set the default font if you don't define one else where in your styles.
         *
         * @param defaultFontAssetPath a path to a font file in the assets folder, e.g. "fonts/Roboto-light.ttf",
         *                             passing null will default to the device font-family.
         * @return this builder.
         */
        public Builder setDefaultFontPath(String defaultFontAssetPath) {
            this.isFontSet = !TextUtils.isEmpty(defaultFontAssetPath);
            this.fontAssetPath = defaultFontAssetPath;
            return this;
        }

        /**
         * <p>Turn of the use of Reflection to inject the private factory.
         * This has operational consequences! Please read and understand before disabling.
         * <b>This is already disabled on pre Honeycomb devices. (API 11)</b></p>
         *
         * <p> If you disable this you will need to override your {@link android.app.Activity#onCreateView(android.view.View, String, android.content.Context, android.util.AttributeSet)}
         * as this is set as the {@link android.view.LayoutInflater} private factory.</p>
         * <br>
         * <b> Use the following code in the Activity if you disable FactoryInjection:</b>
         * <pre><code>
         * {@literal @}Override
         * {@literal @}TargetApi(Build.VERSION_CODES.HONEYCOMB)
         * public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
         *   return CalligraphyContextWrapper.onActivityCreateView(this, parent, super.onCreateView(parent, name, context, attrs), name, context, attrs);
         * }
         * </code></pre>
         */
        public Builder disablePrivateFactoryInjection() {
            this.reflection = false;
            return this;
        }

        /**
         * Due to the poor inflation order where custom views are created and never returned inside an
         * {@code onCreateView(...)} method. We have to create CustomView's at the latest point in the
         * overrideable injection flow.
         *
         * On HoneyComb+ this is inside the {@link android.app.Activity#onCreateView(android.view.View, String, android.content.Context, android.util.AttributeSet)}
         * Pre HoneyComb this is in the {@link android.view.LayoutInflater.Factory#onCreateView(String, android.util.AttributeSet)}
         *
         * We wrap base implementations, so if you LayoutInflater/Factory/Activity creates the
         * custom view before we get to this point, your view is used. (Such is the case with the
         * TintEditText etc)
         *
         * The problem is, the native methods pass there parents context to the constructor in a really
         * specific place. We have to mimic this in {@link uk.co.chrisjenx.calligraphy.CalligraphyLayoutInflater#createCustomViewInternal(android.view.View, android.view.View, String, android.content.Context, android.util.AttributeSet)}
         * To mimic this we have to use reflection as the Class constructor args are hidden to us.
         *
         * We have discussed other means of doing this but this is the only semi-clean way of doing it.
         * (Without having to do proxy classes etc).
         *
         * Calling this will of course speed up inflation by turning off reflection, but not by much,
         * But if you want Calligraphy to inject the correct typeface then you will need to make sure your CustomView's
         * are created before reaching the LayoutInflater onViewCreated.
         */
        public Builder disableCustomViewInflation() {
            this.customViewCreation = false;
            return this;
        }

        /**
         * Add a custom style to get looked up. If you use a custom class that has a parent style
         * which is not part of the default android styles you will need to add it here.
         *
         * The Calligraphy inflater is unaware of custom styles in your custom classes. We use
         * the class type to look up the style attribute in the theme resources.
         *
         * So if you had a {@code MyTextField.class} which looked up it's default style as
         * {@code R.attr.textFieldStyle} you would add those here.
         *
         * {@code builder.addCustomStyle(MyTextField.class,R.attr.textFieldStyle}
         *
         * @param styleClass             the class that related to the parent styleResource. null is ignored.
         * @param styleResourceAttribute e.g. {@code R.attr.textFieldStyle}, 0 is ignored.
         * @return this builder.
         */
        public Builder addCustomStyle(final Class<? extends TextView> styleClass, final int styleResourceAttribute) {
            if (styleClass == null || styleResourceAttribute == 0) return this;
            mStyleClassMap.put(styleClass, styleResourceAttribute);
            return this;
        }

        /** Finalizes the font flag from the asset path and creates the immutable config. */
        public CalligraphyConfig build() {
            this.isFontSet = !TextUtils.isEmpty(fontAssetPath);
            return new CalligraphyConfig(this);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.jvm.JvmInfo;
import java.io.IOException;
/**
 * Enumerates every released Elasticsearch version as an integer id together with
 * the Lucene version it shipped with, plus parsing/comparison helpers.
 */
@SuppressWarnings("deprecation")
public class Version {

    // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is Beta/RC indicator
    // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
    // the (internal) format of the id is there so we can easily do after/before checks on the id

    /** Suffix appended to snapshot builds; shared by {@link #fromString} and {@link #toString}. */
    private static final String SNAPSHOT_SUFFIX = "-SNAPSHOT";

    // NOTE: indexes created with 3.6 use this constant for e.g. analysis chain emulation (imperfect)
    public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_4_0_0;

    public static final int V_0_18_0_ID = /*00*/180099;
    public static final Version V_0_18_0 = new Version(V_0_18_0_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_1_ID = /*00*/180199;
    public static final Version V_0_18_1 = new Version(V_0_18_1_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_2_ID = /*00*/180299;
    public static final Version V_0_18_2 = new Version(V_0_18_2_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_3_ID = /*00*/180399;
    public static final Version V_0_18_3 = new Version(V_0_18_3_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_4_ID = /*00*/180499;
    public static final Version V_0_18_4 = new Version(V_0_18_4_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_5_ID = /*00*/180599;
    public static final Version V_0_18_5 = new Version(V_0_18_5_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_6_ID = /*00*/180699;
    public static final Version V_0_18_6 = new Version(V_0_18_6_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_7_ID = /*00*/180799;
    public static final Version V_0_18_7 = new Version(V_0_18_7_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_18_8_ID = /*00*/180899;
    public static final Version V_0_18_8 = new Version(V_0_18_8_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_0_RC1_ID = /*00*/190051;
    public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_0_RC2_ID = /*00*/190052;
    public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_0_RC3_ID = /*00*/190053;
    public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_0_ID = /*00*/190099;
    public static final Version V_0_19_0 = new Version(V_0_19_0_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_1_ID = /*00*/190199;
    public static final Version V_0_19_1 = new Version(V_0_19_1_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_2_ID = /*00*/190299;
    public static final Version V_0_19_2 = new Version(V_0_19_2_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_3_ID = /*00*/190399;
    public static final Version V_0_19_3 = new Version(V_0_19_3_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_4_ID = /*00*/190499;
    public static final Version V_0_19_4 = new Version(V_0_19_4_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_5_ID = /*00*/190599;
    public static final Version V_0_19_5 = new Version(V_0_19_5_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_6_ID = /*00*/190699;
    public static final Version V_0_19_6 = new Version(V_0_19_6_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_7_ID = /*00*/190799;
    public static final Version V_0_19_7 = new Version(V_0_19_7_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_8_ID = /*00*/190899;
    public static final Version V_0_19_8 = new Version(V_0_19_8_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_9_ID = /*00*/190999;
    public static final Version V_0_19_9 = new Version(V_0_19_9_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_10_ID = /*00*/191099;
    public static final Version V_0_19_10 = new Version(V_0_19_10_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_11_ID = /*00*/191199;
    public static final Version V_0_19_11 = new Version(V_0_19_11_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_12_ID = /*00*/191299;
    public static final Version V_0_19_12 = new Version(V_0_19_12_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_19_13_ID = /*00*/191399;
    public static final Version V_0_19_13 = new Version(V_0_19_13_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_0_RC1_ID = /*00*/200051;
    public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_0_ID = /*00*/200099;
    public static final Version V_0_20_0 = new Version(V_0_20_0_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_1_ID = /*00*/200199;
    public static final Version V_0_20_1 = new Version(V_0_20_1_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_2_ID = /*00*/200299;
    public static final Version V_0_20_2 = new Version(V_0_20_2_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_3_ID = /*00*/200399;
    public static final Version V_0_20_3 = new Version(V_0_20_3_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_4_ID = /*00*/200499;
    public static final Version V_0_20_4 = new Version(V_0_20_4_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_5_ID = /*00*/200599;
    public static final Version V_0_20_5 = new Version(V_0_20_5_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_6_ID = /*00*/200699;
    public static final Version V_0_20_6 = new Version(V_0_20_6_ID, false, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_20_7_ID = /*00*/200799;
    public static final Version V_0_20_7 = new Version(V_0_20_7_ID, true, LUCENE_3_EMULATION_VERSION);
    public static final int V_0_90_0_Beta1_ID = /*00*/900001;
    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_1);
    public static final int V_0_90_0_RC1_ID = /*00*/900051;
    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, false, org.apache.lucene.util.Version.LUCENE_4_1);
    public static final int V_0_90_0_RC2_ID = /*00*/900052;
    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, false, org.apache.lucene.util.Version.LUCENE_4_2);
    public static final int V_0_90_0_ID = /*00*/900099;
    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_2);
    public static final int V_0_90_1_ID = /*00*/900199;
    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_3);
    public static final int V_0_90_2_ID = /*00*/900299;
    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_3);
    public static final int V_0_90_3_ID = /*00*/900399;
    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
    public static final int V_0_90_4_ID = /*00*/900499;
    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
    public static final int V_0_90_5_ID = /*00*/900599;
    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
    public static final int V_0_90_6_ID = /*00*/900699;
    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
    public static final int V_0_90_7_ID = /*00*/900799;
    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
    public static final int V_0_90_8_ID = /*00*/900899;
    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_9_ID = /*00*/900999;
    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_10_ID = /*00*/901099;
    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_11_ID = /*00*/901199;
    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_12_ID = /*00*/901299;
    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_13_ID = /*00*/901399;
    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_0_90_14_ID = /*00*/901499;
    public static final Version V_0_90_14 = new Version(V_0_90_14_ID, true, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_0_Beta1_ID = 1000001;
    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
    public static final int V_1_0_0_Beta2_ID = 1000002;
    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_0_RC1_ID = 1000051;
    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_0_RC2_ID = 1000052;
    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_0_ID = 1000099;
    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_1_ID = 1000199;
    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_2_ID = 1000299;
    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_3_ID = 1000399;
    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_0_4_ID = 1000499;
    public static final Version V_1_0_4 = new Version(V_1_0_4_ID, true, org.apache.lucene.util.Version.LUCENE_4_6);
    public static final int V_1_1_0_ID = 1010099;
    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_7);
    public static final int V_1_1_1_ID = 1010199;
    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_7);
    public static final int V_1_1_2_ID = 1010299;
    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_4_7);
    public static final int V_1_2_0_ID = 1020099;
    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_2_1_ID = 1020199;
    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_2_2_ID = 1020299;
    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_2_3_ID = 1020399;
    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_2_4_ID = 1020499;
    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_2_5_ID = 1020599;
    public static final Version V_1_2_5 = new Version(V_1_2_5_ID, true, org.apache.lucene.util.Version.LUCENE_4_8);
    public static final int V_1_3_0_ID = 1030099;
    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_1_ID = 1030199;
    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_2_ID = 1030299;
    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_3_ID = 1030399;
    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_4_ID = 1030499;
    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_5_ID = 1030599;
    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_6_ID = 1030699;
    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_7_ID = 1030799;
    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_3_8_ID = 1030899;
    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, true, org.apache.lucene.util.Version.LUCENE_4_9);
    public static final int V_1_4_0_Beta1_ID = 1040001;
    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_1);
    public static final int V_1_4_0_ID = 1040099;
    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
    public static final int V_1_4_1_ID = 1040199;
    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
    public static final int V_1_4_2_ID = 1040299;
    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
    public static final int V_1_4_3_ID = 1040399;
    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_2);
    public static final int V_1_5_0_ID = 1050099;
    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_3);
    public static final int V_2_0_0_ID = 2000099;
    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_1_0);
    public static final Version CURRENT = V_2_0_0;

    static {
        // Fail fast (under -ea) if someone bumps Lucene without bumping CURRENT.
        assert CURRENT.luceneVersion.equals(Lucene.VERSION) : "Version must be upgraded to [" + Lucene.VERSION + "] is still set to [" + CURRENT.luceneVersion + "]";
    }

    /** Reads a version id (vint) from the stream and resolves it via {@link #fromId(int)}. */
    public static Version readVersion(StreamInput in) throws IOException {
        return fromId(in.readVInt());
    }

    /**
     * Resolves a version id to its canonical constant; unknown ids (e.g. from a newer
     * node) yield a fresh non-snapshot Version carrying the current Lucene version.
     */
    public static Version fromId(int id) {
        switch (id) {
            case V_2_0_0_ID:
                return V_2_0_0;
            case V_1_5_0_ID:
                return V_1_5_0;
            case V_1_4_3_ID:
                return V_1_4_3;
            case V_1_4_2_ID:
                return V_1_4_2;
            case V_1_4_1_ID:
                return V_1_4_1;
            case V_1_4_0_ID:
                return V_1_4_0;
            case V_1_4_0_Beta1_ID:
                return V_1_4_0_Beta1;
            case V_1_3_8_ID:
                return V_1_3_8;
            case V_1_3_7_ID:
                return V_1_3_7;
            case V_1_3_6_ID:
                return V_1_3_6;
            case V_1_3_5_ID:
                return V_1_3_5;
            case V_1_3_4_ID:
                return V_1_3_4;
            case V_1_3_3_ID:
                return V_1_3_3;
            case V_1_3_2_ID:
                return V_1_3_2;
            case V_1_3_1_ID:
                return V_1_3_1;
            case V_1_3_0_ID:
                return V_1_3_0;
            case V_1_2_5_ID:
                return V_1_2_5;
            case V_1_2_4_ID:
                return V_1_2_4;
            case V_1_2_3_ID:
                return V_1_2_3;
            case V_1_2_2_ID:
                return V_1_2_2;
            case V_1_2_1_ID:
                return V_1_2_1;
            case V_1_2_0_ID:
                return V_1_2_0;
            case V_1_1_2_ID:
                return V_1_1_2;
            case V_1_1_1_ID:
                return V_1_1_1;
            case V_1_1_0_ID:
                return V_1_1_0;
            case V_1_0_4_ID:
                return V_1_0_4;
            case V_1_0_3_ID:
                return V_1_0_3;
            case V_1_0_2_ID:
                return V_1_0_2;
            case V_1_0_1_ID:
                return V_1_0_1;
            case V_1_0_0_ID:
                return V_1_0_0;
            case V_1_0_0_RC2_ID:
                return V_1_0_0_RC2;
            case V_1_0_0_RC1_ID:
                return V_1_0_0_RC1;
            case V_1_0_0_Beta2_ID:
                return V_1_0_0_Beta2;
            case V_1_0_0_Beta1_ID:
                return V_1_0_0_Beta1;
            case V_0_90_14_ID:
                return V_0_90_14;
            case V_0_90_13_ID:
                return V_0_90_13;
            case V_0_90_12_ID:
                return V_0_90_12;
            case V_0_90_11_ID:
                return V_0_90_11;
            case V_0_90_10_ID:
                return V_0_90_10;
            case V_0_90_9_ID:
                return V_0_90_9;
            case V_0_90_8_ID:
                return V_0_90_8;
            case V_0_90_7_ID:
                return V_0_90_7;
            case V_0_90_6_ID:
                return V_0_90_6;
            case V_0_90_5_ID:
                return V_0_90_5;
            case V_0_90_4_ID:
                return V_0_90_4;
            case V_0_90_3_ID:
                return V_0_90_3;
            case V_0_90_2_ID:
                return V_0_90_2;
            case V_0_90_1_ID:
                return V_0_90_1;
            case V_0_90_0_ID:
                return V_0_90_0;
            case V_0_90_0_RC2_ID:
                return V_0_90_0_RC2;
            case V_0_90_0_RC1_ID:
                return V_0_90_0_RC1;
            case V_0_90_0_Beta1_ID:
                return V_0_90_0_Beta1;
            case V_0_20_7_ID:
                return V_0_20_7;
            case V_0_20_6_ID:
                return V_0_20_6;
            case V_0_20_5_ID:
                return V_0_20_5;
            case V_0_20_4_ID:
                return V_0_20_4;
            case V_0_20_3_ID:
                return V_0_20_3;
            case V_0_20_2_ID:
                return V_0_20_2;
            case V_0_20_1_ID:
                return V_0_20_1;
            case V_0_20_0_ID:
                return V_0_20_0;
            case V_0_20_0_RC1_ID:
                return V_0_20_0_RC1;
            case V_0_19_0_RC1_ID:
                return V_0_19_0_RC1;
            case V_0_19_0_RC2_ID:
                return V_0_19_0_RC2;
            case V_0_19_0_RC3_ID:
                return V_0_19_0_RC3;
            case V_0_19_0_ID:
                return V_0_19_0;
            case V_0_19_1_ID:
                return V_0_19_1;
            case V_0_19_2_ID:
                return V_0_19_2;
            case V_0_19_3_ID:
                return V_0_19_3;
            case V_0_19_4_ID:
                return V_0_19_4;
            case V_0_19_5_ID:
                return V_0_19_5;
            case V_0_19_6_ID:
                return V_0_19_6;
            case V_0_19_7_ID:
                return V_0_19_7;
            case V_0_19_8_ID:
                return V_0_19_8;
            case V_0_19_9_ID:
                return V_0_19_9;
            case V_0_19_10_ID:
                return V_0_19_10;
            case V_0_19_11_ID:
                return V_0_19_11;
            case V_0_19_12_ID:
                return V_0_19_12;
            case V_0_19_13_ID:
                return V_0_19_13;
            case V_0_18_0_ID:
                return V_0_18_0;
            case V_0_18_1_ID:
                return V_0_18_1;
            case V_0_18_2_ID:
                return V_0_18_2;
            case V_0_18_3_ID:
                return V_0_18_3;
            case V_0_18_4_ID:
                return V_0_18_4;
            case V_0_18_5_ID:
                return V_0_18_5;
            case V_0_18_6_ID:
                return V_0_18_6;
            case V_0_18_7_ID:
                return V_0_18_7;
            case V_0_18_8_ID:
                return V_0_18_8;
            default:
                return new Version(id, false, Lucene.VERSION);
        }
    }

    /**
     * Return the {@link Version} of Elasticsearch that has been used to create an index given its settings.
     *
     * @throws ElasticsearchIllegalStateException if the given index settings doesn't contain a value for the key {@value IndexMetaData#SETTING_VERSION_CREATED}
     */
    public static Version indexCreated(Settings indexSettings) {
        final Version indexVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null);
        if (indexVersion == null) {
            throw new ElasticsearchIllegalStateException("[" + IndexMetaData.SETTING_VERSION_CREATED + "] is not present in the index settings for index with uuid: [" + indexSettings.get(IndexMetaData.SETTING_UUID) + "]");
        }
        return indexVersion;
    }

    /** Writes the version's id as a vint; symmetric with {@link #readVersion(StreamInput)}. */
    public static void writeVersion(Version version, StreamOutput out) throws IOException {
        out.writeVInt(version.id);
    }

    /**
     * Returns the smallest version between the 2.
     */
    public static Version smallest(Version version1, Version version2) {
        return version1.id < version2.id ? version1 : version2;
    }

    /**
     * Returns the version given its string representation, current version if the argument is null or empty
     *
     * @throws IllegalArgumentException if the string has fewer than 3 or more than 4 dot-parts,
     *                                  or a part is not numeric
     */
    public static Version fromString(String version) {
        if (!Strings.hasLength(version)) {
            return Version.CURRENT;
        }
        final boolean snapshot;
        if (snapshot = version.endsWith(SNAPSHOT_SUFFIX)) {
            // Strip the suffix (length derived from the constant rather than a magic 9)
            // before parsing the numeric parts.
            version = version.substring(0, version.length() - SNAPSHOT_SUFFIX.length());
        }
        String[] parts = version.split("\\.");
        if (parts.length < 3 || parts.length > 4) {
            throw new IllegalArgumentException("the version needs to contain major, minor and revision, and optionally the build");
        }
        try {
            //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo
            final int major = Integer.parseInt(parts[0]) * 1000000;
            final int minor = Integer.parseInt(parts[1]) * 10000;
            final int revision = Integer.parseInt(parts[2]) * 100;
            int build = 99; // 99 == GA release; <50 beta, 50..98 RC (see id format above)
            if (parts.length == 4) {
                String buildStr = parts[3];
                if (buildStr.startsWith("Beta")) {
                    build = Integer.parseInt(buildStr.substring(4));
                } else if (buildStr.startsWith("RC")) {
                    // else-if: a build qualifier is either Beta or RC, never both.
                    build = Integer.parseInt(buildStr.substring(2)) + 50;
                }
            }
            final Version versionFromId = fromId(major + minor + revision + build);
            if (snapshot != versionFromId.snapshot()) {
                // Re-wrap with the requested snapshot flag while keeping id/lucene version.
                return new Version(versionFromId.id, snapshot, versionFromId.luceneVersion);
            }
            return versionFromId;
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("unable to parse version " + version, e);
        }
    }

    public final int id;
    public final byte major;
    public final byte minor;
    public final byte revision;
    public final byte build;
    // NOTE(review): boxed Boolean is never null here (always set from a primitive),
    // but it is part of the public binary interface, so it is left as-is.
    public final Boolean snapshot;
    public final org.apache.lucene.util.Version luceneVersion;

    /** Decodes the XXYYZZAA id into major/minor/revision/build components. */
    Version(int id, boolean snapshot, org.apache.lucene.util.Version luceneVersion) {
        this.id = id;
        this.major = (byte) ((id / 1000000) % 100);
        this.minor = (byte) ((id / 10000) % 100);
        this.revision = (byte) ((id / 100) % 100);
        this.build = (byte) (id % 100);
        this.snapshot = snapshot;
        this.luceneVersion = luceneVersion;
    }

    public boolean snapshot() {
        return snapshot;
    }

    public boolean after(Version version) {
        return version.id < id;
    }

    public boolean onOrAfter(Version version) {
        return version.id <= id;
    }

    public boolean before(Version version) {
        return version.id > id;
    }

    public boolean onOrBefore(Version version) {
        return version.id >= id;
    }

    /**
     * Returns the minimum compatible version based on the current
     * version. Ie a node needs to have at least the return version in order
     * to communicate with a node running the current version. The returned version
     * is in most of the cases the smallest major version release unless the current version
     * is a beta or RC release then the version itself is returned.
     */
    public Version minimumCompatibilityVersion() {
        return Version.smallest(this, fromId(major * 1000000 + 99));
    }

    /**
     * Just the version number (without -SNAPSHOT if snapshot).
     */
    public String number() {
        StringBuilder sb = new StringBuilder();
        sb.append(major).append('.').append(minor).append('.').append(revision);
        if (build < 50) {
            sb.append(".Beta").append(build);
        } else if (build < 99) {
            sb.append(".RC").append(build - 50);
        }
        return sb.toString();
    }

    /** Prints version/build/JVM info; used as a quick diagnostic entry point. */
    public static void main(String[] args) {
        System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.hashShort() + "/" + Build.CURRENT.timestamp() + ", JVM: " + JvmInfo.jvmInfo().version());
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(number());
        if (snapshot()) {
            sb.append(SNAPSHOT_SUFFIX);
        }
        return sb.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        // Identity is fully determined by the integer id; consistent with hashCode().
        Version that = (Version) o;
        return id == that.id;
    }

    @Override
    public int hashCode() {
        return id;
    }

    /** Guice module binding a fixed {@link Version} instance for injection. */
    public static class Module extends AbstractModule {

        private final Version version;

        public Module(Version version) {
            this.version = version;
        }

        @Override
        protected void configure() {
            bind(Version.class).toInstance(version);
        }
    }
}
| |
package org.apache.lucene.codecs.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsConsumer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.codecs.TermsConsumer;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.packed.PackedInts;
// TODO: would be nice to somehow allow this to act like
// InstantiatedIndex, by never writing to disk; ie you write
// to this Codec in RAM only and then when you open a reader
// it pulls the FST directly from what you wrote w/o going
// to disk.
/** Stores terms & postings (docs, positions, payloads) in
* RAM, using an FST.
*
* <p>Note that this codec implements advance as a linear
* scan! This means if you store large fields in here,
 * queries that rely on advance (AND BooleanQuery,
 * PhraseQuery) will be relatively slow!
*
* @lucene.experimental */
// TODO: Maybe name this 'Cached' or something to reflect
// the reality that it is actually written to disk, but
// loads itself in ram?
public final class MemoryPostingsFormat extends PostingsFormat {
// True if the FST should be built in packed form (smaller, but limited to
// ~2.1 GB of postings — see the constructor javadoc below).
private final boolean doPackFST;
// Allowable overhead ratio handed to the packed-ints writer during FST
// construction.
private final float acceptableOverheadRatio;

// Default configuration: unpacked FST, default packed-ints overhead.
public MemoryPostingsFormat() {
    this(false, PackedInts.DEFAULT);
}

/**
 * Create MemoryPostingsFormat, specifying advanced FST options.
 * @param doPackFST true if a packed FST should be built.
 *        NOTE: packed FSTs are limited to ~2.1 GB of postings.
 * @param acceptableOverheadRatio allowable overhead for packed ints
 *        during FST construction.
 */
public MemoryPostingsFormat(boolean doPackFST, float acceptableOverheadRatio) {
    super("Memory");
    this.doPackFST = doPackFST;
    this.acceptableOverheadRatio = acceptableOverheadRatio;
}

@Override
public String toString() {
    return "PostingsFormat(name=" + getName() + " doPackFST= " + doPackFST + ")";
}
/**
 * Collects one field's terms: each term's postings are packed into a byte
 * blob (docFreq/totalTermFreq header followed by delta-coded postings) and
 * added to an FST keyed by the term bytes; finish() saves the FST to 'out'.
 */
private final static class TermsWriter extends TermsConsumer {
    private final IndexOutput out;
    private final FieldInfo field;
    private final Builder<BytesRef> builder;
    private final ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
    private final boolean doPackFST;
    private final float acceptableOverheadRatio;
    // Number of terms added so far; a field with zero terms writes nothing.
    private int termCount;

    public TermsWriter(IndexOutput out, FieldInfo field, boolean doPackFST, float acceptableOverheadRatio) {
        this.out = out;
        this.field = field;
        this.doPackFST = doPackFST;
        this.acceptableOverheadRatio = acceptableOverheadRatio;
        builder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, Integer.MAX_VALUE, outputs, null, doPackFST, acceptableOverheadRatio, true, 15);
    }

    // Serializes the current term's postings into 'buffer' as delta-coded
    // vints; see startDoc/addPosition for the exact layout.
    private class PostingsWriter extends PostingsConsumer {
        private int lastDocID;
        private int lastPos;
        private int lastPayloadLen;

        // NOTE: not private so we don't pay access check at runtime:
        int docCount;
        RAMOutputStream buffer = new RAMOutputStream();

        int lastOffsetLength;
        int lastOffset;

        @Override
        public void startDoc(int docID, int termDocFreq) throws IOException {
            //System.out.println("    startDoc docID=" + docID + " freq=" + termDocFreq);
            final int delta = docID - lastDocID;
            assert docID == 0 || delta > 0;
            lastDocID = docID;
            docCount++;

            if (field.getIndexOptions() == IndexOptions.DOCS_ONLY) {
                buffer.writeVInt(delta);
            } else if (termDocFreq == 1) {
                // Fold freq==1 into the low bit of the doc delta so the
                // common case costs no extra vint.
                buffer.writeVInt((delta<<1) | 1);
            } else {
                buffer.writeVInt(delta<<1);
                assert termDocFreq > 0;
                buffer.writeVInt(termDocFreq);
            }

            lastPos = 0;
            lastOffset = 0;
        }

        @Override
        public void addPosition(int pos, BytesRef payload, int startOffset, int endOffset) throws IOException {
            assert payload == null || field.hasPayloads();

            //System.out.println("      addPos pos=" + pos + " payload=" + payload);

            final int delta = pos - lastPos;
            assert delta >= 0;
            lastPos = pos;

            int payloadLen = 0;

            if (field.hasPayloads()) {
                payloadLen = payload == null ? 0 : payload.length;
                // Low bit of the position delta flags a payload-length
                // change; unchanged lengths cost no extra bytes.
                if (payloadLen != lastPayloadLen) {
                    lastPayloadLen = payloadLen;
                    buffer.writeVInt((delta<<1)|1);
                    buffer.writeVInt(payloadLen);
                } else {
                    buffer.writeVInt(delta<<1);
                }
            } else {
                buffer.writeVInt(delta);
            }

            if (field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0) {
                // don't use startOffset - lastEndOffset, because this creates lots of negative vints for synonyms,
                // and the numbers aren't that much smaller anyways.
                int offsetDelta = startOffset - lastOffset;
                int offsetLength = endOffset - startOffset;
                // Same low-bit trick as payloads: flag offset-length changes.
                if (offsetLength != lastOffsetLength) {
                    buffer.writeVInt(offsetDelta << 1 | 1);
                    buffer.writeVInt(offsetLength);
                } else {
                    buffer.writeVInt(offsetDelta << 1);
                }

                lastOffset = startOffset;
                lastOffsetLength = offsetLength;
            }

            if (payloadLen > 0) {
                buffer.writeBytes(payload.bytes, payload.offset, payloadLen);
            }
        }

        @Override
        public void finishDoc() {
        }

        // Clears per-term state so the single PostingsWriter instance can be
        // reused for every term of the field.
        public PostingsWriter reset() {
            assert buffer.getFilePointer() == 0;
            lastDocID = 0;
            docCount = 0;
            lastPayloadLen = 0;
            // force first offset to write its length
            lastOffsetLength = -1;
            return this;
        }
    }

    private final PostingsWriter postingsWriter = new PostingsWriter();

    @Override
    public PostingsConsumer startTerm(BytesRef text) {
        //System.out.println("  startTerm term=" + text.utf8ToString());
        return postingsWriter.reset();
    }

    private final RAMOutputStream buffer2 = new RAMOutputStream();
    private final BytesRef spare = new BytesRef();
    private byte[] finalBuffer = new byte[128];

    private final IntsRef scratchIntsRef = new IntsRef();

    // Concatenates the term's stats header (docFreq, plus the totalTermFreq
    // delta when freqs are indexed) with the packed postings bytes and adds
    // the resulting blob to the FST under the term's bytes.
    @Override
    public void finishTerm(BytesRef text, TermStats stats) throws IOException {

        assert postingsWriter.docCount == stats.docFreq;

        assert buffer2.getFilePointer() == 0;

        buffer2.writeVInt(stats.docFreq);
        if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
            // Stored as a delta: totalTermFreq >= docFreq always holds.
            buffer2.writeVLong(stats.totalTermFreq-stats.docFreq);
        }
        int pos = (int) buffer2.getFilePointer();
        buffer2.writeTo(finalBuffer, 0);
        buffer2.reset();

        final int totalBytes = pos + (int) postingsWriter.buffer.getFilePointer();
        if (totalBytes > finalBuffer.length) {
            finalBuffer = ArrayUtil.grow(finalBuffer, totalBytes);
        }
        postingsWriter.buffer.writeTo(finalBuffer, pos);
        postingsWriter.buffer.reset();

        spare.bytes = finalBuffer;
        spare.length = totalBytes;

        //System.out.println("    finishTerm term=" + text.utf8ToString() + " " + totalBytes + " bytes totalTF=" + stats.totalTermFreq);
        //for(int i=0;i<totalBytes;i++) {
        //  System.out.println("      " + Integer.toHexString(finalBuffer[i]&0xFF));
        //}

        // deepCopyOf because 'spare' aliases the reused finalBuffer.
        builder.add(Util.toIntsRef(text, scratchIntsRef), BytesRef.deepCopyOf(spare));
        termCount++;
    }

    // Writes this field's block (term count, field number, stats, serialized
    // FST) to 'out'; fields with no terms are skipped entirely.
    @Override
    public void finish(long sumTotalTermFreq, long sumDocFreq, int docCount) throws IOException {
        if (termCount > 0) {
            out.writeVInt(termCount);
            out.writeVInt(field.number);
            if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
                out.writeVLong(sumTotalTermFreq);
            }
            out.writeVLong(sumDocFreq);
            out.writeVInt(docCount);
            FST<BytesRef> fst = builder.finish();
            fst.save(out);
            //System.out.println("finish field=" + field.name + " fp=" + out.getFilePointer());
        }
    }

    @Override
    public Comparator<BytesRef> getComparator() {
        return BytesRef.getUTF8SortedAsUnicodeComparator();
    }
}
/** File extension of the single per-segment postings file. */
// FIX: declared final — this is a constant and was previously a mutable
// static field.
private static final String EXTENSION = "ram";

/**
 * Opens the segment's ".ram" output file and returns a consumer that
 * writes one FST-backed block per field; a trailing vInt of 0 marks
 * end-of-file for the reader.
 */
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {

    final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
    final IndexOutput out = state.directory.createOutput(fileName, state.context);

    return new FieldsConsumer() {
        @Override
        public TermsConsumer addField(FieldInfo field) {
            //System.out.println("\naddField field=" + field.name);
            return new TermsWriter(out, field, doPackFST, acceptableOverheadRatio);
        }

        @Override
        public void close() throws IOException {
            // EOF marker: readers stop at a block whose term count is 0.
            try {
                out.writeVInt(0);
            } finally {
                out.close();
            }
        }
    };
}
/**
 * DocsEnum that decodes one term's packed postings blob (written by
 * TermsWriter.PostingsWriter) with a linear scan; advance() is therefore
 * implemented via slowAdvance.
 */
private final static class FSTDocsEnum extends DocsEnum {
    private final IndexOptions indexOptions;
    private final boolean storePayloads;
    // Private copy of the postings bytes; grown on demand in reset().
    private byte[] buffer = new byte[16];
    private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);

    private Bits liveDocs;
    // How many docs of this term have been decoded so far (bounds the scan).
    private int docUpto;
    private int docID = -1;
    // Running doc id, accumulated from the delta-coded stream.
    private int accum;
    private int freq;
    private int payloadLen;
    private int numDocs;

    public FSTDocsEnum(IndexOptions indexOptions, boolean storePayloads) {
        this.indexOptions = indexOptions;
        this.storePayloads = storePayloads;
    }

    // Reuse is only safe when the decode shape (index options / payload
    // presence) matches what this enum was constructed for.
    public boolean canReuse(IndexOptions indexOptions, boolean storePayloads) {
        return indexOptions == this.indexOptions && storePayloads == this.storePayloads;
    }

    // Copies the term's postings bytes into the private buffer and rewinds
    // all decode state; numDocs is the term's docFreq.
    public FSTDocsEnum reset(BytesRef bufferIn, Bits liveDocs, int numDocs) {
        assert numDocs > 0;
        if (buffer.length < bufferIn.length) {
            buffer = ArrayUtil.grow(buffer, bufferIn.length);
        }
        in.reset(buffer, 0, bufferIn.length);
        System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length);
        this.liveDocs = liveDocs;
        docID = -1;
        accum = 0;
        docUpto = 0;
        freq = 1;
        payloadLen = 0;
        this.numDocs = numDocs;
        return this;
    }

    @Override
    public int nextDoc() {
        while(true) {
            //System.out.println("  nextDoc cycle docUpto=" + docUpto + " numDocs=" + numDocs + " fp=" + in.getPosition() + " this=" + this);
            if (docUpto == numDocs) {
                // System.out.println("    END");
                return docID = NO_MORE_DOCS;
            }
            docUpto++;
            if (indexOptions == IndexOptions.DOCS_ONLY) {
                accum += in.readVInt();
            } else {
                final int code = in.readVInt();
                accum += code >>> 1;
                //System.out.println("  docID=" + accum + " code=" + code);
                // Low bit set means freq==1 was folded into the doc delta.
                if ((code & 1) != 0) {
                    freq = 1;
                } else {
                    freq = in.readVInt();
                    assert freq > 0;
                }

                if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
                    // Skip positions/payloads
                    for(int posUpto=0;posUpto<freq;posUpto++) {
                        if (!storePayloads) {
                            in.readVInt();
                        } else {
                            final int posCode = in.readVInt();
                            // Low bit flags a payload-length change.
                            if ((posCode & 1) != 0) {
                                payloadLen = in.readVInt();
                            }
                            in.skipBytes(payloadLen);
                        }
                    }
                } else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
                    // Skip positions/offsets/payloads
                    for(int posUpto=0;posUpto<freq;posUpto++) {
                        int posCode = in.readVInt();
                        if (storePayloads && ((posCode & 1) != 0)) {
                            payloadLen = in.readVInt();
                        }
                        if ((in.readVInt() & 1) != 0) {
                            // new offset length
                            in.readVInt();
                        }
                        if (storePayloads) {
                            in.skipBytes(payloadLen);
                        }
                    }
                }
            }

            // Deleted docs must still be fully decoded (to keep the stream
            // pointer moving) but are not returned.
            if (liveDocs == null || liveDocs.get(accum)) {
                //System.out.println("    return docID=" + accum + " freq=" + freq);
                return (docID = accum);
            }
        }
    }

    @Override
    public int docID() {
        return docID;
    }

    @Override
    public int advance(int target) throws IOException {
        // TODO: we could make more efficient version, but, it
        // should be rare that this will matter in practice
        // since usually apps will not store "big" fields in
        // this codec!
        return slowAdvance(target);
    }

    @Override
    public int freq() {
        return freq;
    }

    @Override
    public long cost() {
        return numDocs;
    }
}
/**
 * DocsAndPositionsEnum that linearly decodes one term's packed postings
 * blob, including positions, optional offsets and optional payloads.
 * advance() is implemented via slowAdvance.
 */
private final static class FSTDocsAndPositionsEnum extends DocsAndPositionsEnum {
    private final boolean storePayloads;
    // Private copy of the postings bytes; grown on demand in reset().
    private byte[] buffer = new byte[16];
    private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);

    private Bits liveDocs;
    private int docUpto;
    private int docID = -1;
    // Running doc id, accumulated from the delta-coded stream.
    private int accum;
    private int freq;
    private int numDocs;
    // Positions of the current doc not yet consumed by nextPosition().
    private int posPending;
    private int payloadLength;
    final boolean storeOffsets;
    int offsetLength;
    int startOffset;

    private int pos;
    private final BytesRef payload = new BytesRef();

    public FSTDocsAndPositionsEnum(boolean storePayloads, boolean storeOffsets) {
        this.storePayloads = storePayloads;
        this.storeOffsets = storeOffsets;
    }

    // Reuse is only safe when the decode shape (payloads/offsets) matches.
    public boolean canReuse(boolean storePayloads, boolean storeOffsets) {
        return storePayloads == this.storePayloads && storeOffsets == this.storeOffsets;
    }

    // Copies the term's postings bytes into the private buffer and rewinds
    // all decode state; numDocs is the term's docFreq.
    public FSTDocsAndPositionsEnum reset(BytesRef bufferIn, Bits liveDocs, int numDocs) {
        assert numDocs > 0;

        // System.out.println("D&P reset bytes this=" + this);
        // for(int i=bufferIn.offset;i<bufferIn.length;i++) {
        //   System.out.println("  " + Integer.toHexString(bufferIn.bytes[i]&0xFF));
        // }

        if (buffer.length < bufferIn.length) {
            buffer = ArrayUtil.grow(buffer, bufferIn.length);
        }
        // FIX: the readable limit must be bufferIn.length — the arraycopy
        // below places exactly that many bytes at position 0. The previous
        // "bufferIn.length - bufferIn.offset" under-counted whenever the
        // incoming slice had a non-zero offset (FSTTermsEnum's postingsSpare
        // does), and was inconsistent with FSTDocsEnum.reset().
        in.reset(buffer, 0, bufferIn.length);
        System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length);
        this.liveDocs = liveDocs;
        docID = -1;
        accum = 0;
        docUpto = 0;
        payload.bytes = buffer;
        payloadLength = 0;
        this.numDocs = numDocs;
        posPending = 0;
        startOffset = storeOffsets ? 0 : -1; // always return -1 if no offsets are stored
        offsetLength = 0;
        return this;
    }

    @Override
    public int nextDoc() {
        // Drain any unread positions of the previous doc so the stream
        // pointer lands on the next doc's header.
        while (posPending > 0) {
            nextPosition();
        }
        while(true) {
            //System.out.println("  nextDoc cycle docUpto=" + docUpto + " numDocs=" + numDocs + " fp=" + in.getPosition() + " this=" + this);
            if (docUpto == numDocs) {
                //System.out.println("    END");
                return docID = NO_MORE_DOCS;
            }
            docUpto++;

            final int code = in.readVInt();
            accum += code >>> 1;
            // Low bit set means freq==1 was folded into the doc delta.
            if ((code & 1) != 0) {
                freq = 1;
            } else {
                freq = in.readVInt();
                assert freq > 0;
            }

            if (liveDocs == null || liveDocs.get(accum)) {
                pos = 0;
                startOffset = storeOffsets ? 0 : -1;
                posPending = freq;
                //System.out.println("    return docID=" + accum + " freq=" + freq);
                return (docID = accum);
            }

            // Deleted doc: skip its positions/offsets/payloads, keeping the
            // sticky payload/offset lengths up to date.
            for(int posUpto=0;posUpto<freq;posUpto++) {
                if (!storePayloads) {
                    in.readVInt();
                } else {
                    final int skipCode = in.readVInt();
                    if ((skipCode & 1) != 0) {
                        payloadLength = in.readVInt();
                        //System.out.println("    new payloadLen=" + payloadLength);
                    }
                }

                if (storeOffsets) {
                    if ((in.readVInt() & 1) != 0) {
                        // new offset length
                        offsetLength = in.readVInt();
                    }
                }

                if (storePayloads) {
                    in.skipBytes(payloadLength);
                }
            }
        }
    }

    @Override
    public int nextPosition() {
        //System.out.println("    nextPos storePayloads=" + storePayloads + " this=" + this);
        assert posPending > 0;
        posPending--;
        if (!storePayloads) {
            pos += in.readVInt();
        } else {
            final int code = in.readVInt();
            pos += code >>> 1;
            // Low bit flags a payload-length change.
            if ((code & 1) != 0) {
                payloadLength = in.readVInt();
                //System.out.println("      new payloadLen=" + payloadLength);
                //} else {
                //System.out.println("      same payloadLen=" + payloadLength);
            }
        }

        if (storeOffsets) {
            int offsetCode = in.readVInt();
            if ((offsetCode & 1) != 0) {
                // new offset length
                offsetLength = in.readVInt();
            }
            startOffset += offsetCode >>> 1;
        }

        if (storePayloads) {
            // Point the shared payload ref at the bytes in place; no copy.
            payload.offset = in.getPosition();
            in.skipBytes(payloadLength);
            payload.length = payloadLength;
        }

        //System.out.println("      pos=" + pos + " payload=" + payload + " fp=" + in.getPosition());

        return pos;
    }

    @Override
    public int startOffset() {
        return startOffset;
    }

    @Override
    public int endOffset() {
        return startOffset + offsetLength;
    }

    @Override
    public BytesRef getPayload() {
        return payload.length > 0 ? payload : null;
    }

    @Override
    public int docID() {
        return docID;
    }

    @Override
    public int advance(int target) throws IOException {
        // TODO: we could make more efficient version, but, it
        // should be rare that this will matter in practice
        // since usually apps will not store "big" fields in
        // this codec!
        return slowAdvance(target);
    }

    @Override
    public int freq() {
        return freq;
    }

    @Override
    public long cost() {
        return numDocs;
    }
}
/**
 * TermsEnum over the field's FST. The FST output blob for the current term
 * is lazily decoded (decodeMetaData) into docFreq/totalTermFreq plus a
 * slice ('postingsSpare') pointing at the packed postings bytes.
 */
private final static class FSTTermsEnum extends TermsEnum {
    private final FieldInfo field;
    private final BytesRefFSTEnum<BytesRef> fstEnum;
    private final ByteArrayDataInput buffer = new ByteArrayDataInput();
    // True once the current term's output blob has been decoded.
    private boolean didDecode;

    private int docFreq;
    private long totalTermFreq;
    private BytesRefFSTEnum.InputOutput<BytesRef> current;
    private BytesRef postingsSpare = new BytesRef();

    public FSTTermsEnum(FieldInfo field, FST<BytesRef> fst) {
        this.field = field;
        fstEnum = new BytesRefFSTEnum<BytesRef>(fst);
    }

    // Splits current.output into the stats header and the postings slice;
    // idempotent per term thanks to 'didDecode'.
    private void decodeMetaData() {
        if (!didDecode) {
            buffer.reset(current.output.bytes, current.output.offset, current.output.length);
            docFreq = buffer.readVInt();
            if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
                // Written as totalTermFreq - docFreq; undo the delta.
                totalTermFreq = docFreq + buffer.readVLong();
            } else {
                totalTermFreq = -1;
            }
            // The remainder of the blob is the packed postings.
            postingsSpare.bytes = current.output.bytes;
            postingsSpare.offset = buffer.getPosition();
            postingsSpare.length = current.output.length - (buffer.getPosition() - current.output.offset);
            //System.out.println("  df=" + docFreq + " totTF=" + totalTermFreq + " offset=" + buffer.getPosition() + " len=" + current.output.length);
            didDecode = true;
        }
    }

    @Override
    public boolean seekExact(BytesRef text) throws IOException {
        //System.out.println("te.seekExact text=" + field.name + ":" + text.utf8ToString() + " this=" + this);
        current = fstEnum.seekExact(text);
        didDecode = false;
        return current != null;
    }

    @Override
    public SeekStatus seekCeil(BytesRef text) throws IOException {
        //System.out.println("te.seek text=" + field.name + ":" + text.utf8ToString() + " this=" + this);
        current = fstEnum.seekCeil(text);
        if (current == null) {
            return SeekStatus.END;
        } else {

            // System.out.println("  got term=" + current.input.utf8ToString());
            // for(int i=0;i<current.output.length;i++) {
            //   System.out.println("    " + Integer.toHexString(current.output.bytes[i]&0xFF));
            // }

            didDecode = false;

            if (text.equals(current.input)) {
                //System.out.println("  found!");
                return SeekStatus.FOUND;
            } else {
                //System.out.println("  not found: " + current.input.utf8ToString());
                return SeekStatus.NOT_FOUND;
            }
        }
    }

    @Override
    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
        decodeMetaData();
        FSTDocsEnum docsEnum;

        // Reuse the caller's enum only if it matches this field's shape.
        if (reuse == null || !(reuse instanceof FSTDocsEnum)) {
            docsEnum = new FSTDocsEnum(field.getIndexOptions(), field.hasPayloads());
        } else {
            docsEnum = (FSTDocsEnum) reuse;
            if (!docsEnum.canReuse(field.getIndexOptions(), field.hasPayloads())) {
                docsEnum = new FSTDocsEnum(field.getIndexOptions(), field.hasPayloads());
            }
        }
        return docsEnum.reset(this.postingsSpare, liveDocs, docFreq);
    }

    @Override
    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) {

        boolean hasOffsets = field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
        // Positions were not indexed for this field.
        if (field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
            return null;
        }

        decodeMetaData();
        FSTDocsAndPositionsEnum docsAndPositionsEnum;
        if (reuse == null || !(reuse instanceof FSTDocsAndPositionsEnum)) {
            docsAndPositionsEnum = new FSTDocsAndPositionsEnum(field.hasPayloads(), hasOffsets);
        } else {
            docsAndPositionsEnum = (FSTDocsAndPositionsEnum) reuse;
            if (!docsAndPositionsEnum.canReuse(field.hasPayloads(), hasOffsets)) {
                docsAndPositionsEnum = new FSTDocsAndPositionsEnum(field.hasPayloads(), hasOffsets);
            }
        }
        //System.out.println("D&P reset this=" + this);
        return docsAndPositionsEnum.reset(postingsSpare, liveDocs, docFreq);
    }

    @Override
    public BytesRef term() {
        return current.input;
    }

    @Override
    public BytesRef next() throws IOException {
        //System.out.println("te.next");
        current = fstEnum.next();
        if (current == null) {
            //System.out.println("  END");
            return null;
        }
        didDecode = false;
        //System.out.println("  term=" + field.name + ":" + current.input.utf8ToString());
        return current.input;
    }

    @Override
    public int docFreq() {
        decodeMetaData();
        return docFreq;
    }

    @Override
    public long totalTermFreq() {
        decodeMetaData();
        return totalTermFreq;
    }

    @Override
    public Comparator<BytesRef> getComparator() {
        return BytesRef.getUTF8SortedAsUnicodeComparator();
    }

    @Override
    public void seekExact(long ord) {
        // NOTE: we could add this...
        throw new UnsupportedOperationException();
    }

    @Override
    public long ord() {
        // NOTE: we could add this...
        throw new UnsupportedOperationException();
    }
}
/**
 * Per-field Terms implementation backed by an FST loaded fully into RAM at
 * open time. The FST maps each term's bytes to the blob holding its
 * docFreq/totalTermFreq header plus packed postings.
 */
private final static class TermsReader extends Terms {

    private final long sumTotalTermFreq;
    private final long sumDocFreq;
    private final int docCount;
    private final int termCount;
    // Not final so fieldsProducer's close() can null it to release the RAM.
    private FST<BytesRef> fst;
    private final ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
    private final FieldInfo field;

    // Decodes one field block (written by TermsWriter.finish) and loads the
    // field's FST entirely into memory.
    public TermsReader(FieldInfos fieldInfos, IndexInput in, int termCount) throws IOException {
        this.termCount = termCount;
        final int fieldNumber = in.readVInt();
        field = fieldInfos.fieldInfo(fieldNumber);
        if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
            sumTotalTermFreq = in.readVLong();
        } else {
            // Not stored when only docs are indexed.
            sumTotalTermFreq = -1;
        }
        sumDocFreq = in.readVLong();
        docCount = in.readVInt();

        fst = new FST<BytesRef>(in, outputs);
    }

    @Override
    public long getSumTotalTermFreq() {
        return sumTotalTermFreq;
    }

    @Override
    public long getSumDocFreq() {
        return sumDocFreq;
    }

    @Override
    public int getDocCount() {
        return docCount;
    }

    @Override
    public long size() {
        return termCount;
    }

    @Override
    public TermsEnum iterator(TermsEnum reuse) {
        // A fresh enum each time; FSTTermsEnum holds per-iteration state.
        return new FSTTermsEnum(field, fst);
    }

    @Override
    public Comparator<BytesRef> getComparator() {
        return BytesRef.getUTF8SortedAsUnicodeComparator();
    }

    @Override
    public boolean hasFreqs() {
        return field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
    }

    @Override
    public boolean hasOffsets() {
        return field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
    }

    @Override
    public boolean hasPositions() {
        return field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    }

    @Override
    public boolean hasPayloads() {
        return field.hasPayloads();
    }

    // RAM cost is dominated by the FST; zero once close() has dropped it.
    public long ramBytesUsed() {
        return ((fst!=null) ? fst.sizeInBytes() : 0);
    }
}
/**
 * Reads the segment's ".ram" file eagerly: every field's FST is loaded into
 * RAM up front (READONCE context), then the file is closed; the returned
 * producer serves terms purely from the in-memory map.
 */
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
    final IndexInput in = state.directory.openInput(fileName, IOContext.READONCE);

    final SortedMap<String,TermsReader> fields = new TreeMap<String,TermsReader>();

    try {
        while(true) {
            final int termCount = in.readVInt();
            // A zero term count is the EOF marker written by fieldsConsumer.
            if (termCount == 0) {
                break;
            }
            final TermsReader termsReader = new TermsReader(state.fieldInfos, in, termCount);
            // System.out.println("load field=" + termsReader.field.name);
            fields.put(termsReader.field.name, termsReader);
        }
    } finally {
        in.close();
    }

    return new FieldsProducer() {
        @Override
        public Iterator<String> iterator() {
            return Collections.unmodifiableSet(fields.keySet()).iterator();
        }

        @Override
        public Terms terms(String field) {
            return fields.get(field);
        }

        @Override
        public int size() {
            return fields.size();
        }

        @Override
        public void close() {
            // Drop ref to FST:
            for(TermsReader termsReader : fields.values()) {
                termsReader.fst = null;
            }
        }

        @Override
        public long ramBytesUsed() {
            long sizeInBytes = 0;
            for(Map.Entry<String,TermsReader> entry: fields.entrySet()) {
                // Key chars plus the per-field FST size.
                sizeInBytes += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR);
                sizeInBytes += entry.getValue().ramBytesUsed();
            }
            return sizeInBytes;
        }
    };
}
}
| |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.concurrent;
import io.netty.util.Signal;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.util.concurrent.TimeUnit;
import static java.util.concurrent.TimeUnit.*;
public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
private static final InternalLogger logger =
        InternalLoggerFactory.getInstance(DefaultPromise.class);

// NOTE(review): appears to bound how deep inline listener notification may
// recurse before being handed off to the executor — the consuming code
// (notifyListener) is outside this view; confirm there.
private static final int MAX_LISTENER_STACK_DEPTH = 8;
private static final ThreadLocal<Integer> LISTENER_STACK_DEPTH = new ThreadLocal<Integer>() {
    @Override
    protected Integer initialValue() {
        return 0;
    }
};
// Sentinel stored in 'result' when the promise succeeds with a null value,
// so that 'result != null' can serve as the done flag (see setSuccess0 and
// getNow).
private static final Signal SUCCESS = new Signal(DefaultPromise.class.getName() + ".SUCCESS");

private final EventExecutor executor;

// null while pending; afterwards holds the success value, the SUCCESS
// sentinel, or a CauseHolder wrapping the failure cause.
private volatile Object result;
private Object listeners; // Can be ChannelFutureListener or DefaultFutureListeners
// Number of threads currently blocked in await*/sync*; completion only
// calls notifyAll() when this is non-zero (see hasWaiters()).
private short waiters;

/**
 * Creates a new instance.
 *
 * It is preferable to use {@link EventExecutor#newPromise()} to create a new promise
 *
 * @param executor
 *        the {@link EventExecutor} which is used to notify the promise once it is complete
 */
public DefaultPromise(EventExecutor executor) {
    if (executor == null) {
        throw new NullPointerException("executor");
    }
    this.executor = executor;
}

protected DefaultPromise() {
    // only for subclasses
    executor = null;
}

protected EventExecutor executor() {
    return executor;
}
@Override
public boolean isDone() {
    // 'result' is only ever assigned on completion, so non-null means done.
    return result != null;
}

@Override
public boolean isSuccess() {
    // Snapshot the volatile field once to decide both "done" and "failed".
    final Object snapshot = result;
    return snapshot != null && !(snapshot instanceof CauseHolder);
}

@Override
public Throwable cause() {
    // Only failed completions wrap their cause in a CauseHolder.
    final Object snapshot = result;
    return snapshot instanceof CauseHolder ? ((CauseHolder) snapshot).cause : null;
}
@Override
public Promise<V> addListener(GenericFutureListener<? extends Future<V>> listener) {
    if (listener == null) {
        throw new NullPointerException("listener");
    }

    // Fast path: already complete — notify immediately without locking.
    if (isDone()) {
        notifyListener(executor(), this, listener);
        return this;
    }

    synchronized (this) {
        // Re-check under the lock; completion (setSuccess0/setFailure0)
        // also synchronizes on 'this', so the done state is stable here.
        if (!isDone()) {
            if (listeners == null) {
                // First listener is stored bare to avoid allocation.
                listeners = listener;
            } else {
                if (listeners instanceof DefaultFutureListeners) {
                    ((DefaultFutureListeners) listeners).add(listener);
                } else {
                    // Second listener: promote the bare listener to a list.
                    @SuppressWarnings("unchecked")
                    final GenericFutureListener<? extends Future<V>> firstListener =
                            (GenericFutureListener<? extends Future<V>>) listeners;
                    listeners = new DefaultFutureListeners(firstListener, listener);
                }
            }
            return this;
        }
    }

    // The promise completed between the first check and taking the lock.
    notifyListener(executor(), this, listener);
    return this;
}
@Override
public Promise<V> addListeners(GenericFutureListener<? extends Future<V>>... listeners) {
    if (listeners == null) {
        throw new NullPointerException("listeners");
    }
    // Stop at the first null entry (treated as end-of-array, not an error).
    for (int i = 0; i < listeners.length; i++) {
        final GenericFutureListener<? extends Future<V>> listener = listeners[i];
        if (listener == null) {
            break;
        }
        addListener(listener);
    }
    return this;
}
@Override
public Promise<V> removeListener(GenericFutureListener<? extends Future<V>> listener) {
    if (listener == null) {
        throw new NullPointerException("listener");
    }

    // Once done, listeners are frozen (and already notified), so removal
    // is a no-op.
    if (isDone()) {
        return this;
    }

    synchronized (this) {
        if (!isDone()) {
            if (listeners instanceof DefaultFutureListeners) {
                ((DefaultFutureListeners) listeners).remove(listener);
            } else if (listeners == listener) {
                // Single-listener case: clear the bare slot.
                listeners = null;
            }
        }
    }
    return this;
}
@Override
public Promise<V> removeListeners(GenericFutureListener<? extends Future<V>>... listeners) {
    if (listeners == null) {
        throw new NullPointerException("listeners");
    }
    // Stop at the first null entry (treated as end-of-array, not an error).
    for (int i = 0; i < listeners.length; i++) {
        final GenericFutureListener<? extends Future<V>> listener = listeners[i];
        if (listener == null) {
            break;
        }
        removeListener(listener);
    }
    return this;
}
// Waits (interruptibly) for completion, then rethrows any failure cause.
@Override
public Promise<V> sync() throws InterruptedException {
    await();
    rethrowIfFailed();
    return this;
}

// Waits (uninterruptibly) for completion, then rethrows any failure cause.
@Override
public Promise<V> syncUninterruptibly() {
    awaitUninterruptibly();
    rethrowIfFailed();
    return this;
}

// Rethrows the failure cause, if any, without wrapping it.
private void rethrowIfFailed() {
    final Throwable cause = cause();
    if (cause != null) {
        PlatformDependent.throwException(cause);
    }
}
@Override
public Promise<V> await() throws InterruptedException {
    if (isDone()) {
        return this;
    }

    // Fail fast if the caller was interrupted before we start waiting.
    if (Thread.interrupted()) {
        throw new InterruptedException();
    }

    synchronized (this) {
        // Loop guards against spurious wakeups.
        while (!isDone()) {
            // Blocking on the promise's own event-loop thread would
            // deadlock — completion could never run.
            checkDeadLock();
            incWaiters();
            try {
                wait();
            } finally {
                decWaiters();
            }
        }
    }
    return this;
}
// Timed interruptible wait; true once done, false if the timeout elapses.
@Override
public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
    return await0(unit.toNanos(timeout), true);
}

// Millisecond convenience overload of the timed interruptible wait.
@Override
public boolean await(long timeoutMillis) throws InterruptedException {
    return await0(TimeUnit.MILLISECONDS.toNanos(timeoutMillis), true);
}
@Override
public Promise<V> awaitUninterruptibly() {
    if (isDone()) {
        return this;
    }

    boolean interrupted = false;
    synchronized (this) {
        // Loop guards against spurious wakeups.
        while (!isDone()) {
            checkDeadLock();
            incWaiters();
            try {
                wait();
            } catch (InterruptedException e) {
                // Swallow the interrupt while waiting but remember it, so
                // the thread's interrupt status can be restored below.
                interrupted = true;
            } finally {
                decWaiters();
            }
        }
    }

    if (interrupted) {
        Thread.currentThread().interrupt();
    }

    return this;
}
// Timed uninterruptible wait; true once done, false if the timeout elapses.
@Override
public boolean awaitUninterruptibly(long timeout, TimeUnit unit) {
    try {
        return await0(unit.toNanos(timeout), false);
    } catch (InterruptedException e) {
        // await0 must not throw when called with interruptable=false, so
        // reaching here is an internal invariant violation.
        // FIX: preserve the cause instead of throwing a bare InternalError.
        throw new InternalError(e);
    }
}

// Millisecond convenience overload of the timed uninterruptible wait.
@Override
public boolean awaitUninterruptibly(long timeoutMillis) {
    try {
        return await0(TimeUnit.MILLISECONDS.toNanos(timeoutMillis), false);
    } catch (InterruptedException e) {
        // See above: impossible by contract of await0(..., false).
        throw new InternalError(e);
    }
}
/**
 * Shared timed-wait implementation. Returns true once the promise is done,
 * false if the timeout elapses first. When {@code interruptable} is false,
 * interrupts are absorbed during the wait and the thread's interrupt status
 * is restored on the way out.
 */
private boolean await0(long timeoutNanos, boolean interruptable) throws InterruptedException {
    if (isDone()) {
        return true;
    }

    // Non-positive timeout: just report the current state, never block.
    if (timeoutNanos <= 0) {
        return isDone();
    }

    if (interruptable && Thread.interrupted()) {
        throw new InterruptedException();
    }

    long startTime = timeoutNanos <= 0 ? 0 : System.nanoTime();
    long waitTime = timeoutNanos;
    boolean interrupted = false;

    try {
        synchronized (this) {
            if (isDone()) {
                return true;
            }

            if (waitTime <= 0) {
                return isDone();
            }

            checkDeadLock();
            incWaiters();
            try {
                for (;;) {
                    try {
                        // Object.wait takes millis plus residual nanos.
                        wait(waitTime / 1000000, (int) (waitTime % 1000000));
                    } catch (InterruptedException e) {
                        if (interruptable) {
                            throw e;
                        } else {
                            interrupted = true;
                        }
                    }

                    if (isDone()) {
                        return true;
                    } else {
                        // Spurious wakeup or interrupt: recompute the
                        // remaining budget and keep waiting if any is left.
                        waitTime = timeoutNanos - (System.nanoTime() - startTime);
                        if (waitTime <= 0) {
                            return isDone();
                        }
                    }
                }
            } finally {
                decWaiters();
            }
        }
    } finally {
        if (interrupted) {
            // Restore the interrupt status swallowed above.
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Throws {@link BlockingOperationException} when the calling thread is this
 * promise's own event-loop thread, since blocking there would deadlock:
 * completion could never be processed.
 */
protected void checkDeadLock() {
    final EventExecutor executor = executor();
    if (executor != null && executor.inEventLoop()) {
        throw new BlockingOperationException();
    }
}
// Marks the promise succeeded, throwing if it was already complete.
@Override
public Promise<V> setSuccess(V result) {
    if (!setSuccess0(result)) {
        throw new IllegalStateException("complete already");
    }
    notifyListeners();
    return this;
}

// Like setSuccess, but reports an already-complete promise as false.
@Override
public boolean trySuccess(V result) {
    if (!setSuccess0(result)) {
        return false;
    }
    notifyListeners();
    return true;
}

// Marks the promise failed, throwing if it was already complete.
@Override
public Promise<V> setFailure(Throwable cause) {
    if (!setFailure0(cause)) {
        throw new IllegalStateException("complete already", cause);
    }
    notifyListeners();
    return this;
}

// Like setFailure, but reports an already-complete promise as false.
@Override
public boolean tryFailure(Throwable cause) {
    if (!setFailure0(cause)) {
        return false;
    }
    notifyListeners();
    return true;
}
private boolean setFailure0(Throwable cause) {
if (isDone()) {
return false;
}
synchronized (this) {
// Allow only once.
if (isDone()) {
return false;
}
result = new CauseHolder(cause);
if (hasWaiters()) {
notifyAll();
}
}
return true;
}
/**
 * Atomically records {@code result} as this promise's success value and
 * wakes any awaiting threads. Listener notification is the caller's job.
 *
 * @return {@code true} if this call completed the promise, {@code false}
 *         if it was already complete
 */
private boolean setSuccess0(V result) {
    // Cheap unsynchronized pre-check.
    if (isDone()) {
        return false;
    }
    synchronized (this) {
        // Re-check under the lock: only the first completion may win.
        if (isDone()) {
            return false;
        }
        // A null result is stored as the SUCCESS marker — presumably so the
        // result field being non-null signals completion.
        this.result = result == null ? SUCCESS : result;
        if (hasWaiters()) {
            notifyAll();
        }
    }
    return true;
}
/**
 * Returns the success value if one is available right now, else {@code null}.
 * Both "not yet complete", "completed with null/void" (the SUCCESS marker)
 * and "failed" (a CauseHolder) map to {@code null}.
 */
@Override
@SuppressWarnings("unchecked")
public V getNow() {
    // Read the volatile-ish field once; it may be written concurrently.
    final Object current = this.result;
    if (current == SUCCESS || current instanceof CauseHolder) {
        return null;
    }
    return (V) current;
}
// True when at least one thread is blocked in await0() on this promise's monitor.
private boolean hasWaiters() {
    return waiters > 0;
}
// Registers a waiting thread; capped so the short counter cannot overflow.
// Called while synchronized on this (see await0).
private void incWaiters() {
    if (waiters == Short.MAX_VALUE) {
        throw new IllegalStateException("too many waiters");
    }
    waiters ++;
}
// Deregisters a waiting thread. Called while synchronized on this (see await0).
private void decWaiters() {
    waiters --;
}
/**
 * Notifies and clears all registered listeners exactly once — inline when
 * already on the event loop, otherwise by submitting a task to the executor.
 */
private void notifyListeners() {
    // This method doesn't need synchronization because:
    // 1) This method is always called after synchronized (this) block.
    //    Hence any listener list modification happens-before this method.
    // 2) This method is called only when 'done' is true. Once 'done'
    //    becomes true, the listener list is never modified - see add/removeListener()
    Object listeners = this.listeners;
    if (listeners == null) {
        return;
    }
    // Clear the field so each listener is notified at most once.
    this.listeners = null;
    EventExecutor executor = executor();
    if (executor.inEventLoop()) {
        // The field holds either a DefaultFutureListeners aggregate or a
        // single GenericFutureListener.
        if (listeners instanceof DefaultFutureListeners) {
            notifyListeners0(this, (DefaultFutureListeners) listeners);
        } else {
            @SuppressWarnings("unchecked")
            final GenericFutureListener<? extends Future<V>> l =
                    (GenericFutureListener<? extends Future<V>>) listeners;
            notifyListener0(this, l);
        }
    } else {
        try {
            if (listeners instanceof DefaultFutureListeners) {
                final DefaultFutureListeners dfl = (DefaultFutureListeners) listeners;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        notifyListeners0(DefaultPromise.this, dfl);
                    }
                });
            } else {
                @SuppressWarnings("unchecked")
                final GenericFutureListener<? extends Future<V>> l =
                        (GenericFutureListener<? extends Future<V>>) listeners;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        notifyListener0(DefaultPromise.this, l);
                    }
                });
            }
        } catch (Throwable t) {
            // Best effort: the executor may already be shut down; log and drop.
            logger.error("Failed to notify listener(s). Event loop shut down?", t);
        }
    }
}
/**
 * Notifies every listener held by the aggregate, in registration order.
 * Iterates only the first {@code size()} slots — the backing array may be
 * over-allocated.
 */
private static void notifyListeners0(Future<?> future, DefaultFutureListeners listeners) {
    final GenericFutureListener<?>[] array = listeners.listeners();
    final int count = listeners.size();
    for (int index = 0; index < count; index++) {
        notifyListener0(future, array[index]);
    }
}
/**
 * Notifies a single listener. When already on the event loop the listener is
 * invoked inline, but only while the per-thread nesting counter is below
 * MAX_LISTENER_STACK_DEPTH; deeper nesting (or being off the loop) re-submits
 * the notification as a task, bounding stack growth from listeners that
 * complete further futures.
 */
protected static void notifyListener(
        final EventExecutor eventExecutor, final Future<?> future, final GenericFutureListener<?> l) {
    if (eventExecutor.inEventLoop()) {
        final Integer stackDepth = LISTENER_STACK_DEPTH.get();
        if (stackDepth < MAX_LISTENER_STACK_DEPTH) {
            // Track the recursion depth for this thread around the inline call.
            LISTENER_STACK_DEPTH.set(stackDepth + 1);
            try {
                notifyListener0(future, l);
            } finally {
                LISTENER_STACK_DEPTH.set(stackDepth);
            }
            return;
        }
    }
    // Either off the event loop or too deeply nested: defer to the executor.
    try {
        eventExecutor.execute(new Runnable() {
            @Override
            public void run() {
                notifyListener(eventExecutor, future, l);
            }
        });
    } catch (Throwable t) {
        // The executor may be shut down; log and drop.
        logger.error("Failed to notify a listener. Event loop shut down?", t);
    }
}
// Invokes one listener; any exception it throws is logged and swallowed so
// a misbehaving listener cannot prevent the remaining listeners from running.
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void notifyListener0(Future future, GenericFutureListener l) {
    try {
        l.operationComplete(future);
    } catch (Throwable t) {
        if (logger.isWarnEnabled()) {
            logger.warn("An exception was thrown by " + l.getClass().getName() + ".operationComplete()", t);
        }
    }
}
/**
 * Returns a {@link GenericProgressiveFutureListener}, an array of {@link GenericProgressiveFutureListener}, or
 * {@code null}.
 */
private synchronized Object progressiveListeners() {
    Object listeners = this.listeners;
    if (listeners == null) {
        // No listeners added
        return null;
    }
    if (listeners instanceof DefaultFutureListeners) {
        // Copy DefaultFutureListeners into an array of listeners.
        DefaultFutureListeners dfl = (DefaultFutureListeners) listeners;
        int progressiveSize = dfl.progressiveSize();
        switch (progressiveSize) {
            case 0:
                return null;
            case 1:
                // Single progressive listener: return it directly, no array copy.
                for (GenericFutureListener<?> l: dfl.listeners()) {
                    if (l instanceof GenericProgressiveFutureListener) {
                        return l;
                    }
                }
                return null;
        }
        // Two or more: filter just the progressive listeners into a new array.
        GenericFutureListener<?>[] array = dfl.listeners();
        GenericProgressiveFutureListener<?>[] copy = new GenericProgressiveFutureListener[progressiveSize];
        // j counts matches; loop stops once all progressiveSize entries are found.
        for (int i = 0, j = 0; j < progressiveSize; i ++) {
            GenericFutureListener<?> l = array[i];
            if (l instanceof GenericProgressiveFutureListener) {
                copy[j ++] = (GenericProgressiveFutureListener<?>) l;
            }
        }
        return copy;
    } else if (listeners instanceof GenericProgressiveFutureListener) {
        return listeners;
    } else {
        // Only one listener was added and it's not a progressive listener.
        return null;
    }
}
/**
 * Notifies all progressive listeners of a progress update — inline when on
 * the event loop, otherwise via a task submitted to the executor.
 *
 * @param progress the amount of work done so far
 * @param total the total amount of work
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
void notifyProgressiveListeners(final long progress, final long total) {
    // Either a single listener, an array of listeners, or null (none).
    final Object listeners = progressiveListeners();
    if (listeners == null) {
        return;
    }
    final ProgressiveFuture<V> self = (ProgressiveFuture<V>) this;
    EventExecutor executor = executor();
    if (executor.inEventLoop()) {
        if (listeners instanceof GenericProgressiveFutureListener[]) {
            notifyProgressiveListeners0(
                    self, (GenericProgressiveFutureListener<?>[]) listeners, progress, total);
        } else {
            notifyProgressiveListener0(
                    self, (GenericProgressiveFutureListener<ProgressiveFuture<V>>) listeners, progress, total);
        }
    } else {
        try {
            if (listeners instanceof GenericProgressiveFutureListener[]) {
                final GenericProgressiveFutureListener<?>[] array =
                        (GenericProgressiveFutureListener<?>[]) listeners;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        notifyProgressiveListeners0(self, array, progress, total);
                    }
                });
            } else {
                final GenericProgressiveFutureListener<ProgressiveFuture<V>> l =
                        (GenericProgressiveFutureListener<ProgressiveFuture<V>>) listeners;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        notifyProgressiveListener0(self, l, progress, total);
                    }
                });
            }
        } catch (Throwable t) {
            // The executor may be shut down; log and drop.
            logger.error("Failed to notify listener(s). Event loop shut down?", t);
        }
    }
}
/**
 * Notifies each progressive listener in the array of a progress update.
 * A {@code null} entry terminates the loop — the array may be over-allocated.
 */
private static void notifyProgressiveListeners0(
        ProgressiveFuture<?> future, GenericProgressiveFutureListener<?>[] listeners, long progress, long total) {
    for (GenericProgressiveFutureListener<?> listener : listeners) {
        if (listener == null) {
            break;
        }
        notifyProgressiveListener0(future, listener, progress, total);
    }
}
// Invokes one progressive listener; exceptions are logged and swallowed so a
// misbehaving listener cannot break the rest of the notification chain.
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void notifyProgressiveListener0(
        ProgressiveFuture future, GenericProgressiveFutureListener l, long progress, long total) {
    try {
        l.operationProgressed(future, progress, total);
    } catch (Throwable t) {
        if (logger.isWarnEnabled()) {
            logger.warn("An exception was thrown by " + l.getClass().getName() + ".operationProgressed()", t);
        }
    }
}
/**
 * Wrapper stored in the result field to mark "completed with failure" while
 * carrying the cause — it distinguishes a failure from a successful result
 * that merely happens to be a Throwable.
 */
private static final class CauseHolder {
    final Throwable cause;
    private CauseHolder(Throwable cause) {
        this.cause = cause;
    }
}
}
| |
package org.carlspring.strongbox.controllers.configuration;
import org.carlspring.strongbox.config.IntegrationTest;
import org.carlspring.strongbox.forms.storage.routing.RoutingRuleForm;
import org.carlspring.strongbox.forms.storage.routing.RoutingRuleRepositoryForm;
import org.carlspring.strongbox.rest.common.RestAssuredBaseTest;
import org.carlspring.strongbox.storage.routing.MutableRoutingRule;
import org.carlspring.strongbox.storage.routing.MutableRoutingRules;
import org.carlspring.strongbox.storage.routing.RoutingRuleTypeEnum;
import java.util.UUID;
import com.google.common.collect.Lists;
import io.restassured.module.mockmvc.response.ValidatableMockMvcResponse;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import static org.assertj.core.api.Assertions.assertThat;
import static org.carlspring.strongbox.controllers.configuration.RoutingConfigurationController.*;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
/**
* @author Pablo Tirado
*/
@IntegrationTest
public class RoutingConfigurationControllerTestIT
        extends RestAssuredBaseTest
{

    /** Group repository id used by all rules created in this test class. */
    public static final String GROUP_RELEASES_2 = "group-releases-2";

    @Override
    @BeforeEach
    public void init()
            throws Exception
    {
        super.init();
        setContextBaseUrl("/api/configuration/strongbox/routing/rules");
    }

    /**
     * Removes every routing rule whose group repository id contains
     * {@link #GROUP_RELEASES_2} (this also matches the "-updated" variant
     * produced by the update test), keeping tests independent of one another.
     */
    @AfterEach
    public void cleanup()
    {
        MutableRoutingRules routingRules = getRoutingRules();
        routingRules.getRules()
                    .stream()
                    .filter(r -> r.getGroupRepositoryId().contains(GROUP_RELEASES_2))
                    .forEach(r -> removeRoutingRule(MediaType.APPLICATION_JSON_VALUE, r.getUuid()));
    }

    @ParameterizedTest
    @ValueSource(strings = { MediaType.APPLICATION_JSON_VALUE,
                             MediaType.TEXT_PLAIN_VALUE })
    void testGetRoutingRule(String acceptHeader)
    {
        addRoutingRule(acceptHeader, true);
        MutableRoutingRules routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        // The freshly added rule is appended at the end; fetch it again by UUID.
        MutableRoutingRule rule1 = routingRules.getRules().get(routingRules.getRules().size() - 1);
        MutableRoutingRule rule2 = getRoutingRule(rule1.getUuid());
        assertThat(rule2).isNotNull();
        assertThat(rule1.getUuid()).isEqualTo(rule2.getUuid());
    }

    @ParameterizedTest
    @ValueSource(strings = { MediaType.APPLICATION_JSON_VALUE,
                             MediaType.TEXT_PLAIN_VALUE })
    void testAddAndRemoveRoutingRuleWithRepositories(String acceptHeader)
    {
        addRoutingRule(acceptHeader, true);
        MutableRoutingRules routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        // One pre-existing rule plus the one just added.
        assertThat(routingRules.getRules()).hasSize(2);
        MutableRoutingRule lastRule = routingRules.getRules().get(routingRules.getRules().size() - 1);
        assertThat(lastRule.getRepositories()).hasSize(2);
        removeRoutingRule(acceptHeader, lastRule.getUuid());
        routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(1);
    }

    @ParameterizedTest
    @ValueSource(strings = { MediaType.APPLICATION_JSON_VALUE,
                             MediaType.TEXT_PLAIN_VALUE })
    void testAddAndRemoveRoutingRuleWithoutRepositories(String acceptHeader)
    {
        addRoutingRule(acceptHeader, false);
        MutableRoutingRules routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(2);
        MutableRoutingRule lastRule = routingRules.getRules().get(routingRules.getRules().size() - 1);
        // Added without repositories, so the list must come back empty.
        assertThat(lastRule.getRepositories()).isEmpty();
        removeRoutingRule(acceptHeader, lastRule.getUuid());
        routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(1);
    }

    @ParameterizedTest
    @ValueSource(strings = { MediaType.APPLICATION_JSON_VALUE,
                             MediaType.TEXT_PLAIN_VALUE })
    void testShouldNotAddAcceptedRuleSet(String acceptHeader)
    {
        shouldNotAddRoutingRule(acceptHeader);
    }

    @ParameterizedTest
    @ValueSource(strings = { MediaType.APPLICATION_JSON_VALUE,
                             MediaType.TEXT_PLAIN_VALUE })
    void testAddAndUpdateAndRemoveRoutingRule(String acceptHeader)
    {
        addRoutingRule(acceptHeader, true);
        MutableRoutingRules routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(2);
        MutableRoutingRule lastRule = routingRules.getRules().get(routingRules.getRules().size() - 1);
        updateRoutingRule(acceptHeader, lastRule.getUuid());
        routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(2);
        // Verify every updated field actually changed.
        lastRule = routingRules.getRules().get(routingRules.getRules().size() - 1);
        assertThat(lastRule.getGroupRepositoryId()).isEqualTo("group-releases-2-updated");
        assertThat(lastRule.getPattern()).isEqualTo(".*some.test-updated");
        assertThat(lastRule.getType()).isEqualTo(RoutingRuleTypeEnum.DENY.getType());
        removeRoutingRule(acceptHeader, lastRule.getUuid());
        routingRules = getRoutingRules();
        assertThat(routingRules).isNotNull();
        assertThat(routingRules.getRules()).hasSize(1);
    }

    /**
     * PUTs a new ACCEPT routing rule (optionally with two repositories) and
     * expects a 200 response carrying the "added" confirmation message.
     */
    private void addRoutingRule(String acceptHeader,
                                boolean withRepositories)
    {
        RoutingRuleForm routingRuleForm = new RoutingRuleForm();
        routingRuleForm.setPattern(".*some.test");
        routingRuleForm.setType(RoutingRuleTypeEnum.ACCEPT);
        routingRuleForm.setGroupRepositoryId(GROUP_RELEASES_2);
        if (withRepositories)
        {
            RoutingRuleRepositoryForm routingRuleRepositoryForm = new RoutingRuleRepositoryForm();
            routingRuleRepositoryForm.setRepositoryId("releases-with-trash");
            RoutingRuleRepositoryForm routingRuleRepositoryForm2 = new RoutingRuleRepositoryForm();
            routingRuleRepositoryForm2.setRepositoryId("releases-with-redeployment");
            routingRuleForm.setRepositories(Lists.newArrayList(routingRuleRepositoryForm, routingRuleRepositoryForm2));
        }
        mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
               .accept(acceptHeader)
               .body(routingRuleForm)
               .when()
               .put(getContextBaseUrl())
               .then()
               .statusCode(HttpStatus.OK.value())
               .body(containsString(SUCCESSFUL_ADD_ROUTING_RULE));
    }

    /**
     * PUTs an update for the rule identified by {@code uuid}, switching it to a
     * DENY rule with "-updated" suffixed fields, and expects a 200 response.
     */
    private void updateRoutingRule(String acceptHeader,
                                   UUID uuid)
    {
        String url = getContextBaseUrl() + "/" + uuid.toString();
        RoutingRuleForm routingRuleForm = new RoutingRuleForm();
        routingRuleForm.setPattern(".*some.test-updated");
        routingRuleForm.setType(RoutingRuleTypeEnum.DENY);
        routingRuleForm.setGroupRepositoryId(GROUP_RELEASES_2 + "-updated");
        RoutingRuleRepositoryForm routingRuleRepositoryForm = new RoutingRuleRepositoryForm();
        routingRuleRepositoryForm.setRepositoryId("releases-with-trash");
        RoutingRuleRepositoryForm routingRuleRepositoryForm2 = new RoutingRuleRepositoryForm();
        routingRuleRepositoryForm2.setRepositoryId("releases-with-redeployment");
        routingRuleForm.setRepositories(Lists.newArrayList(routingRuleRepositoryForm, routingRuleRepositoryForm2));
        mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
               .accept(acceptHeader)
               .body(routingRuleForm)
               .when()
               .put(url)
               .then()
               .statusCode(HttpStatus.OK.value())
               // Fixed: a 200 response carries the success message. Asserting the
               // failure message here contradicted both the OK status and the
               // caller's follow-up assertions that the rule was actually updated.
               .body(containsString(SUCCESSFUL_UPDATE_ROUTING_RULE));
    }

    /**
     * PUTs a rule with blank pattern/repository ids and expects a 400 response
     * with per-field validation errors.
     */
    private void shouldNotAddRoutingRule(String acceptHeader)
    {
        RoutingRuleForm routingRuleForm = new RoutingRuleForm();
        routingRuleForm.setPattern("");
        routingRuleForm.setType(RoutingRuleTypeEnum.ACCEPT);
        RoutingRuleRepositoryForm routingRuleRepositoryForm = new RoutingRuleRepositoryForm();
        routingRuleRepositoryForm.setRepositoryId("");
        RoutingRuleRepositoryForm routingRuleRepositoryForm2 = new RoutingRuleRepositoryForm();
        routingRuleRepositoryForm2.setRepositoryId("");
        routingRuleForm.setRepositories(Lists.newArrayList(routingRuleRepositoryForm, routingRuleRepositoryForm2));
        ValidatableMockMvcResponse response = mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
                                                     .accept(acceptHeader)
                                                     .body(routingRuleForm)
                                                     .when()
                                                     .put(getContextBaseUrl())
                                                     .then()
                                                     .statusCode(HttpStatus.BAD_REQUEST.value())
                                                     .body(containsString(FAILED_ADD_ROUTING_RULE_FORM_ERRORS));
        // The structured error body is only produced for JSON responses.
        if (acceptHeader.equals(MediaType.APPLICATION_JSON_VALUE))
        {
            response.body("errors", hasSize(equalTo(5)));
            response.body(containsString("must not be blank"));
            response.body(containsString("A pattern must be specified"));
        }
    }

    /**
     * DELETEs the rule identified by {@code uuid} and expects a 200 response
     * with the "removed" confirmation message.
     */
    private void removeRoutingRule(String acceptHeader,
                                   UUID uuid)
    {
        String url = getContextBaseUrl() + "/" + uuid.toString();
        mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
               .accept(acceptHeader)
               .when()
               .delete(url)
               .then()
               .statusCode(HttpStatus.OK.value())
               .body(containsString(SUCCESSFUL_REMOVE_ROUTING_RULE));
    }

    /** Fetches the complete routing rule configuration as JSON. */
    private MutableRoutingRules getRoutingRules()
    {
        String url = getContextBaseUrl();
        return mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
                      .accept(MediaType.APPLICATION_JSON_VALUE)
                      .when()
                      .get(url)
                      .then()
                      .statusCode(HttpStatus.OK.value())
                      .extract()
                      .as(MutableRoutingRules.class);
    }

    /** Fetches a single routing rule by its UUID as JSON. */
    private MutableRoutingRule getRoutingRule(UUID uuid)
    {
        String url = getContextBaseUrl();
        return mockMvc.contentType(MediaType.APPLICATION_JSON_VALUE)
                      .accept(MediaType.APPLICATION_JSON_VALUE)
                      .when()
                      .get(url + '/' + uuid.toString())
                      .then()
                      .statusCode(HttpStatus.OK.value())
                      .extract()
                      .as(MutableRoutingRule.class);
    }
}
| |
package de.cinovo.cloudconductor.server.handler;
import de.cinovo.cloudconductor.api.model.Repo;
import de.cinovo.cloudconductor.api.model.RepoMirror;
import de.cinovo.cloudconductor.server.dao.IRepoDAO;
import de.cinovo.cloudconductor.server.dao.IRepoMirrorDAO;
import de.cinovo.cloudconductor.server.dao.ITemplateDAO;
import de.cinovo.cloudconductor.server.model.ERepo;
import de.cinovo.cloudconductor.server.model.ERepoMirror;
import de.cinovo.cloudconductor.server.repo.indexer.IRepoIndexer;
import de.cinovo.cloudconductor.server.repo.indexer.IndexFileIndexer;
import de.cinovo.cloudconductor.server.repo.indexer.RPMIndexer;
import de.cinovo.cloudconductor.server.repo.provider.AWSS3Provider;
import de.cinovo.cloudconductor.server.repo.provider.FileProvider;
import de.cinovo.cloudconductor.server.repo.provider.HTTPProvider;
import de.cinovo.cloudconductor.server.repo.provider.IRepoProvider;
import de.cinovo.cloudconductor.server.tasks.IServerRepoTaskHandler;
import de.taimos.restutils.RESTAssert;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;
/**
* Copyright 2017 Cinovo AG<br>
* <br>
*
* @author psigloch
*/
@Service
public class RepoHandler {

    @Autowired
    private IRepoMirrorDAO repoMirrorDAO;
    @Autowired
    private IRepoDAO repoDAO;
    @Autowired
    private ITemplateDAO templateDAO;
    @Autowired
    private IServerRepoTaskHandler repoTaskHandler;

    /**
     * Looks up the repo by name and returns a provider for its primary mirror.
     *
     * @param repoName the name of the repo
     * @return provider for given repo, or {@code null} if the repo does not
     *         exist or no provider could be created for its primary mirror
     */
    @Transactional
    public IRepoProvider findRepoProvider(String repoName) {
        ERepo erepo = this.repoDAO.findByName(repoName);
        if (erepo == null) {
            return null;
        }
        ERepoMirror primaryMirror = this.findPrimaryMirror(erepo);
        return this.findRepoProvider(primaryMirror, erepo);
    }

    /**
     * @param mirror the mirror you want the repo provider for
     * @param erepo the repo of the mirror
     * @return the repo provider matching the mirror's provider type, or
     *         {@code null} if the mirror is null, the type is unknown, or
     *         construction of the provider failed
     */
    public IRepoProvider findRepoProvider(ERepoMirror mirror, ERepo erepo) {
        if (mirror == null) {
            return null;
        }
        try {
            switch (mirror.getProviderType()) {
                case AWSS3:
                    return new AWSS3Provider(mirror, erepo);
                case FILE:
                    return new FileProvider(mirror, erepo);
                case HTTP:
                    return new HTTPProvider(mirror, erepo);
                default:
                    return null;
            }
        } catch (Exception e) {
            // NOTE(review): provider construction failures are silently mapped
            // to "no provider" and the cause is dropped — consider logging it.
            return null;
        }
    }

    /**
     * @param mirror the mirror you want the repo indexer for
     * @return the repo indexer matching the mirror's indexer type, or
     *         {@code null} if the mirror is null or the type is unknown
     */
    public IRepoIndexer findRepoIndexer(ERepoMirror mirror) {
        if (mirror == null) {
            return null;
        }
        switch (mirror.getIndexerType()) {
            case FILE:
                return new IndexFileIndexer();
            case RPM:
                return new RPMIndexer();
            default:
                return null;
        }
    }

    /**
     * Persists a new mirror, makes it the repo's primary mirror when it is the
     * first one, and triggers re-indexing of the repo.
     * NOTE(review): not annotated @Transactional although it chains two
     * transactional operations — confirm the intended transaction boundary.
     *
     * @param newMirror the repo mirror to save
     * @return the saved repo mirror
     */
    public ERepoMirror createMirror(RepoMirror newMirror) {
        ERepoMirror emirror = this.createEntity(newMirror);
        ERepo erepo = this.updatePrimaryMirrorOfRepo(newMirror.getRepo(), emirror.getId());
        // trigger repo index tasks
        this.repoTaskHandler.newRepo(erepo);
        return emirror;
    }

    /**
     * Makes the given mirror the repo's primary mirror, but only when it is the
     * repo's sole mirror; otherwise the repo is returned unchanged.
     *
     * @param repoName the repo name
     * @param id the mirror id
     * @return the saved repo
     */
    @Transactional
    public ERepo updatePrimaryMirrorOfRepo(String repoName, Long id) {
        RESTAssert.assertNotEmpty(repoName);
        RESTAssert.assertNotNull(id);
        ERepo erepo = this.repoDAO.findByName(repoName);
        Long mirrorCount = this.repoMirrorDAO.countForRepo(erepo.getId());
        // set to primary if this is the first mirror
        if (mirrorCount == 1) {
            erepo.setPrimaryMirrorId(id);
            erepo = this.repoDAO.save(erepo);
        }
        return erepo;
    }

    /**
     * Creates and persists a mirror entity from the API model.
     *
     * @param mirror the data
     * @return the saved entity
     * @throws WebApplicationException on error (404 when the referenced repo
     *         does not exist)
     */
    @Transactional
    public ERepoMirror createEntity(RepoMirror mirror) throws WebApplicationException {
        String repoName = mirror.getRepo();
        ERepo erepo = this.repoDAO.findByName(repoName);
        if (erepo == null) {
            throw new NotFoundException(String.format("Repo '%s' not found!", repoName));
        }
        ERepoMirror emirror = new ERepoMirror();
        this.fillFields(emirror, erepo, mirror);
        RESTAssert.assertNotNull(emirror);
        return this.repoMirrorDAO.save(emirror);
    }

    /**
     * Updates an existing mirror from the API model and triggers re-indexing
     * of the owning repo.
     *
     * @param updatedMirror the updated mirror
     * @return the saved mirror
     */
    public ERepoMirror updateMirror(RepoMirror updatedMirror) {
        ERepoMirror emirror = this.repoMirrorDAO.findById(updatedMirror.getId());
        RESTAssert.assertNotNull(emirror);
        ERepoMirror saved = this.updateEntity(emirror, updatedMirror);
        RESTAssert.assertNotNull(saved);
        ERepo erepo = this.repoDAO.findByName(updatedMirror.getRepo());
        this.repoTaskHandler.newRepo(erepo);
        return saved;
    }

    /**
     * Copies the API model's fields onto the entity and persists it.
     *
     * @param emirror the entity to update
     * @param mirror the update data
     * @return the updated, saved entity
     * @throws WebApplicationException on error (404 when the referenced repo
     *         does not exist)
     */
    @Transactional
    public ERepoMirror updateEntity(ERepoMirror emirror, RepoMirror mirror) throws WebApplicationException {
        String repoName = mirror.getRepo();
        ERepo erepo = this.repoDAO.findByName(repoName);
        if (erepo == null) {
            throw new NotFoundException(String.format("Repo '%s' not found!", repoName));
        }
        ERepoMirror entity = this.fillFields(emirror, erepo, mirror);
        RESTAssert.assertNotNull(entity);
        return this.repoMirrorDAO.save(entity);
    }

    /**
     * @param repo the repo entity
     * @return whether the repo is in use by at least one template
     */
    public boolean checkIfInUse(ERepo repo) {
        return this.templateDAO.countUsingRepo(repo) > 0;
    }

    /**
     * Creates and persists a new repo (the name must not already exist) and
     * triggers its indexing.
     *
     * @param newRepo the new repository
     * @return the saved repository
     */
    @Transactional
    public ERepo createRepo(Repo newRepo) {
        RESTAssert.assertFalse(this.repoDAO.exists(newRepo.getName()));
        ERepo eRepo = new ERepo();
        this.fillFields(eRepo, newRepo);
        ERepo savedRepo = this.repoDAO.save(eRepo);
        RESTAssert.assertNotNull(savedRepo);
        this.repoTaskHandler.newRepo(savedRepo);
        return savedRepo;
    }

    /**
     * Updates an existing repo from the API model and triggers re-indexing.
     *
     * @param updatedRepo the updated Repo
     */
    @Transactional
    public void updateRepo(Repo updatedRepo) {
        ERepo erepo = this.repoDAO.findById(updatedRepo.getId());
        RESTAssert.assertNotNull(erepo);
        ERepo eRepo = this.fillFields(erepo, updatedRepo);
        ERepo saved = this.repoDAO.save(eRepo);
        RESTAssert.assertNotNull(saved);
        this.repoTaskHandler.newRepo(saved);
    }

    /**
     * Deletes a repo and all its mirrors. Fails with 409 CONFLICT while any
     * template still references the repo.
     * NOTE(review): unlike the other write paths this method is not
     * annotated @Transactional — confirm mirror + repo deletion is atomic.
     *
     * @param erepo the repo to delete
     */
    public void deleteEntity(ERepo erepo) {
        if (this.checkIfInUse(erepo)) {
            throw new WebApplicationException(String.format("Repository '%s' is still used by a template!", erepo.getName()), Status.CONFLICT);
        }
        long id = erepo.getId();
        this.repoMirrorDAO.deleteForRepo(erepo);
        this.repoDAO.delete(erepo);
        this.repoTaskHandler.deleteRepo(id);
    }

    /**
     * @param repo the repo
     * @return the primary mirror of that repo, or {@code null} if none is set
     */
    public ERepoMirror findPrimaryMirror(ERepo repo) {
        if (repo.getPrimaryMirrorId() == null) {
            return null;
        }
        return this.repoMirrorDAO.findById(repo.getPrimaryMirrorId());
    }

    // Copies the API repo model onto the entity; only non-empty names overwrite.
    private ERepo fillFields(ERepo eRepo, Repo repo) {
        if ((repo.getName() != null) && !repo.getName().isEmpty()) {
            eRepo.setName(repo.getName());
        }
        eRepo.setPrimaryMirrorId(repo.getPrimaryMirror());
        // NOTE(review): clears the primary-mirror reference when it cannot be
        // resolved, but only when the model carries a mirrors list; the list is
        // otherwise unused here — confirm the getMirrors() != null guard is intended.
        if ((repo.getMirrors() != null) && (this.findPrimaryMirror(eRepo) == null)) {
            eRepo.setPrimaryMirrorId(null);
        }
        return eRepo;
    }

    // Copies all API mirror fields onto the entity and links it to the repo.
    private ERepoMirror fillFields(ERepoMirror emirror, ERepo erepo, RepoMirror mirror) {
        emirror.setRepoId(erepo.getId());
        emirror.setAccessKeyId(mirror.getAccessKeyId());
        emirror.setAwsRegion(mirror.getAwsRegion());
        emirror.setBasePath(mirror.getBasePath());
        emirror.setBucketName(mirror.getBucketName());
        emirror.setDescription(mirror.getDescription());
        emirror.setIndexerType(mirror.getIndexerType());
        emirror.setPath(mirror.getPath());
        emirror.setProviderType(mirror.getProviderType());
        emirror.setSecretKey(mirror.getSecretKey());
        return emirror;
    }
}
| |
/*
*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.datamodel;
import java.util.Arrays;
import java.util.List;
import junit.framework.Test;
import org.junit.Assert;
import org.netbeans.junit.NbModuleSuite;
import org.netbeans.junit.NbTestCase;
import org.openide.util.Exceptions;
/**
* Tests for validation on each correlation attribute type.
*/
public class CorrelationAttributeNormalizerTest extends NbTestCase {
/**
 * Builds the NetBeans module test suite for this test case, enabling all
 * clusters and modules.
 */
public static Test suite() {
    return NbModuleSuite.createConfiguration(CorrelationAttributeNormalizerTest.class)
            .clusters(".*")
            .enableModules(".*")
            .suite();
}
/**
 * Constructs a test case with the given name, as required by NbTestCase.
 *
 * @param name the test method name
 */
public CorrelationAttributeNormalizerTest(String name) {
    super(name);
}
/**
 * Verifies MD5 normalization for the FILES correlation type: valid hashes
 * pass through, upper-case hashes are lower-cased, and malformed, empty, and
 * null values are rejected with CorrelationAttributeNormalizationException.
 */
public void testValidateMd5() {
    final String aValidHash = "e34a8899ef6468b74f8a1048419ccc8b"; //should pass
    final String anInValidHash = "e34asdfa8899ef6468b74f8a1048419ccc8b"; //should fail
    final String aValidHashWithCaps = "E34A8899EF6468B74F8A1048419CCC8B"; //should pass and be lowered
    final String emptyHash = ""; //should fail
    final String nullHash = null; //should fail
    final int FILES_TYPE_ID = CorrelationAttributeInstance.FILES_TYPE_ID;
    try {
        assertTrue("This hash should just work", CorrelationAttributeNormalizer.normalize(FILES_TYPE_ID, aValidHash).equals(aValidHash));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    try {
        // Normalization lower-cases hex digests.
        assertTrue("This hash just needs to be converted to lower case", CorrelationAttributeNormalizer.normalize(CorrelationAttributeInstance.FILES_TYPE_ID, aValidHashWithCaps).equals(aValidHash));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    try {
        CorrelationAttributeNormalizer.normalize(FILES_TYPE_ID, anInValidHash);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try {
        CorrelationAttributeNormalizer.normalize(FILES_TYPE_ID, emptyHash);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try {
        CorrelationAttributeNormalizer.normalize(FILES_TYPE_ID, nullHash);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
}
// Shared assertion/failure messages reused by the test methods in this class.
private static final String WE_EXPECT_AN_EXCEPTION_HERE = "We expect an exception here.";
private static final String THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION = "This should have thrown an exception.";
/**
 * Immutable record pairing a raw domain input with its expected
 * normalization outcome (either a result domain or an expected failure).
 */
private static class DomainData {
    // The raw input handed to the normalizer.
    private final String originalString;
    // The expected normalized domain; null when parsing is expected to fail.
    private final String resultDomain;
    // Whether the normalizer is expected to accept the input.
    private final boolean shouldParse;
    /** Creates a record for an input the normalizer should reject. */
    static DomainData fail(String originalString) {
        return new DomainData(originalString, null, false);
    }
    /** Creates a record for an input expected to normalize to {@code resultDomain}. */
    static DomainData pass(String originalString, String resultDomain) {
        return new DomainData(originalString, resultDomain, true);
    }
    private DomainData(String originalString, String resultDomain, boolean shouldParse) {
        this.originalString = originalString;
        this.resultDomain = resultDomain;
        this.shouldParse = shouldParse;
    }
    String getOriginalString() {
        return originalString;
    }
    String getResultDomain() {
        return resultDomain;
    }
    boolean shouldParse() {
        return shouldParse;
    }
}
// NOTE(review): this constant is not referenced in the visible portion of the
// file; confirm whether it is used further down before removing it.
private static final String THIS_DOMAIN_SHOULD_PASS = "This domain should pass.";
// Table of domain inputs paired with the expected normalization outcome:
// pass(input, expected) entries must normalize; fail(input) entries must throw.
private static final List<DomainData> DOMAIN_DATA = Arrays.asList(
        DomainData.pass("www.test.com", "test.com"),
        DomainData.fail("http://www.test.com"),
        DomainData.pass("test.com", "test.com"),
        DomainData.fail("http://1270.0.1"),
        DomainData.fail("?>\\/)(*&.com"),
        DomainData.fail(null),
        DomainData.fail(""),
        DomainData.fail("HTTP://tests.com"),
        DomainData.fail("http://www.test.com/aPage?aQuestion=aParam&anotherQuestion=anotherParam"),
        DomainData.pass("WWW.TEST.COM", "test.com"),
        DomainData.pass("TEST.COM", "test.com")
);
/**
 * Runs the normalizer over every entry in DOMAIN_DATA: entries marked
 * shouldParse must normalize to their expected domain; the rest must throw
 * CorrelationAttributeNormalizationException.
 */
public void testValidateDomain() {
    final int DOMAIN_TYPE_ID = CorrelationAttributeInstance.DOMAIN_TYPE_ID;
    for (DomainData item : DOMAIN_DATA) {
        final String input = item.getOriginalString();
        if (!item.shouldParse()) {
            // Rejection case: normalize must throw.
            try {
                CorrelationAttributeNormalizer.normalize(DOMAIN_TYPE_ID, input);
                fail(String.format("Original string: '%s' should have failed to parse.", item.getOriginalString()));
            } catch (CorrelationAttributeNormalizationException ex) {
                assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
            }
            continue;
        }
        // Acceptance case: normalize must return the expected domain.
        final String expected = item.getResultDomain();
        try {
            String normalizedDomain = CorrelationAttributeNormalizer.normalize(DOMAIN_TYPE_ID, input);
            assertTrue(String.format("Expected domain '%s' to be normalized, but was null.", item.getOriginalString()), normalizedDomain != null);
            assertTrue(String.format("Was unable to normalize domain '%s' to '%s' but received %s instead.", input, expected, normalizedDomain), normalizedDomain.equals(expected));
        } catch (CorrelationAttributeNormalizationException ex) {
            Exceptions.printStackTrace(ex);
            fail(String.format("Unable to properly parse %s to %s. Received: %s", input, expected, ex.getMessage()));
        }
    }
}
/**
 * Verifies email normalization: valid addresses pass (upper case is lowered),
 * while empty, null, and non-address strings are rejected with
 * CorrelationAttributeNormalizationException.
 */
public void testValidateEmail() {
    // NOTE(review): "cipehr" looks like a typo for "cipher", but it is harmless
    // here — the test only compares the normalizer's output to the same literal.
    final String goodEmailOne = "bsweeney@cipehrtechsolutions.com"; //should pass
    final String goodEmailTwo = "BSWEENEY@ciphertechsolutions.com"; //should pass and be lowered
    final String badEmailThree = ""; //should fail
    final String badEmailFour = null; //should fail
    final String badEmailFive = "asdf"; //should fail
    final String goodEmailSix = "asdf@asdf"; //TODO looks bad but the lib accepts it...
    final String badEmailSeven = "asdf.asdf"; //should fail
    final int EMAIL_TYPE_ID = CorrelationAttributeInstance.EMAIL_TYPE_ID;
    try {
        assertTrue("This email should pass.", CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, goodEmailOne).equals(goodEmailOne));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    try {
        // Normalization lower-cases the address.
        assertTrue("This email should pass.", CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, goodEmailTwo).equals(goodEmailTwo.toLowerCase()));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    try {
        CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, badEmailThree);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try {
        CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, badEmailFour);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try {
        CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, badEmailFive);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try { //TODO consider a better library?
        assertTrue("This email should pass", CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, goodEmailSix).equals(goodEmailSix));
    } catch (CorrelationAttributeNormalizationException ex) {
        fail(ex.getMessage());
    }
    try {
        CorrelationAttributeNormalizer.normalize(EMAIL_TYPE_ID, badEmailSeven);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
}
/**
 * Tests phone number normalization: well-formed numbers (with or without
 * punctuation, spaces, or a leading "+") normalize to a canonical digit
 * string, while clearly non-numeric input is rejected with a
 * {@link CorrelationAttributeNormalizationException}.
 */
public void testValidatePhone() {
    final String goodPnOne = "19784740486";
    final String goodPnTwo = "1(978) 474-0486";
    final String goodPnThree = "+19784740486";
    final String goodPnFour = "1 978-474-0486";
    final String badPnFive = "9879879819784740486";
    final String goodPnSix = "+1(978) 474-0486";
    final String goodPnSeven = "+1(978) 474-0486";
    final String badPnEight = "asdfasdfasdf";
    final String badPnNine = "asdf19784740486adsf";
    final int PHONE_TYPE_ID = CorrelationAttributeInstance.PHONE_TYPE_ID;
    // a bare digit string is already canonical and should round-trip
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnOne).equals(goodPnOne));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    // punctuation and spaces should be stripped down to the canonical form
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnTwo).equals(goodPnOne));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    // a leading "+" is preserved
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnThree).equals(goodPnThree));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnFour).equals(goodPnOne));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    // too-long digit strings are not yet rejected; keep the fail commented out
    try {
        CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, badPnFive);
        //fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION); //this will eventually pass when we do a better job at this
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnSix).equals(goodPnThree));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    // NOTE(review): goodPnSeven is currently identical to goodPnSix; confirm
    // whether a distinct formatting variant was intended here.
    try {
        assertTrue(THIS_PHONE_NUMBER_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, goodPnSeven).equals(goodPnThree));
    } catch (CorrelationAttributeNormalizationException ex) {
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
    // purely alphabetic input must be rejected
    try {
        CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, badPnEight);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
    // digits embedded in junk text must also be rejected
    try {
        CorrelationAttributeNormalizer.normalize(PHONE_TYPE_ID, badPnNine);
        fail(THIS_SHOULD_HAVE_THROWN_AN_EXCEPTION);
    } catch (CorrelationAttributeNormalizationException ex) {
        assertTrue(WE_EXPECT_AN_EXCEPTION_HERE, true);
    }
}
private static final String THIS_PHONE_NUMBER_SHOULD_PASS = "This phone number should pass.";
/**
 * Tests USB device ID normalization. Only the canonical "VID:PID" form is
 * exercised for now; the remaining cases stay commented out until USB ID
 * validation does something more interesting (see TODO below).
 */
public void testValidateUsbId() {
    //TODO will need to be updated once usb validation does something interesting
    final String goodIdOne = "0202:AAFF"; //should pass
    /*final String goodIdTwo = "0202:aaff"; //should pass
    final String badIdThree = "0202:axxf"; //should fail
    final String badIdFour = ""; //should fail
    final String badIdFive = null; //should fail
    final String goodIdSix = "0202 AAFF"; //should pass
    final String goodIdSeven = "0202AAFF"; //should pass
    final String goodIdEight = "0202-AAFF"; //should pass*/
    final int USBID_TYPE_ID = CorrelationAttributeInstance.USBID_TYPE_ID;
    try {
        assertTrue(THIS_USB_ID_SHOULD_PASS, CorrelationAttributeNormalizer.normalize(USBID_TYPE_ID, goodIdOne).equals(goodIdOne));
    } catch (CorrelationAttributeNormalizationException ex) {
        // log and fail via the statically imported helpers, matching the
        // style of every other test method in this class
        Exceptions.printStackTrace(ex);
        fail(ex.getMessage());
    }
}
private static final String THIS_USB_ID_SHOULD_PASS = "This USB ID should pass.";
}
| |
/*******************************************************************************
* Copyright (C) 2017 International Business Machines Corporation
* All Rights Reserved
*******************************************************************************/
package com.ibm.streamsx.health.vines;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.io.ObjectStreamException;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.util.List;
import java.util.Locale;

import org.apache.commons.lang.math.NumberUtils;
import org.apache.log4j.Logger;

import com.google.common.io.Resources;
import com.ibm.streamsx.health.ingest.types.model.Device;
import com.ibm.streamsx.health.ingest.types.model.Observation;
import com.ibm.streamsx.health.ingest.types.model.Reading;
import com.ibm.streamsx.health.ingest.types.model.ReadingSource;
import com.ibm.streamsx.health.ingest.types.model.ReadingType;
import com.ibm.streamsx.health.ingest.types.model.ReadingTypeSystem;
import com.ibm.streamsx.health.vines.model.Channel;
import com.ibm.streamsx.health.vines.model.ITerm;
import com.ibm.streamsx.health.vines.model.ITermValue;
import com.ibm.streamsx.health.vines.model.Patient;
import com.ibm.streamsx.health.vines.model.Term;
import com.ibm.streamsx.health.vines.model.TermArray;
import com.ibm.streamsx.health.vines.model.TermValueMap;
import com.ibm.streamsx.health.vines.model.TermValueString;
import com.ibm.streamsx.health.vines.model.Terms;
import com.ibm.streamsx.health.vines.model.Vines;
import com.ibm.streamsx.health.vines.model.WaveformHelper;
import com.ibm.streamsx.topology.function.Function;
import com.ibm.streamsx.topology.function.Supplier;
/**
 * Converts a raw {@link Vines} message into ingest-model {@link Observation}s.
 *
 * <p>Vines messages come in two flavors: vitals messages (one Reading per
 * numeric term, possibly across several channels) and waveform messages
 * (a single channel carrying an MDC_ATTR_WAV sample array). Parse errors are
 * accumulated on the returned {@link VinesParserResult} instead of being
 * thrown, so one bad message never kills the stream.
 */
public class VinesToObservationParser implements Function<Vines, VinesParserResult> {

    private static final long serialVersionUID = 1L;

    /** Source type recorded on every generated {@code ReadingSource}. */
    public static final String SOURCE_TYPE = "channel";

    /** Code-system name used when no platform code mapping is available. */
    public static final String VINES_SYSTEM_NAME = "vines";

    /** Classpath resource holding the IEEE-to-platform code mapping. */
    private static final String MAPPING_FILE = "ieee2loinc.csv";

    private static final Logger logger = Logger.getLogger(VinesToObservationParser.class);

    /**
     * Timestamp pattern accepting 1 to 7 fractional-second digits and an
     * optional zone offset ({@code X}); the bracketed optional sections let a
     * single formatter parse every timestamp variant observed in Vines data.
     */
    static final String DATE_TIME_PATTERN = ""
            + "[yyyy-MM-dd'T'HH:mm:ss.SSSSSSS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss.SSSSSS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss.SSSSS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss.SSSS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss.SSS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss.SS[X]]"
            + "[yyyy-MM-dd'T'HH:mm:ss[.S][X]]";

    // DateTimeFormatter is not Serializable; both fields are rebuilt in
    // readResolve() after deserialization, so they must be transient.
    private transient DateTimeFormatter formatter;
    private transient VinesToStreamsCodeLookupTable lookupTable;

    private Supplier<String> mappingEnabledSupplier;
    private boolean isMappingEnabled;

    /**
     * Creates a parser.
     *
     * @param mappingEnabledSupplier supplies "true"/"false" (evaluated with
     *            {@code Boolean.parseBoolean}) to enable IEEE-to-platform
     *            code mapping
     */
    public VinesToObservationParser(Supplier<String> mappingEnabledSupplier) {
        this.mappingEnabledSupplier = mappingEnabledSupplier;
    }

    /**
     * Rebuilds the non-serializable state (timestamp formatter and code
     * lookup table) after deserialization.
     *
     * @return this parser instance
     * @throws ObjectStreamException if the mapping table cannot be loaded
     */
    public Object readResolve() throws ObjectStreamException {
        formatter = new DateTimeFormatterBuilder()
                .appendPattern(DATE_TIME_PATTERN)
                .toFormatter(Locale.ENGLISH);
        isMappingEnabled = Boolean.parseBoolean(mappingEnabledSupplier.get());
        try {
            InputStream inputStream = Resources.getResource(MAPPING_FILE).openStream();
            lookupTable = new VinesToStreamsCodeLookupTable(inputStream);
        } catch (Exception e) {
            // ObjectStreamException is abstract and has no (message, cause)
            // constructor; use the concrete InvalidObjectException and chain
            // the real cause instead of hiding it in a suppressed list.
            InvalidObjectException ose = new InvalidObjectException(
                    "Unable to load code mapping table: " + MAPPING_FILE);
            ose.initCause(e);
            throw ose;
        }
        return this;
    }

    @Override
    public VinesParserResult apply(Vines v) {
        VinesParserResult parserResult = new VinesParserResult(v);
        try {
            // determine if message is a vitals or waveform message
            Terms terms = v.getData().getBody().getTerms();
            // waveform messages only contain a single channel,
            // vitals messages can contain more than one channel
            if (terms.size() > 1) {
                // more than 1 channel indicates this is a vitals message
                mapVitalMessage(v, parserResult);
            } else if (terms.size() == 1) {
                // may be either vitals or waveform so check for the
                // MDC_ATTR_WAV term, which is always present in a
                // waveform message
                Channel channel = terms.get(terms.getChannelNames().get(0));
                if (channel.containsKey("MDC_ATTR_WAV")) {
                    mapWaveformMessage(v, parserResult);
                } else {
                    mapVitalMessage(v, parserResult);
                }
            }
        } catch (Exception e) {
            String msg = "Error parsing Vines message: " + v.getRawMessage();
            // log message and stack trace together instead of printing to stderr
            logger.error(msg, e);
            parserResult.addErrorMesage(msg);
        }
        return parserResult;
    }

    /**
     * Maps a vitals message: one Observation per numeric term in every
     * channel. Terms without a numeric value are skipped; timestamp parse
     * failures are recorded on the result and the reading keeps epoch 0.
     */
    private void mapVitalMessage(Vines v, VinesParserResult parserResult) {
        // generate Patient ID
        // NOTE(review): vitals uses the MRN while mapWaveformMessage uses
        // get_id() — confirm this asymmetry is intentional.
        String patientId = "";
        Patient patient = v.getData().getPatient();
        if (patient != null) {
            patientId = patient.getMRN();
        }
        // generate device type (same for all observations)
        Device device = new Device();
        device.setId(getDeviceId(v));
        Terms terms = v.getData().getBody().getTerms();
        for (String channelName : terms.getChannelNames()) {
            Channel channel = terms.getChannel(channelName);
            // generate ReadingSource
            ReadingSource readingSource = new ReadingSource();
            readingSource.setId(channelName);
            readingSource.setSourceType(SOURCE_TYPE);
            readingSource.setDeviceId(getDeviceId(v));
            // iterate over all terms and generate Readings & Observations
            for (String termName : channel.getTermNames()) {
                ITerm term = channel.getTerm(termName);
                if (term instanceof Term) {
                    Reading reading = new Reading();
                    Term t = (Term) term;
                    ITermValue itv = t.getValue();
                    if (itv instanceof TermValueString) {
                        String value = ((TermValueString) itv).getValue();
                        if (!NumberUtils.isNumber(value)) {
                            continue; // skip term as there is no numeric value
                        }
                        String date = t.getDate();
                        long epochTime = 0L;
                        try {
                            epochTime = toEpoch(date);
                        } catch (ParseException e) {
                            String msg = "Error parsing timestamp: error=" + e.getLocalizedMessage() + ", timestamp=" + date;
                            logger.error(msg, e);
                            parserResult.addErrorMesage(msg);
                        }
                        reading.setValue(Double.valueOf(value));
                        reading.setUom(t.getUOM());
                        reading.setTimestamp(epochTime);
                        reading.setReadingType(getReadingType(termName));
                        parserResult.addObservation(new Observation(device, patientId, readingSource, reading));
                    }
                } else {
                    // Array terms not expected in normal vines messages
                    String msg = "Error parsing Vines message: Array terms not expected in normal vines messages.";
                    logger.error(msg);
                    parserResult.addErrorMesage(msg);
                }
            }
        }
    }

    /**
     * Maps a waveform message: each sample in the MDC_ATTR_WAV array becomes
     * an Observation, timestamped from the message start time plus the
     * per-sample period derived from MDC_ATTR_SAMP_RATE.
     */
    private void mapWaveformMessage(Vines v, VinesParserResult parserResult) {
        // generate Patient ID
        String patientId = "";
        Patient patient = v.getData().getPatient();
        if (patient != null) {
            patientId = patient.get_id();
        }
        // generate device type (same for all observations)
        Device device = new Device();
        device.setId(getDeviceId(v));
        long startTime = 0;
        long period = 0;
        try {
            startTime = toEpoch(v.getData().getBody().getStartTime());
        } catch (ParseException e) {
            String msg = "Error parsing timestamp: error=" + e.getLocalizedMessage() + ", timestamp=" + v.getData().getBody().getStartTime();
            logger.error(msg, e);
            parserResult.addErrorMesage(msg);
        }
        Terms terms = v.getData().getBody().getTerms();
        Channel channel = terms.getChannel(terms.getChannelNames().get(0));
        ReadingSource readingSource = new ReadingSource();
        readingSource.setSourceType(SOURCE_TYPE);
        readingSource.setId(terms.getChannelNames().get(0));
        readingSource.setDeviceId(getDeviceId(v));
        // get the sample rate
        ITerm iterm = channel.getTerm("MDC_ATTR_SAMP_RATE");
        if (iterm instanceof Term) {
            Term term = (Term) iterm;
            // set the multiplier based on the UOM in order to convert
            // the sample rate to milliseconds
            double dividend = 1;
            if (term.getUOM().equals("MDC_DIM_SEC")) {
                dividend = 1000;
            }
            ITermValue itv = term.getValue();
            if (itv instanceof TermValueString) {
                String value = ((TermValueString) itv).getValue();
                if (NumberUtils.isNumber(value)) {
                    period = Math.round((dividend / Double.valueOf(value)));
                }
            }
        }
        // get the UOM
        iterm = channel.getTerm("MDC_ATTR_SCALE_RANGE");
        String uom = "";
        if (iterm instanceof Term) {
            Term term = (Term) iterm;
            uom = term.getUOM();
        }
        // map waveform samples to observations
        iterm = channel.getTerm("MDC_ATTR_WAV");
        if (iterm instanceof TermArray) {
            TermArray term = (TermArray) iterm;
            for (ITermValue itv : term) {
                if (itv instanceof TermValueMap) {
                    TermValueMap tvm = (TermValueMap) itv;
                    for (String waveName : tvm.keySet()) {
                        ITerm waveITerm = tvm.get(waveName);
                        if (waveITerm instanceof Term) {
                            Term waveTerm = (Term) waveITerm;
                            ITermValue waveTermValue = waveTerm.getValue();
                            if (waveTermValue instanceof TermValueString) {
                                String waveStr = ((TermValueString) waveTermValue).getValue();
                                List<Double> waveform = WaveformHelper.parseWaveform(waveStr);
                                for (int i = 0; i < waveform.size(); ++i) {
                                    Reading reading = new Reading();
                                    reading.setReadingType(getReadingType(waveName));
                                    reading.setValue(waveform.get(i));
                                    // sample i occurs one period after sample i-1
                                    reading.setTimestamp(startTime + period * i);
                                    reading.setUom(uom);
                                    parserResult.addObservation(new Observation(device, patientId, readingSource, reading));
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Resolves the reading type for a Vines term name: the mapped platform
     * code when mapping is enabled and a mapping exists, otherwise the raw
     * Vines name under the "vines" system.
     */
    private ReadingType getReadingType(String vinesName) {
        ReadingType readingType;
        // lookupTable is populated by readResolve(); mapping is only consulted
        // when explicitly enabled
        String code = isMappingEnabled ? lookupTable.lookupPlatformCode(vinesName) : null;
        if (code != null) {
            readingType = new ReadingType(ReadingTypeSystem.STREAMS_CODE_SYSTEM, code);
        } else {
            readingType = new ReadingType(VINES_SYSTEM_NAME, vinesName);
        }
        return readingType;
    }

    /** Returns the device ID carried in the message body. */
    private String getDeviceId(Vines v) {
        return v.getData().getBody().getDeviceId();
    }

    /**
     * Parses a Vines timestamp to epoch milliseconds. Tries an offset-aware
     * parse first; if the string has no zone information, falls back to the
     * system default zone.
     *
     * @throws DateTimeParseException if neither parse succeeds (the declared
     *             ParseException is kept for signature compatibility)
     */
    private long toEpoch(String date) throws ParseException {
        try {
            // assume date contains timezone information
            OffsetDateTime odt = OffsetDateTime.parse(date, formatter);
            return odt.toInstant().toEpochMilli();
        } catch (DateTimeParseException e) {
            try {
                // date may be missing timezone, use system default
                LocalDateTime ldt = LocalDateTime.parse(date, formatter);
                return ldt.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            } catch (DateTimeParseException e1) {
                // keep the offset-parse failure as context on the thrown
                // exception instead of dumping it to stderr
                e1.addSuppressed(e);
                throw e1;
            }
        }
    }
}
| |
/*
* $Id$
*
* SARL is an general-purpose agent programming language.
* More details on http://www.sarl.io
*
* Copyright (C) 2014-2016 the original authors or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sarl.lang.mwe2.codebuilder.fragments;
import javax.inject.Inject;
import javax.inject.Named;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.xtend2.lib.StringConcatenationClient;
import org.eclipse.xtext.Constants;
import org.eclipse.xtext.common.types.JvmParameterizedTypeReference;
import org.eclipse.xtext.common.types.JvmTypeReference;
import org.eclipse.xtext.common.types.util.Primitives;
import org.eclipse.xtext.common.types.util.TypeReferences;
import org.eclipse.xtext.resource.IResourceFactory;
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.xbase.compiler.ImportManager;
import org.eclipse.xtext.xbase.imports.IImportsConfiguration;
import org.eclipse.xtext.xbase.lib.Pure;
import org.eclipse.xtext.xbase.typesystem.references.LightweightTypeReference;
import org.eclipse.xtext.xbase.typesystem.references.LightweightTypeReferenceFactory;
import org.eclipse.xtext.xbase.typesystem.references.StandardTypeReferenceOwner;
import org.eclipse.xtext.xbase.typesystem.util.CommonTypeComputationServices;
import org.eclipse.xtext.xtext.generator.model.JavaFileAccess;
import org.eclipse.xtext.xtext.generator.model.TypeReference;
/** Generator of the abstract code builder.
*
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
*/
public class AbstractBuilderBuilderFragment extends AbstractSubCodeBuilderFragment {

	@Override
	public void generate() {
		// This fragment emits a single artifact: the abstract builder class.
		generateAbstractBuilder();
	}

	/** Generate the abstract builder.
	 *
	 * <p>The generated class is the common super type of the concrete code
	 * builders. It bundles injected Xtext/Xbase services (type references,
	 * primitives, imports configuration, resource factory, import manager)
	 * and utility methods for creating type references, testing subtyping,
	 * and allocating synthetic resource URIs. The whole class body is built
	 * as one string template and written to the src-gen output.
	 */
	@SuppressWarnings("checkstyle:all")
	protected void generateAbstractBuilder() {
		final TypeReference abstractBuilder = getAbstractBuilderImpl();
		StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				// ---- generated: class comment and declaration ----
				it.append("/** Abstract implementation of a builder for the " //$NON-NLS-1$
				+ getLanguageName() + " language."); //$NON-NLS-1$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public abstract class "); //$NON-NLS-1$
				it.append(abstractBuilder.getSimpleName());
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `services` (type computation services) ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(CommonTypeComputationServices.class);
				it.append(" services;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `importManager` ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(ImportManager.class);
				it.append(" importManager;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `typeReferences` ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(TypeReferences.class);
				it.append(" typeReferences;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `primitives` ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(Primitives.class);
				it.append(" primitives;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `importsConfiguration` ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(IImportsConfiguration.class);
				it.append(" importsConfiguration;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: injected field `resourceFactory` ----
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(IResourceFactory.class);
				it.append(" resourceFactory;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: `fileExtension` field plus its injected setter,
				// which keeps only the first extension of the configured list ----
				it.append("\tprivate String fileExtension;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tpublic void setFileExtensions(@"); //$NON-NLS-1$
				it.append(Named.class);
				it.append("("); //$NON-NLS-1$
				it.append(Constants.class);
				it.append(".FILE_EXTENSIONS) String fileExtensions) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tthis.fileExtension = fileExtensions.split(\"[:;,]+\")[0];"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getScriptFileExtension() ----
				it.append("\t/** Replies the script's file extension."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tpublic String getScriptFileExtension() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.fileExtension;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getTypeReferences() ----
				it.append("\t/** Replies the builder of type references."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference builder."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(TypeReferences.class);
				it.append(" getTypeReferences() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.typeReferences;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getPrimitiveTypes() ----
				it.append("\t/** Replies the primitive type tools."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the primitive type tools."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(Primitives.class);
				it.append(" getPrimitiveTypes() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.primitives;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: newTypeRef(EObject, String) — resolve by type name ----
				it.append("\t/** Create a reference to the given type."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param context - the context."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param typeName - the name of the type."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(JvmParameterizedTypeReference.class);
				it.append(" newTypeRef("); //$NON-NLS-1$
				it.append(EObject.class);
				it.append(" context, String typeName) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t"); //$NON-NLS-1$
				it.append(TypeReferences.class);
				it.append(" typeRefs = getTypeReferences();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn newTypeRef(context, typeName, typeRefs.getTypeForName(typeName, context));"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: newTypeRef(EObject, Class) — resolve from a Class literal ----
				it.append("\t/** Create a reference to the given type."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param context - the context."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param type - the type."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(JvmParameterizedTypeReference.class);
				it.append(" newTypeRef("); //$NON-NLS-1$
				it.append(EObject.class);
				it.append(" context, Class<?> type) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t"); //$NON-NLS-1$
				it.append(TypeReferences.class);
				it.append(" typeRefs = getTypeReferences();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn newTypeRef(context, type.getName(), typeRefs.getTypeForName(type, context));"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: private newTypeRef(EObject, String, JvmTypeReference) —
				// retries resolution against implicitly imported packages, registers
				// the import, and rejects unresolvable names ----
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(JvmParameterizedTypeReference.class);
				it.append(" newTypeRef("); //$NON-NLS-1$
				it.append(EObject.class);
				it.append(" context, String typeName, "); //$NON-NLS-1$
				it.append(JvmTypeReference.class);
				it.append(" typeReference) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tif (!isTypeReference(typeReference) && !getPrimitiveTypes().isPrimitive(typeReference)) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t"); //$NON-NLS-1$
				it.append(TypeReferences.class);
				it.append(" typeRefs = getTypeReferences();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\tfor (String packageName : getImportsConfiguration().getImplicitlyImportedPackages(("); //$NON-NLS-1$
				it.append(XtextResource.class);
				it.append(") context.eResource())) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\ttypeReference = typeRefs.getTypeForName(packageName + \".\" + typeName, context);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\tif (isTypeReference(typeReference)) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\t\tgetImportManager().addImportFor(typeReference.getType());"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\t\treturn ("); //$NON-NLS-1$
				it.append(JvmParameterizedTypeReference.class);
				it.append(") typeReference;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tif (!isTypeReference(typeReference)) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\tthrow new TypeNotPresentException(typeName, null);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tgetImportManager().addImportFor(typeReference.getType());"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn ("); //$NON-NLS-1$
				it.append(JvmParameterizedTypeReference.class);
				it.append(") typeReference;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: isSubTypeOf(EObject, JvmTypeReference, JvmTypeReference) ----
				it.append("\t/** Replies if the first parameter is a subtype of the second parameter."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param context - the context."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param subType - the subtype to test."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param superType - the expected super type."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected boolean isSubTypeOf("); //$NON-NLS-1$
				it.append(EObject.class);
				it.append(" context, "); //$NON-NLS-1$
				it.append(JvmTypeReference.class);
				it.append(" subType, "); //$NON-NLS-1$
				it.append(JvmTypeReference.class);
				it.append(" superType) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tif (isTypeReference(superType) && isTypeReference(subType)) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t"); //$NON-NLS-1$
				it.append(StandardTypeReferenceOwner.class);
				it.append(" owner = new "); //$NON-NLS-1$
				it.append(StandardTypeReferenceOwner.class);
				it.append("(services, context);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t"); //$NON-NLS-1$
				it.append(LightweightTypeReferenceFactory.class);
				it.append(" factory = new "); //$NON-NLS-1$
				it.append(LightweightTypeReferenceFactory.class);
				it.append("(owner, false);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t"); //$NON-NLS-1$
				it.append(LightweightTypeReference.class);
				it.append(" reference = factory.toLightweightReference(subType);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\treturn reference.isSubtypeOf(superType.getType());"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn false;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: isTypeReference(JvmTypeReference) — null/proxy guard ----
				it.append("\t/** Replies if the given object is a valid type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected boolean isTypeReference("); //$NON-NLS-1$
				it.append(JvmTypeReference.class);
				it.append(" typeReference) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn (typeReference != null && !typeReference.eIsProxy()"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t&& typeReference.getType() != null && !typeReference.getType().eIsProxy());"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getImportsConfiguration() ----
				it.append("\t/** Replies the import's configuration."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the import's configuration."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(IImportsConfiguration.class);
				it.append(" getImportsConfiguration() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.importsConfiguration;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: computeUnusedUri(ResourceSet) — probes
				// "__synthetic<i>.<ext>" names until an unused one is found ----
				it.append("\t/** Compute a unused URI for a synthetic resource."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param resourceSet - the resource set in which the resource should be located."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the uri."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(URI.class);
				it.append(" computeUnusedUri("); //$NON-NLS-1$
				it.append(ResourceSet.class);
				it.append(" resourceSet) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tString name = \"__synthetic\";"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tfor (int i = 0; i < Integer.MAX_VALUE; ++i) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t"); //$NON-NLS-1$
				it.append(URI.class);
				it.append(" syntheticUri = "); //$NON-NLS-1$
				it.append(URI.class);
				it.append(".createURI(name + i + \".\" + getScriptFileExtension());"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\tif (resourceSet.getResource(syntheticUri, false) == null) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t\treturn syntheticUri;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t}"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tthrow new IllegalStateException();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getResourceFactory() ----
				it.append("\t/** Replies the resource factory."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the resource factory."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(IResourceFactory.class);
				it.append(" getResourceFactory() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.resourceFactory;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: isActionBodyAllowed(<top element type>) — the returned
				// expression is built HERE at generation time from the configured list of
				// no-body types: "true" when the list is empty, otherwise a negated
				// "instanceof" disjunction over those types ----
				it.append("\t/** Replies if the type could contains functions with a body."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected boolean isActionBodyAllowed("); //$NON-NLS-1$
				it.append(getCodeElementExtractor().getLanguageTopElementType());
				it.append(" type) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn "); //$NON-NLS-1$
				if (getCodeBuilderConfig().getNoActionBodyTypes().isEmpty()) {
				it.append("true"); //$NON-NLS-1$
				} else {
				it.append("!("); //$NON-NLS-1$
				boolean first = true;
				for (String noBodyType : getCodeBuilderConfig().getNoActionBodyTypes()) {
				if (first) {
				first = false;
				} else {
				it.newLine();
				it.append("\t\t\t|| "); //$NON-NLS-1$
				}
				it.append("type instanceof "); //$NON-NLS-1$
				it.append(new TypeReference(getCodeElementExtractor().getLanguageBasePackage() + "." + noBodyType)); //$NON-NLS-1$
				}
				it.append(")"); //$NON-NLS-1$
				}
				it.append(";"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: getImportManager() ----
				it.append("\t/** Replies the import manager that stores the imported types."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the import manager."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tprotected "); //$NON-NLS-1$
				it.append(ImportManager.class);
				it.append(" getImportManager() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.importManager;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// ---- generated: closing brace of the generated class ----
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
		// Write the assembled template to the src-gen output folder.
		JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(abstractBuilder, content);
		javaFile.writeTo(getSrcGen());
	}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cognitoidentity;
import com.amazonaws.services.cognitoidentity.model.*;
/**
* Abstract implementation of {@code AmazonCognitoIdentityAsync}. Convenient
* method forms pass through to the corresponding overload that takes a request
* object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
public class AbstractAmazonCognitoIdentityAsync extends AbstractAmazonCognitoIdentity implements AmazonCognitoIdentityAsync {

    /** Only subclasses may instantiate this skeletal implementation. */
    protected AbstractAmazonCognitoIdentityAsync() {
    }

    // Every single-argument overload below simply forwards to its handler-accepting
    // counterpart with a null AsyncHandler; every handler-accepting variant is
    // unimplemented and throws UnsupportedOperationException, as documented on the class.

    @Override
    public java.util.concurrent.Future<CreateIdentityPoolResult> createIdentityPoolAsync(CreateIdentityPoolRequest request) {
        return createIdentityPoolAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateIdentityPoolResult> createIdentityPoolAsync(CreateIdentityPoolRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateIdentityPoolRequest, CreateIdentityPoolResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DeleteIdentitiesResult> deleteIdentitiesAsync(DeleteIdentitiesRequest request) {
        return deleteIdentitiesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteIdentitiesResult> deleteIdentitiesAsync(DeleteIdentitiesRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteIdentitiesRequest, DeleteIdentitiesResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<Void> deleteIdentityPoolAsync(DeleteIdentityPoolRequest request) {
        return deleteIdentityPoolAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> deleteIdentityPoolAsync(DeleteIdentityPoolRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteIdentityPoolRequest, Void> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeIdentityResult> describeIdentityAsync(DescribeIdentityRequest request) {
        return describeIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeIdentityResult> describeIdentityAsync(DescribeIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeIdentityRequest, DescribeIdentityResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeIdentityPoolResult> describeIdentityPoolAsync(DescribeIdentityPoolRequest request) {
        return describeIdentityPoolAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeIdentityPoolResult> describeIdentityPoolAsync(DescribeIdentityPoolRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeIdentityPoolRequest, DescribeIdentityPoolResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetCredentialsForIdentityResult> getCredentialsForIdentityAsync(GetCredentialsForIdentityRequest request) {
        return getCredentialsForIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetCredentialsForIdentityResult> getCredentialsForIdentityAsync(GetCredentialsForIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<GetCredentialsForIdentityRequest, GetCredentialsForIdentityResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetIdResult> getIdAsync(GetIdRequest request) {
        return getIdAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetIdResult> getIdAsync(GetIdRequest request,
            com.amazonaws.handlers.AsyncHandler<GetIdRequest, GetIdResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetIdentityPoolRolesResult> getIdentityPoolRolesAsync(GetIdentityPoolRolesRequest request) {
        return getIdentityPoolRolesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetIdentityPoolRolesResult> getIdentityPoolRolesAsync(GetIdentityPoolRolesRequest request,
            com.amazonaws.handlers.AsyncHandler<GetIdentityPoolRolesRequest, GetIdentityPoolRolesResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetOpenIdTokenResult> getOpenIdTokenAsync(GetOpenIdTokenRequest request) {
        return getOpenIdTokenAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetOpenIdTokenResult> getOpenIdTokenAsync(GetOpenIdTokenRequest request,
            com.amazonaws.handlers.AsyncHandler<GetOpenIdTokenRequest, GetOpenIdTokenResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetOpenIdTokenForDeveloperIdentityResult> getOpenIdTokenForDeveloperIdentityAsync(GetOpenIdTokenForDeveloperIdentityRequest request) {
        return getOpenIdTokenForDeveloperIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetOpenIdTokenForDeveloperIdentityResult> getOpenIdTokenForDeveloperIdentityAsync(GetOpenIdTokenForDeveloperIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<GetOpenIdTokenForDeveloperIdentityRequest, GetOpenIdTokenForDeveloperIdentityResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListIdentitiesResult> listIdentitiesAsync(ListIdentitiesRequest request) {
        return listIdentitiesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListIdentitiesResult> listIdentitiesAsync(ListIdentitiesRequest request,
            com.amazonaws.handlers.AsyncHandler<ListIdentitiesRequest, ListIdentitiesResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListIdentityPoolsResult> listIdentityPoolsAsync(ListIdentityPoolsRequest request) {
        return listIdentityPoolsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListIdentityPoolsResult> listIdentityPoolsAsync(ListIdentityPoolsRequest request,
            com.amazonaws.handlers.AsyncHandler<ListIdentityPoolsRequest, ListIdentityPoolsResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<LookupDeveloperIdentityResult> lookupDeveloperIdentityAsync(LookupDeveloperIdentityRequest request) {
        return lookupDeveloperIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<LookupDeveloperIdentityResult> lookupDeveloperIdentityAsync(LookupDeveloperIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<LookupDeveloperIdentityRequest, LookupDeveloperIdentityResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<MergeDeveloperIdentitiesResult> mergeDeveloperIdentitiesAsync(MergeDeveloperIdentitiesRequest request) {
        return mergeDeveloperIdentitiesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<MergeDeveloperIdentitiesResult> mergeDeveloperIdentitiesAsync(MergeDeveloperIdentitiesRequest request,
            com.amazonaws.handlers.AsyncHandler<MergeDeveloperIdentitiesRequest, MergeDeveloperIdentitiesResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<Void> setIdentityPoolRolesAsync(SetIdentityPoolRolesRequest request) {
        return setIdentityPoolRolesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> setIdentityPoolRolesAsync(SetIdentityPoolRolesRequest request,
            com.amazonaws.handlers.AsyncHandler<SetIdentityPoolRolesRequest, Void> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<Void> unlinkDeveloperIdentityAsync(UnlinkDeveloperIdentityRequest request) {
        return unlinkDeveloperIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> unlinkDeveloperIdentityAsync(UnlinkDeveloperIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<UnlinkDeveloperIdentityRequest, Void> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<Void> unlinkIdentityAsync(UnlinkIdentityRequest request) {
        return unlinkIdentityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> unlinkIdentityAsync(UnlinkIdentityRequest request,
            com.amazonaws.handlers.AsyncHandler<UnlinkIdentityRequest, Void> asyncHandler) {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateIdentityPoolResult> updateIdentityPoolAsync(UpdateIdentityPoolRequest request) {
        return updateIdentityPoolAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateIdentityPoolResult> updateIdentityPoolAsync(UpdateIdentityPoolRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateIdentityPoolRequest, UpdateIdentityPoolResult> asyncHandler) {
        throw new UnsupportedOperationException();
    }
}
| |
package uk.nhs.careconnect.ri.r4.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.ObservationDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Repository;
import uk.nhs.careconnect.fhir.OperationOutcomeException;
import uk.nhs.careconnect.ri.database.daointerface.ConceptRepository;
import uk.nhs.careconnect.ri.database.daointerface.ValueSetRepository;
import uk.nhs.careconnect.ri.database.entity.codeSystem.CodeSystemEntity;
import uk.nhs.careconnect.ri.database.entity.codeSystem.ConceptEntity;
import uk.nhs.careconnect.ri.database.entity.observationDefinition.ObservationDefinitionCategory;
import uk.nhs.careconnect.ri.database.entity.observationDefinition.ObservationDefinitionEntity;
import uk.nhs.careconnect.ri.database.entity.observationDefinition.ObservationDefinitionIdentifier;
import uk.nhs.careconnect.ri.database.entity.valueSet.ValueSetEntity;
import uk.nhs.careconnect.ri.database.daointerface.ObservationDefinitionRepository;
import uk.nhs.careconnect.ri.r4.dao.transform.ObservationDefinitionEntityToFHIRObservationDefinitionTransformer;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.criteria.*;
import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@Repository
@Transactional
public class ObservationDefinitionDao implements ObservationDefinitionRepository {

    @PersistenceContext
    EntityManager em;

    @Autowired
    private LibDaoR4 libDao;

    @Autowired
    @Lazy
    ConceptRepository conceptDao;

    @Autowired
    @Lazy
    ValueSetRepository valueSetDao;

    @Autowired
    private ObservationDefinitionEntityToFHIRObservationDefinitionTransformer observationDefinitionEntityToFHIRObservationDefinitionTransformer;

    private static final Logger log = LoggerFactory.getLogger(ObservationDefinitionDao.class);

    /**
     * Counts all persisted ObservationDefinition entities.
     */
    @Override
    public Long count() {
        CriteriaBuilder qb = em.getCriteriaBuilder();
        CriteriaQuery<Long> cq = qb.createQuery(Long.class);
        cq.select(qb.count(cq.from(ObservationDefinitionEntity.class)));
        return em.createQuery(cq).getSingleResult();
    }

    /**
     * Deletes the ObservationDefinition with the given id, including its identifier rows.
     *
     * @return an empty OperationOutcome on success, or {@code null} when no entity matches the id
     */
    @Override
    public OperationOutcome delete(FhirContext ctx, IdType theId) {
        log.trace("Delete OperationDefinition = " + theId.getValue());
        ObservationDefinitionEntity observationDefinitionEntity = readEntity(ctx, theId);
        if (observationDefinitionEntity == null) return null;
        // Remove child identifier rows first so the parent can be removed cleanly.
        for (ObservationDefinitionIdentifier identifier : observationDefinitionEntity.getIdentifiers()) {
            em.remove(identifier);
        }
        em.remove(observationDefinitionEntity);
        return new OperationOutcome();
    }

    /**
     * Creates (or updates, when the resource carries an existing id) an ObservationDefinition.
     *
     * @param ctx                   FHIR context used for JSON encoding
     * @param observationDefinition the incoming resource
     * @param theId                 unused; the id is taken from the resource itself
     * @return the stored resource transformed back to FHIR, with its server-assigned id
     * @throws ResourceVersionConflictException when the code is already registered to a different definition
     * @throws IllegalArgumentException         when a referenced code cannot be resolved
     */
    @Override
    public ObservationDefinition create(FhirContext ctx, ObservationDefinition observationDefinition, IdType theId) throws OperationOutcomeException {
        log.info("ObservationDefinition Create");
        if (observationDefinition.hasCode() && observationDefinition.getCode().hasCoding()) {
            log.debug("call came to save ObservationDefinition : system=" + observationDefinition.getCode().getCodingFirstRep().getSystem() + " code=" + observationDefinition.getCode().getCodingFirstRep().getCode());
        }
        ObservationDefinitionEntity observationDefinitionEntity = null;
        log.debug("id is" + observationDefinition.getIdElement());
        if (observationDefinition.hasId()) {
            observationDefinitionEntity = readEntity(ctx, observationDefinition.getIdElement());
        }
        // Prevent duplicate entries for codes
        if (observationDefinition.hasCode() && observationDefinition.getCode().hasCoding()) {
            List<ObservationDefinitionEntity> entries = searchEntity(ctx, null,
                    new TokenOrListParam().add(new TokenParam().setSystem(observationDefinition.getCode().getCodingFirstRep().getSystem()).setValue(observationDefinition.getCode().getCodingFirstRep().getCode()))
                    , null
                    , null
                    , null);
            for (ObservationDefinitionEntity nameSys : entries) {
                // Conflict when the incoming resource has no id, when its id did not
                // resolve to an existing entity (guards the NPE the old code had here),
                // or when it resolved to a different entity than the one owning the code.
                if (observationDefinition.getId() == null
                        || observationDefinitionEntity == null
                        || !nameSys.getId().equals(observationDefinitionEntity.getId())) {
                    throw new ResourceVersionConflictException(observationDefinition.getCode().getCodingFirstRep().getSystem() + " code=" + observationDefinition.getCode().getCodingFirstRep().getCode() + " is already present on the system " + nameSys.getId());
                }
            }
        }
        if (observationDefinitionEntity == null) {
            observationDefinitionEntity = new ObservationDefinitionEntity();
        }
        if (observationDefinition.hasCode()) {
            if (observationDefinition.getCode().hasCoding()) {
                ConceptEntity code = conceptDao.findAddCode(observationDefinition.getCode().getCoding().get(0));
                if (code != null) {
                    observationDefinitionEntity.setCode(code);
                } else {
                    log.info("Code: Missing System/Code = " + observationDefinition.getCode().getCoding().get(0).getSystem() + " code = " + observationDefinition.getCode().getCoding().get(0).getCode());
                    throw new IllegalArgumentException("Missing System/Code = " + observationDefinition.getCode().getCoding().get(0).getSystem() + " code = " + observationDefinition.getCode().getCoding().get(0).getCode());
                }
            }
            if (observationDefinition.getCode().hasText()) {
                observationDefinitionEntity.setCodeText(observationDefinition.getCode().getText());
            }
        }
        // Resolve each ValueSet reference; an unresolved reference clears the link
        // rather than failing (matching the original best-effort behaviour).
        if (observationDefinition.hasNormalCodedValueSet()) {
            observationDefinitionEntity.setNormalValueSet(
                    resolveValueSet(ctx, observationDefinition.getNormalCodedValueSet().getReference()));
        }
        if (observationDefinition.hasAbnormalCodedValueSet()) {
            observationDefinitionEntity.setAbnormalValueSet(
                    resolveValueSet(ctx, observationDefinition.getAbnormalCodedValueSet().getReference()));
        }
        if (observationDefinition.hasValidCodedValueSet()) {
            log.info("Has ValidCodedValueSet");
            observationDefinitionEntity.setValidValueSet(
                    resolveValueSet(ctx, observationDefinition.getValidCodedValueSet().getReference()));
        }
        if (observationDefinition.hasCriticalCodedValueSet()) {
            observationDefinitionEntity.setCriticalValueSet(
                    resolveValueSet(ctx, observationDefinition.getCriticalCodedValueSet().getReference()));
        }
        // Store the JSON rendition of the resource alongside the relational data.
        observationDefinitionEntity.setResource(ctx.newJsonParser().encodeResourceToString(observationDefinition));
        log.trace("Call em.persist ObservationDefinitionEntity");
        em.persist(observationDefinitionEntity);
        log.info("Called PERSIST id=" + observationDefinitionEntity.getId().toString());
        if (observationDefinition.hasIdentifier()) {
            // Replace identifiers wholesale: remove the old rows, then persist the new ones.
            for (ObservationDefinitionIdentifier identifier : observationDefinitionEntity.getIdentifiers()) {
                em.remove(identifier);
            }
            for (Identifier identifier : observationDefinition.getIdentifier()) {
                ObservationDefinitionIdentifier observationDefinitionIdentifier = new ObservationDefinitionIdentifier();
                observationDefinitionIdentifier.setObservationDefinition(observationDefinitionEntity);
                observationDefinitionIdentifier = (ObservationDefinitionIdentifier) libDao.setIdentifier(identifier, observationDefinitionIdentifier);
                em.persist(observationDefinitionIdentifier);
            }
        }
        for (ObservationDefinitionCategory observationCategory : observationDefinitionEntity.getCategories()) {
            em.remove(observationCategory);
        }
        for (CodeableConcept concept : observationDefinition.getCategory()) {
            // Category must have a code 15/Jan/2018 testing with Synthea examples
            ObservationDefinitionCategory category = null;
            // NOTE(review): this picks the LAST entry of the in-memory collection even though
            // those rows were just removed from the persistence context above — confirm intended.
            for (ObservationDefinitionCategory cat : observationDefinitionEntity.getCategories()) {
                category = cat;
            }
            if (category == null) category = new ObservationDefinitionCategory();
            category.setObservationDefinition(observationDefinitionEntity);
            if (concept.hasCoding()) {
                ConceptEntity conceptEntity = conceptDao.findAddCode(concept.getCoding().get(0));
                if (conceptEntity != null) {
                    category.setConceptCode(conceptEntity);
                } else {
                    log.info("Missing Category. System/Code = " + concept.getCoding().get(0).getSystem() + " code = " + concept.getCoding().get(0).getCode());
                    throw new IllegalArgumentException("Missing System/Code = " + concept.getCoding().get(0).getSystem() + " code = " + concept.getCoding().get(0).getCode());
                }
            }
            if (concept.hasText()) {
                category.setConceptText(concept.getText());
            }
            em.persist(category);
            observationDefinitionEntity.getCategories().add(category);
        }
        observationDefinition.setId(observationDefinitionEntity.getId().toString());
        return observationDefinitionEntityToFHIRObservationDefinitionTransformer.transform(observationDefinitionEntity, ctx);
    }

    /**
     * Resolves a ValueSet reference string to its entity.
     *
     * @return the matching entity, or {@code null} when the reference cannot be found
     */
    private ValueSetEntity resolveValueSet(FhirContext ctx, String reference) {
        // NOTE(review): the ValueSet dao interface takes a DSTU3 IdType even though this is
        // the R4 dao — presumably a shared interface; confirm against ValueSetRepository.
        org.hl7.fhir.dstu3.model.IdType idType = new org.hl7.fhir.dstu3.model.IdType();
        idType.setValueAsString(reference);
        return valueSetDao.readEntity(ctx, idType);
    }

    /**
     * Looks up an ObservationDefinition entity by its primary-key id.
     *
     * @return the matching entity, or {@code null} for non-numeric ids or when nothing matches
     */
    @Override
    public ObservationDefinitionEntity readEntity(FhirContext ctx, IdType theId) {
        log.trace("the id is " + theId.getIdPart());
        ObservationDefinitionEntity observationDefinitionEntity = null;
        // Only numeric ids can match a primary key; anything else returns null.
        if (daoutilsR4.isNumeric(theId.getIdPart())) {
            CriteriaBuilder builder = em.getCriteriaBuilder();
            CriteriaQuery<ObservationDefinitionEntity> criteria = builder.createQuery(ObservationDefinitionEntity.class);
            Root<ObservationDefinitionEntity> root = criteria.from(ObservationDefinitionEntity.class);
            criteria.select(root).where(builder.equal(root.<String>get("id"), theId.getIdPart()));
            List<ObservationDefinitionEntity> qryResults = em.createQuery(criteria).getResultList();
            if (!qryResults.isEmpty()) {
                observationDefinitionEntity = qryResults.get(0);
            }
        }
        return observationDefinitionEntity;
    }

    /** No-op: persistence is handled by {@link #create}. */
    @Override
    public void save(FhirContext ctx, ObservationDefinitionEntity resource) throws OperationOutcomeException {
        // Intentionally empty.
    }

    /**
     * Reads an ObservationDefinition by id, lazily caching its JSON rendition on the entity.
     *
     * @return the resource, or {@code null} when the id does not exist
     */
    public ObservationDefinition read(FhirContext ctx, IdType theId) {
        log.trace("Retrieving ValueSet = " + theId.getValue());
        ObservationDefinitionEntity observationDefinitionEntity = readEntity(ctx, theId);
        if (observationDefinitionEntity == null) return null;
        ObservationDefinition observationDefinition = observationDefinitionEntityToFHIRObservationDefinitionTransformer.transform(observationDefinitionEntity, ctx);
        if (observationDefinitionEntity.getResource() == null) {
            String resource = ctx.newJsonParser().encodeResourceToString(observationDefinition);
            // Only cache reasonably small renditions; larger ones are regenerated on demand.
            if (resource.length() < 10000) {
                observationDefinitionEntity.setResource(resource);
                em.persist(observationDefinitionEntity);
            }
        }
        return observationDefinition;
    }

    /**
     * Searches for ObservationDefinitions and transforms each hit to a FHIR resource.
     */
    @Override
    public List<ObservationDefinition> search(FhirContext ctx, TokenParam category, TokenOrListParam code, TokenParam identifier, StringParam name, StringParam id) {
        List<ObservationDefinitionEntity> qryResults = searchEntity(ctx, category, code, identifier, name, id);
        List<ObservationDefinition> results = new ArrayList<>();
        for (ObservationDefinitionEntity observationDefinitionEntity : qryResults) {
            ObservationDefinition observationDefinition = observationDefinitionEntityToFHIRObservationDefinitionTransformer.transform(observationDefinitionEntity, ctx);
            results.add(observationDefinition);
        }
        return results;
    }

    /**
     * Builds and runs a criteria query over ObservationDefinition entities.
     * All parameters are optional; supplied ones are ANDed together
     * (multiple codes within {@code codes} are ORed). Results are capped at 100.
     */
    @Override
    public List<ObservationDefinitionEntity> searchEntity(FhirContext ctx, TokenParam category, TokenOrListParam codes, TokenParam identifier, StringParam name, StringParam id) {
        List<ObservationDefinitionEntity> qryResults = null;
        CriteriaBuilder builder = em.getCriteriaBuilder();
        CriteriaQuery<ObservationDefinitionEntity> criteria = builder.createQuery(ObservationDefinitionEntity.class);
        Root<ObservationDefinitionEntity> root = criteria.from(ObservationDefinitionEntity.class);
        List<Predicate> predList = new LinkedList<>();
        if (id != null) {
            Predicate p = builder.equal(root.get("id"), id.getValue());
            predList.add(p);
        }
        if (category != null) {
            log.trace("Search on ObservationDefinition.category code = " + category.getValue());
            Join<ObservationDefinitionEntity, ObservationDefinitionCategory> join = root.join("categories", JoinType.LEFT);
            Join<ObservationDefinitionCategory, ConceptEntity> joinConcept = join.join("category", JoinType.LEFT);
            Predicate p = builder.equal(joinConcept.get("code"), category.getValue());
            predList.add(p);
        }
        if (name != null) {
            log.trace("Search on ObservationDefinition name = " + name.getValue());
            // Case-insensitive substring match on the code's display text.
            Join<ObservationDefinitionEntity,
                    ConceptEntity> joinConcept = root.join("code", JoinType.LEFT);
            Predicate p = builder.like(
                    builder.upper(joinConcept.get("myDisplay").as(String.class)),
                    builder.upper(builder.literal("%" + name.getValue() + "%"))
            );
            predList.add(p);
        }
        if (codes != null) {
            // Each token may be system|code or bare code; tokens are ORed together.
            List<Predicate> predOrList = new LinkedList<>();
            Join<ObservationDefinitionEntity, ConceptEntity> joinConcept = root.join("code", JoinType.LEFT);
            Join<ConceptEntity, CodeSystemEntity> joinCodeSystem = joinConcept.join("codeSystemEntity", JoinType.LEFT);
            for (TokenParam code : codes.getValuesAsQueryTokens()) {
                log.trace("Search on Observation.code code = " + code.getValue());
                Predicate p = null;
                if (code.getSystem() != null) {
                    p = builder.and(builder.equal(joinCodeSystem.get("codeSystemUri"), code.getSystem()), builder.equal(joinConcept.get("code"), code.getValue()));
                } else {
                    p = builder.equal(joinConcept.get("code"), code.getValue());
                }
                predOrList.add(p);
            }
            if (predOrList.size() > 0) {
                Predicate p = builder.or(predOrList.toArray(new Predicate[0]));
                predList.add(p);
            }
        }
        if (identifier != null) {
            Join<ObservationDefinitionEntity, ObservationDefinitionIdentifier> join = root.join("identifiers", JoinType.LEFT);
            Predicate p = builder.equal(join.get("value"), identifier.getValue());
            predList.add(p);
            // TODO predList.add(builder.equal(join.get("system"),identifier.getSystem()));
        }
        Predicate[] predArray = new Predicate[predList.size()];
        predList.toArray(predArray);
        if (predList.size() > 0) {
            criteria.select(root).where(predArray);
        } else {
            criteria.select(root);
        }
        qryResults = em.createQuery(criteria).setMaxResults(100).getResultList();
        return qryResults;
    }
}
| |
/*
* Copyright 2010 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.asynchttpclient.async;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import org.asynchttpclient.FluentCaseInsensitiveStringsMap;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
public class FluentCaseInsensitiveStringsMapTest {
@Test
public void emptyTest() {
    // A freshly constructed map must expose no keys at all.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    assertEquals(headers.keySet().size(), 0);
}
@Test
public void normalTest() {
    // Two entries: one single-valued, one multi-valued.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    headers.add("foo", "bar");
    headers.add("baz", Arrays.asList("foo", "bar"));

    assertEquals(headers.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));

    assertEquals(headers.getFirstValue("foo"), "bar");
    assertEquals(headers.getJoinedValue("foo", ", "), "bar");
    assertEquals(headers.get("foo"), Collections.singletonList("bar"));

    assertEquals(headers.getFirstValue("baz"), "foo");
    assertEquals(headers.getJoinedValue("baz", ", "), "foo, bar");
    assertEquals(headers.get("baz"), Arrays.asList("foo", "bar"));
}
@Test
public void nameCaseTest() {
    // Lookups must succeed regardless of the casing used for the key.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    headers.add("fOO", "bAr");
    headers.add("Baz", Arrays.asList("fOo", "bar"));

    // Keys retain the casing under which they were first added.
    assertEquals(headers.keySet(), new LinkedHashSet<String>(Arrays.asList("fOO", "Baz")));

    for (String key : Arrays.asList("fOO", "foo", "FOO")) {
        assertEquals(headers.getFirstValue(key), "bAr");
        assertEquals(headers.getJoinedValue(key, ", "), "bAr");
        assertEquals(headers.get(key), Collections.singletonList("bAr"));
    }
    for (String key : Arrays.asList("Baz", "baz", "BAZ")) {
        assertEquals(headers.getFirstValue(key), "fOo");
        assertEquals(headers.getJoinedValue(key, ", "), "fOo, bar");
        assertEquals(headers.get(key), Arrays.asList("fOo", "bar"));
    }
}
@Test
public void sameKeyMultipleTimesTest() {
    // Values added under different casings of one key accumulate under the first casing seen.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    headers.add("foo", "baz,foo");
    headers.add("Foo", Collections.singletonList("bar"));
    headers.add("fOO", "bla", "blubb");

    assertEquals(headers.keySet(), new LinkedHashSet<String>(Collections.singletonList("foo")));

    for (String key : Arrays.asList("foo", "Foo", "fOO")) {
        assertEquals(headers.getFirstValue(key), "baz,foo");
        assertEquals(headers.getJoinedValue(key, ", "), "baz,foo, bar, bla, blubb");
        assertEquals(headers.get(key), Arrays.asList("baz,foo", "bar", "bla", "blubb"));
    }
}
@Test
public void emptyValueTest() {
    // An explicitly empty string is stored and reported as-is.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    headers.add("foo", "");

    assertEquals(headers.keySet(), new LinkedHashSet<String>(Collections.singletonList("foo")));
    assertEquals(headers.getFirstValue("foo"), "");
    assertEquals(headers.getJoinedValue("foo", ", "), "");
    assertEquals(headers.get("foo"), Collections.singletonList(""));
}
@Test
public void nullValueTest() {
    // A null value is normalised to a single empty-string entry.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
    headers.add("foo", (String) null);

    assertEquals(headers.getFirstValue("foo"), "");
    assertEquals(headers.getJoinedValue("foo", ", "), "");
    assertEquals(headers.get("foo").size(), 1);
}
@Test
public void mapConstructorTest() {
    // The map constructor must copy the source map, not wrap it.
    Map<String, Collection<String>> source = new LinkedHashMap<String, Collection<String>>();
    source.put("foo", Collections.singletonList("baz,foo"));
    source.put("baz", Collections.singletonList("bar"));
    source.put("bar", Arrays.asList("bla", "blubb"));
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap(source);

    // Mutating the source afterwards must not affect the copy.
    source.remove("foo");
    source.remove("bar");
    source.remove("baz");

    assertEquals(headers.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
    assertEquals(headers.getFirstValue("foo"), "baz,foo");
    assertEquals(headers.getJoinedValue("foo", ", "), "baz,foo");
    assertEquals(headers.get("foo"), Collections.singletonList("baz,foo"));
    assertEquals(headers.getFirstValue("baz"), "bar");
    assertEquals(headers.getJoinedValue("baz", ", "), "bar");
    assertEquals(headers.get("baz"), Collections.singletonList("bar"));
    assertEquals(headers.getFirstValue("bar"), "bla");
    assertEquals(headers.getJoinedValue("bar", ", "), "bla, blubb");
    assertEquals(headers.get("bar"), Arrays.asList("bla", "blubb"));
}
@Test
public void mapConstructorNullTest() {
    // A null source map yields an empty instance rather than an NPE.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap((Map<String, Collection<String>>) null);
    assertTrue(headers.keySet().isEmpty());
}
@Test
public void copyConstructorTest() {
    // The copy constructor must snapshot the source headers, not alias them.
    FluentCaseInsensitiveStringsMap original = new FluentCaseInsensitiveStringsMap();
    original.add("foo", "baz,foo");
    original.add("baz", Collections.singletonList("bar"));
    original.add("bar", "bla", "blubb");
    FluentCaseInsensitiveStringsMap copy = new FluentCaseInsensitiveStringsMap(original);

    // Emptying the source must leave the copy untouched.
    original.delete("foo");
    original.delete("bar");
    original.delete("baz");
    assertTrue(original.keySet().isEmpty());

    assertEquals(copy.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
    assertEquals(copy.getFirstValue("foo"), "baz,foo");
    assertEquals(copy.getJoinedValue("foo", ", "), "baz,foo");
    assertEquals(copy.get("foo"), Collections.singletonList("baz,foo"));
    assertEquals(copy.getFirstValue("baz"), "bar");
    assertEquals(copy.getJoinedValue("baz", ", "), "bar");
    assertEquals(copy.get("baz"), Collections.singletonList("bar"));
    assertEquals(copy.getFirstValue("bar"), "bla");
    assertEquals(copy.getJoinedValue("bar", ", "), "bla, blubb");
    assertEquals(copy.get("bar"), Arrays.asList("bla", "blubb"));
}
@Test
public void copyConstructorNullTest() {
    // A null source instance yields an empty instance rather than an NPE.
    FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap((FluentCaseInsensitiveStringsMap) null);
    assertTrue(headers.keySet().isEmpty());
}
/** delete() removes an entry case-insensitively ("bAz" removes key "baz") and leaves other entries untouched. */
@Test
public void deleteTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// Case-insensitive match: "bAz" deletes the entry stored under "baz".
map.delete("bAz");
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertNull(map.getFirstValue("baz"));
assertNull(map.getJoinedValue("baz", ", "));
assertNull(map.get("baz"));
}
/** Deleting a key that is not present ("bar") is a silent no-op; the map is unchanged. */
@Test
public void deleteUndefinedKeyTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// "bar" is a value, not a key; delete must not touch anything.
map.delete("bar");
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** delete(null) is a silent no-op; the map is unchanged. */
@Test
public void deleteNullTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.delete(null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** deleteAll(varargs) removes matching keys case-insensitively and silently ignores unknown ones ("Boo"). */
@Test
public void deleteAllArrayTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// "bAz" matches "baz" case-insensitively; "Boo" is absent and must be ignored.
map.deleteAll("bAz", "Boo");
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertNull(map.getFirstValue("baz"));
assertNull(map.getJoinedValue("baz", ", "));
assertNull(map.get("baz"));
}
/** deleteAll(Collection) removes every listed key case-insensitively, here emptying the map entirely. */
@Test
public void deleteAllCollectionTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// Mixed-case keys remove both entries.
map.deleteAll(Arrays.asList("bAz", "fOO"));
assertEquals(map.keySet(), Collections.<String> emptyList());
assertNull(map.getFirstValue("foo"));
assertNull(map.getJoinedValue("foo", ", "));
assertNull(map.get("foo"));
assertNull(map.getFirstValue("baz"));
assertNull(map.getJoinedValue("baz", ", "));
assertNull(map.get("baz"));
}
/** deleteAll((String[]) null) is a silent no-op; the map is unchanged. */
@Test
public void deleteAllNullArrayTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.deleteAll((String[]) null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** deleteAll((Collection) null) is a silent no-op; the map is unchanged. */
@Test
public void deleteAllNullCollectionTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.deleteAll((Collection<String>) null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/**
 * replace() substitutes the values of an existing entry case-insensitively and adopts the
 * new key's casing in keySet() ("Foo" supersedes "foo") while lookups stay case-insensitive.
 */
@Test
public void replaceTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// Replaces the "foo" entry; the stored key takes the new casing "Foo".
map.replace("Foo", "blub", "bla");
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("Foo", "baz")));
assertEquals(map.getFirstValue("foo"), "blub");
assertEquals(map.getJoinedValue("foo", ", "), "blub, bla");
assertEquals(map.get("foo"), Arrays.asList("blub", "bla"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** Replacing a key that is not present behaves like add: the entry is appended to the map. */
@Test
public void replaceUndefinedTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// "bar" is absent as a key, so replace() adds it.
map.replace("bar", Arrays.asList("blub"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
assertEquals(map.getFirstValue("bar"), "blub");
assertEquals(map.getJoinedValue("bar", ", "), "blub");
assertEquals(map.get("bar"), Arrays.asList("blub"));
}
/** replace(null, values) is a silent no-op; the map is unchanged. */
@Test
public void replaceNullTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.replace(null, Arrays.asList("blub"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** Replacing an entry's values with a null collection removes the entry (key matched case-insensitively). */
@Test
public void replaceValueWithNullTest() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
// Null replacement collection deletes "baz" ("baZ" matches case-insensitively).
map.replace("baZ", (Collection<String>) null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertNull(map.getFirstValue("baz"));
assertNull(map.getJoinedValue("baz", ", "));
assertNull(map.get("baz"));
}
/**
 * replaceAll(FluentCaseInsensitiveStringsMap) replaces values of existing entries case-insensitively
 * ("Bar" replaces "bar", keeping the new casing in keySet()) and appends unknown keys ("Boo");
 * entries not mentioned ("foo", "baz") are untouched.
 */
@Test
public void replaceAllMapTest1() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("bar", "foo, bar", "baz");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.replaceAll(new FluentCaseInsensitiveStringsMap().add("Bar", "baz").add("Boo", "blub", "bla"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "Bar", "baz", "Boo")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "baz");
assertEquals(map.getJoinedValue("bar", ", "), "baz");
assertEquals(map.get("bar"), Arrays.asList("baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
assertEquals(map.getFirstValue("Boo"), "blub");
assertEquals(map.getJoinedValue("Boo", ", "), "blub, bla");
assertEquals(map.get("Boo"), Arrays.asList("blub", "bla"));
}
/**
 * replaceAll(Map): a non-null value replaces the matching entry ("Bar" -> "bar"),
 * while a null value deletes it ("Foo" removes "foo"); unmentioned entries are untouched.
 */
@Test
public void replaceAllTest2() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("bar", "foo, bar", "baz");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
LinkedHashMap<String, Collection<String>> newValues = new LinkedHashMap<String, Collection<String>>();
newValues.put("Bar", Arrays.asList("baz"));
// Null value signals removal of the matching entry.
newValues.put("Foo", null);
map.replaceAll(newValues);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("Bar", "baz")));
assertNull(map.getFirstValue("foo"));
assertNull(map.getJoinedValue("foo", ", "));
assertNull(map.get("foo"));
assertEquals(map.getFirstValue("bar"), "baz");
assertEquals(map.getJoinedValue("bar", ", "), "baz");
assertEquals(map.get("bar"), Arrays.asList("baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** replaceAll((FluentCaseInsensitiveStringsMap) null) is a silent no-op; the map is unchanged. */
@Test
public void replaceAllNullTest1() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("bar", "foo, bar", "baz");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.replaceAll((FluentCaseInsensitiveStringsMap) null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
/** replaceAll((Map) null) is a silent no-op; the map is unchanged. */
@Test
public void replaceAllNullTest2() {
FluentCaseInsensitiveStringsMap map = new FluentCaseInsensitiveStringsMap();
map.add("foo", "bar");
map.add("bar", "foo, bar", "baz");
map.add("baz", Arrays.asList("foo", "bar"));
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
map.replaceAll((Map<String, Collection<String>>) null);
assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
assertEquals(map.getFirstValue("foo"), "bar");
assertEquals(map.getJoinedValue("foo", ", "), "bar");
assertEquals(map.get("foo"), Arrays.asList("bar"));
assertEquals(map.getFirstValue("bar"), "foo, bar");
assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
assertEquals(map.getFirstValue("baz"), "foo");
assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryInvalidTypeException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.DeploymentMode;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.managers.deployment.GridDeployment;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfoBean;
import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter;
import org.apache.ignite.internal.util.IgniteUtils;
import org.apache.ignite.internal.util.lang.GridPeerDeployAware;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.CA;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentLinkedHashMap;
import static org.apache.ignite.configuration.DeploymentMode.CONTINUOUS;
import static org.apache.ignite.configuration.DeploymentMode.ISOLATED;
import static org.apache.ignite.configuration.DeploymentMode.PRIVATE;
import static org.apache.ignite.events.EventType.EVT_NODE_FAILED;
import static org.apache.ignite.events.EventType.EVT_NODE_LEFT;
/**
* Deployment manager for cache.
*/
public class GridCacheDeploymentManager<K, V> extends GridCacheSharedManagerAdapter<K, V> {
/** Cache class loader. Volatile: the instance is replaced wholesale in onUndeploy0() to drop cached classes. */
private volatile ClassLoader globalLdr;
/** Queued undeploy closures per cache name; drained by unwind(). Access is guarded by synchronizing on this map. */
private final Map<String, List<CA>> undeploys = new HashMap<>();
/** Cached deployment info keyed by class loader ID. (Previous comment said "per-thread", which the code does not show.) */
private ConcurrentMap<IgniteUuid, CachedDeploymentInfo<K, V>> deps = new ConcurrentHashMap<>();
/** Collection of all known participants (Node ID -> Loader ID). */
private Map<UUID, IgniteUuid> allParticipants = new ConcurrentHashMap<>();
/** Discovery listener; registered in start0() only when peer deployment is enabled. */
private GridLocalEventListener discoLsnr;
/** Local deployment. */
private final AtomicReference<GridDeployment> locDep = new AtomicReference<>();
/** Per-thread flag toggled via ignoreOwnership(boolean); defaults to {@code false}. */
private final ThreadLocal<Boolean> ignoreOwnership = new ThreadLocal<Boolean>() {
@Override protected Boolean initialValue() {
return false;
}
};
/** Whether peer class loading is enabled; initialized from cctx.gridDeploy().enabled() in start0(). */
private boolean depEnabled;
/** {@inheritDoc} */
@Override public void start0() throws IgniteCheckedException {
// Global loader wraps the user-configured class loader from the grid configuration.
globalLdr = new CacheClassLoader(cctx.gridConfig().getClassLoader());
depEnabled = cctx.gridDeploy().enabled();
if (depEnabled) {
// When a node leaves or fails, purge any cached deployment info it sent or participated in.
discoLsnr = new GridLocalEventListener() {
@Override public void onEvent(Event evt) {
assert evt.type() == EVT_NODE_FAILED || evt.type() == EVT_NODE_LEFT : "Unexpected event: " + evt;
UUID id = ((DiscoveryEvent)evt).eventNode().id();
if (log.isDebugEnabled())
log.debug("Processing node departure: " + id);
for (Map.Entry<IgniteUuid, CachedDeploymentInfo<K, V>> entry : deps.entrySet()) {
CachedDeploymentInfo<K, V> d = entry.getValue();
if (log.isDebugEnabled())
log.debug("Examining cached info: " + d);
// Drop the info when the departed node was the sender or its removal leaves no participants.
if (d.senderId().equals(id) || d.removeParticipant(id)) {
// Two-arg remove: only removes if still mapped to the same instance (concurrent-safe).
deps.remove(entry.getKey(), d);
if (log.isDebugEnabled())
log.debug("Removed cached info [d=" + d + ", deps=" + deps + ']');
}
}
allParticipants.remove(id);
}
};
cctx.gridEvents().addLocalEventListener(discoLsnr, EVT_NODE_LEFT, EVT_NODE_FAILED);
}
}
/** {@inheritDoc} */
@Override protected void stop0(boolean cancel) {
    // Unregister the discovery listener installed by start0(), if peer deployment was enabled.
    GridLocalEventListener lsnr = discoLsnr;

    if (lsnr != null)
        cctx.gridEvents().removeLocalEventListener(lsnr);
}
/**
 * @return Local-only class loader.
 */
public ClassLoader localLoader() {
    GridDeployment localDeployment = locDep.get();

    // Fall back to the grid class loader when nothing has been deployed locally yet.
    if (localDeployment != null)
        return localDeployment.classLoader();

    return U.gridClassLoader();
}
/**
 * Gets distributed class loader. Note that
 * {@link #p2pContext(UUID, IgniteUuid, String, DeploymentMode, Map)} must be
 * called from the same thread prior to using this class loader, or the
 * loading may happen for the wrong node or context.
 *
 * @return Cache class loader.
 */
public ClassLoader globalLoader() {
// Volatile read; the loader instance is swapped out in onUndeploy0().
return globalLdr;
}
/**
 * Callback on method enter. Currently has no effect.
 */
public void onEnter() {
// No-op.
}
/**
 * Sets the thread-local "ignore ownership" flag and reports its previous state.
 *
 * @param ignore {@code True} to ignore.
 * @return Previous value of the flag for the current thread.
 */
public boolean ignoreOwnership(boolean ignore) {
    boolean prev = ignoreOwnership.get();

    ignoreOwnership.set(ignore);

    return prev;
}
/**
 * Undeploy all queued up closures.
 *
 * @param ctx Cache context.
 */
public void unwind(GridCacheContext ctx) {
    List<CA> pending;

    // Detach the whole queue under the lock so closures run outside of it.
    synchronized (undeploys) {
        pending = undeploys.remove(ctx.name());
    }

    if (pending == null)
        return;

    for (CA undeployClo : pending)
        undeployClo.apply();

    // Every closure was applied, so the count equals the queue size.
    if (log.isDebugEnabled())
        log.debug("Unwound undeploys count: " + pending.size());
}
/**
 * Undeploys given class loader.
 *
 * @param ldr Class loader to undeploy.
 * @param ctx Grid cache context.
 */
public void onUndeploy(final ClassLoader ldr, final GridCacheContext<K, V> ctx) {
    assert ldr != null;

    if (log.isDebugEnabled())
        log.debug("Received onUndeploy() request [ldr=" + ldr + ", cctx=" + cctx + ']');

    // Queue the actual undeploy work; it is executed later by unwind().
    synchronized (undeploys) {
        List<CA> queue = undeploys.get(ctx.name());

        if (queue == null) {
            queue = new ArrayList<>();

            undeploys.put(ctx.name(), queue);
        }

        queue.add(new CA() {
            @Override public void apply() {
                onUndeploy0(ldr, ctx);
            }
        });
    }

    // Unwind immediately for local and replicate caches.
    // We go through preloader for proper synchronization.
    if (ctx.isLocal())
        ctx.preloader().unwindUndeploys();
}
/**
 * Performs the actual undeploy: collects all keys whose key or value classes were loaded
 * by the given loader, clears them locally (including the DHT side of near caches and
 * off-heap storage), and finally replaces the global class loader.
 *
 * @param ldr Loader.
 * @param cacheCtx Cache context.
 */
private void onUndeploy0(final ClassLoader ldr, final GridCacheContext<K, V> cacheCtx) {
GridCacheAdapter<K, V> cache = cacheCtx.cache();
Collection<KeyCacheObject> keys = new ArrayList<>();
addEntries(ldr, keys, cache);
// For near caches also scan the backing DHT cache.
if (cache.isNear())
addEntries(ldr, keys, (((GridNearCacheAdapter)cache).dht()));
if (log.isDebugEnabled())
log.debug("Finished searching keys for undeploy [keysCnt=" + keys.size() + ']');
cache.clearLocally(keys, true);
if (cacheCtx.isNear())
cacheCtx.near().dht().clearLocally(keys, true);
// Examine swap for entries to undeploy.
int swapUndeployCnt = cacheCtx.offheap().onUndeploy(ldr);
// Warn users only for user-facing caches where something was actually removed.
if (cacheCtx.userCache() && (!keys.isEmpty() || swapUndeployCnt != 0)) {
U.quietAndWarn(log, "");
U.quietAndWarn(
log,
"Cleared all cache entries for undeployed class loader [cacheName=" + cacheCtx.name() +
", undeployCnt=" + keys.size() + ", swapUndeployCnt=" + swapUndeployCnt +
", clsLdr=" + ldr.getClass().getName() + ']');
U.quietAndWarn(
log,
" ^-- Cache auto-undeployment happens in SHARED deployment mode " +
"(to turn off, switch to CONTINUOUS mode)");
U.quietAndWarn(log, "");
}
// Avoid class caching issues inside classloader.
globalLdr = new CacheClassLoader();
}
/**
 * Collects into {@code keys} the keys of all cache entries that must be undeployed
 * for the given class loader.
 *
 * @param ldr Class loader.
 * @param keys Keys.
 * @param cache Cache.
 */
private void addEntries(ClassLoader ldr, Collection<KeyCacheObject> keys, GridCacheAdapter cache) {
    GridCacheContext cacheCtx = cache.context();

    for (GridCacheEntryEx entry : (Iterable<GridCacheEntryEx>)cache.entries()) {
        boolean rmv;

        // Near caches are checked on both the near and DHT sides.
        if (cacheCtx.isNear())
            rmv = undeploy(ldr, entry, cacheCtx.near()) || undeploy(ldr, entry, cacheCtx.near().dht());
        else
            rmv = undeploy(ldr, entry, cacheCtx.cache());

        if (rmv)
            keys.add(entry.key());
    }
}
/**
 * Decides whether a single entry must be undeployed: true when either the key's or the
 * value's class was loaded by the given class loader, or when the value cannot even be
 * unmarshalled (which indicates its class belongs to the departing loader).
 *
 * @param ldr Class loader.
 * @param e Entry.
 * @param cache Cache.
 * @return {@code True} if need to undeploy.
 */
private boolean undeploy(ClassLoader ldr, GridCacheEntryEx e, GridCacheAdapter cache) {
KeyCacheObject key = e.key();
GridCacheEntryEx entry = cache.peekEx(key);
// Entry may not exist on this cache level (e.g. near vs. DHT).
if (entry == null)
return false;
Object key0;
Object val0;
try {
CacheObject v = entry.peek();
key0 = key.value(cache.context().cacheObjectContext(), false);
assert key0 != null : "Key cannot be null for cache entry: " + e;
val0 = CU.value(v, cache.context(), false);
}
catch (GridCacheEntryRemovedException ignore) {
// Entry was concurrently removed -- nothing left to undeploy.
return false;
}
catch (BinaryInvalidTypeException ex) {
log.error("An attempt to undeploy cache with binary objects.", ex);
return false;
}
catch (IgniteCheckedException | IgniteException ignore) {
// Peek can throw runtime exception if unmarshalling failed.
// Unmarshalling failure implies the class came from the undeployed loader.
return true;
}
ClassLoader keyLdr = U.detectObjectClassLoader(key0);
ClassLoader valLdr = U.detectObjectClassLoader(val0);
// Undeploy when either key or value class originates from the given loader.
boolean res = F.eq(ldr, keyLdr) || F.eq(ldr, valLdr);
if (log.isDebugEnabled())
log.debug(S.toString("Finished examining entry",
"entryCls", e.getClass(), true,
"key", key0, true,
"keyCls", key0.getClass(), true,
"valCls", (val0 != null ? val0.getClass() : "null"), true,
"keyLdr", keyLdr, false,
"valLdr", valLdr, false,
"res", res, false));
return res;
}
/**
 * Records a peer-to-peer deployment context received from a remote node. Ignored in
 * PRIVATE/ISOLATED modes and on deployment-mode mismatch; otherwise the deployment info
 * is cached (or merged into an existing entry) and participants of departed nodes are purged.
 *
 * @param sndId Sender node ID.
 * @param ldrId Loader ID.
 * @param userVer User version.
 * @param mode Deployment mode.
 * @param participants Node participants.
 */
public void p2pContext(
UUID sndId,
IgniteUuid ldrId,
String userVer,
DeploymentMode mode,
Map<UUID, IgniteUuid> participants
) {
assert depEnabled;
if (mode == PRIVATE || mode == ISOLATED) {
ClusterNode node = cctx.discovery().node(sndId);
if (node == null) {
if (log.isDebugEnabled())
log.debug("Ignoring p2p context (sender has left) [sndId=" + sndId + ", ldrId=" + ldrId +
", userVer=" + userVer + ", mode=" + mode + ", participants=" + participants + ']');
return;
}
boolean daemon = node.isDaemon();
// Always output in debug.
if (log.isDebugEnabled())
log.debug("Ignoring deployment in PRIVATE or ISOLATED mode [sndId=" + sndId + ", ldrId=" + ldrId +
", userVer=" + userVer + ", mode=" + mode + ", participants=" + participants +
", daemon=" + daemon + ']');
// Warn users (throttled) unless the sender is a daemon node.
if (!daemon) {
LT.warn(log, "Ignoring deployment in PRIVATE or ISOLATED mode " +
"[sndId=" + sndId + ", ldrId=" + ldrId + ", userVer=" + userVer + ", mode=" + mode +
", participants=" + participants + ", daemon=" + daemon + ']');
}
return;
}
if (mode != cctx.gridConfig().getDeploymentMode()) {
LT.warn(log, "Local and remote deployment mode mismatch (please fix configuration and restart) " +
"[locDepMode=" + cctx.gridConfig().getDeploymentMode() + ", rmtDepMode=" + mode + ", rmtNodeId=" +
sndId + ']');
return;
}
if (log.isDebugEnabled())
log.debug("Setting p2p context [sndId=" + sndId + ", ldrId=" + ldrId + ", userVer=" + userVer +
", seqNum=" + ldrId.localId() + ", mode=" + mode + ", participants=" + participants +
", locDepOwner=false]");
CachedDeploymentInfo<K, V> depInfo;
// Optimistic retry loop: insert new info or merge participants into the existing entry;
// retry when a concurrent removal invalidates the entry we merged into.
while (true) {
depInfo = deps.get(ldrId);
if (depInfo == null) {
depInfo = new CachedDeploymentInfo<>(sndId, ldrId, userVer, mode, participants);
CachedDeploymentInfo<K, V> old = deps.putIfAbsent(ldrId, depInfo);
if (old != null)
depInfo = old;
else
break;
}
if (participants != null) {
if (!depInfo.addParticipants(participants, cctx)) {
// Entry is stale; remove it (only if still mapped to this instance) and retry.
deps.remove(ldrId, depInfo);
continue;
}
}
break;
}
if (cctx.discovery().node(sndId) == null) {
// Sender has left.
deps.remove(ldrId, depInfo);
allParticipants.remove(sndId);
}
// Purge participants whose nodes have already left the topology.
if (participants != null) {
for (UUID id : participants.keySet()) {
if (cctx.discovery().node(id) == null) {
if (depInfo.removeParticipant(id))
deps.remove(ldrId, depInfo);
allParticipants.remove(id);
}
}
}
}
/**
 * Adds deployment info to deployment contexts queue.
 *
 * @param info Info to add.
 */
public void addDeploymentContext(GridDeploymentInfo info) {
IgniteUuid ldrId = info.classLoaderId();
// Optimistic retry loop, same pattern as in p2pContext(): insert or merge, retry on stale entry.
while (true) {
CachedDeploymentInfo<K, V> depInfo = deps.get(ldrId);
if (depInfo == null) {
// Sender node ID is derived from the loader ID's global part.
depInfo = new CachedDeploymentInfo<>(ldrId.globalId(), ldrId, info.userVersion(), info.deployMode(),
info.participants());
CachedDeploymentInfo<K, V> old = deps.putIfAbsent(ldrId, depInfo);
if (old != null)
depInfo = old;
else
break;
}
Map<UUID, IgniteUuid> participants = info.participants();
if (participants != null) {
if (!depInfo.addParticipants(participants, cctx)) {
// Merge failed on a stale entry; remove it and retry from scratch.
deps.remove(ldrId, depInfo);
continue;
}
}
break;
}
}
/**
 * Merges the given participants into the global node -> loader-ID map and computes the
 * delta of entries that were actually new or changed.
 *
 * @param sndNodeId Sender node ID.
 * @param sndLdrId Sender loader ID.
 * @param participants Participants.
 * @param locDepOwner {@code True} if local deployment owner.
 * @return Added participants, or {@code null} if nothing changed.
 */
@Nullable private Map<UUID, IgniteUuid> addGlobalParticipants(UUID sndNodeId, IgniteUuid sndLdrId,
    Map<UUID, IgniteUuid> participants, boolean locDepOwner) {
    Map<UUID, IgniteUuid> added = null;

    if (participants != null) {
        for (Map.Entry<UUID, IgniteUuid> entry : participants.entrySet()) {
            UUID nodeId = entry.getKey();
            IgniteUuid ldrVer = entry.getValue();

            // Record (and report) only entries whose loader version actually changed.
            if (!ldrVer.equals(allParticipants.get(nodeId))) {
                allParticipants.put(nodeId, ldrVer);

                if (added == null)
                    // Consistency fix: use the U typedef (as the rest of the file does)
                    // instead of spelling out IgniteUtils for the same helper.
                    added = U.newHashMap(participants.size());

                added.put(nodeId, ldrVer);
            }
        }
    }

    // The sender itself also counts as a participant when it owns the local deployment.
    if (locDepOwner) {
        assert sndNodeId != null;
        assert sndLdrId != null;

        if (!sndLdrId.equals(allParticipants.get(sndNodeId))) {
            allParticipants.put(sndNodeId, sndLdrId);

            if (added == null)
                added = U.newHashMap(1);

            added.put(sndNodeId, sndLdrId);
        }
    }

    return added;
}
/**
 * Register local classes.
 *
 * @param objs Objects to register.
 * @throws IgniteCheckedException If registration failed.
 */
public void registerClasses(Object... objs) throws IgniteCheckedException {
// F.asList is used instead of Arrays.asList -- presumably it tolerates a null array; TODO confirm.
registerClasses(F.asList(objs));
}
/**
 * Register local classes.
 *
 * @param objs Objects to register.
 * @throws IgniteCheckedException If registration failed.
 */
public void registerClasses(Iterable<?> objs) throws IgniteCheckedException {
    // Null iterable means nothing to register.
    if (objs == null)
        return;

    for (Object obj : objs)
        registerClass(obj);
}
/**
 * Registers the class of a single object for peer deployment.
 *
 * @param obj Object whose class to register; {@code null} is a no-op.
 * @throws IgniteCheckedException If failed.
 */
public void registerClass(Object obj) throws IgniteCheckedException {
    if (obj == null)
        return;

    if (obj instanceof GridPeerDeployAware) {
        // Peer-deploy-aware objects supply their own deploy class and loader.
        GridPeerDeployAware aware = (GridPeerDeployAware)obj;

        registerClass(aware.deployClass(), aware.classLoader());

        return;
    }

    // A Class argument is registered as-is; anything else by its runtime class.
    Class<?> cls = obj instanceof Class ? (Class)obj : obj.getClass();

    registerClass(cls);
}
/**
 * Registers a class using its own detected class loader.
 *
 * @param cls Class to register; {@code null} is a no-op.
 * @throws IgniteCheckedException If failed.
 */
public void registerClass(Class<?> cls) throws IgniteCheckedException {
    if (cls != null)
        registerClass(cls, U.detectClassLoader(cls));
}
/**
 * Registers the given class, deploying it locally if the current local
 * deployment cannot already load it.
 *
 * @param cls Class to register.
 * @param ldr Class loader.
 * @throws IgniteCheckedException If registration failed, or if the loader is
 *      incompatible with the existing local deployment.
 */
public void registerClass(Class<?> cls, ClassLoader ldr) throws IgniteCheckedException {
    assert cctx.deploymentEnabled();
    // Internal cache classes never need peer deployment.
    if (cls == null || GridCacheInternal.class.isAssignableFrom(cls))
        return;
    if (ldr == null)
        ldr = U.detectClassLoader(cls);
    // Don't register remote class loaders.
    if (U.p2pLoader(ldr))
        return;
    GridDeployment dep = locDep.get();
    // Fast path: skip the deploy loop when the current deployment's loader
    // already equals (or is a parent of) the requested loader.
    if (dep == null || (!ldr.equals(dep.classLoader()) && !U.hasParent(ldr, dep.classLoader()))) {
        // CAS retry loop over the 'locDep' atomic reference.
        while (true) {
            dep = locDep.get();
            // Don't register remote class loaders.
            if (dep != null && !dep.local())
                return;
            if (dep != null) {
                ClassLoader curLdr = dep.classLoader();
                if (curLdr.equals(ldr))
                    break;
                // If current deployment is either system loader or GG loader,
                // then we don't check it, as new loader is most likely wider.
                if (!curLdr.equals(U.gridClassLoader()) && dep.deployedClass(cls.getName()) != null)
                    // Local deployment can load this class already, so no reason
                    // to look for another class loader.
                    break;
            }
            GridDeployment newDep = cctx.gridDeploy().deploy(cls, ldr);
            if (newDep != null) {
                if (dep != null) {
                    // Check new deployment.
                    if (newDep.deployedClass(dep.sampleClassName()) != null) {
                        // CAS may fail on concurrent update; loop retries then.
                        if (locDep.compareAndSet(dep, newDep))
                            break; // While loop.
                    }
                    else
                        throw new IgniteCheckedException("Encountered incompatible class loaders for cache " +
                            "[class1=" + cls.getName() + ", class2=" + dep.sampleClassName() + ']');
                }
                else if (locDep.compareAndSet(null, newDep))
                    break; // While loop.
            }
            else
                throw new IgniteCheckedException("Failed to deploy class for local deployment [clsName=" + cls.getName() +
                    ", ldr=" + ldr + ']');
        }
    }
}
/**
 * Prepares deployable object by attaching deployment info to it, unless the
 * info was already set.
 *
 * @param deployable Deployable object.
 */
public void prepare(GridCacheDeployable deployable) {
    assert depEnabled;

    // Only set deployment info if it was not set automatically.
    if (deployable.deployInfo() != null)
        return;

    GridDeploymentInfoBean dep = globalDeploymentInfo();

    if (dep == null) {
        GridDeployment locDep0 = locDep.get();

        if (locDep0 != null)
            // Will copy sequence number to bean.
            dep = new GridDeploymentInfoBean(locDep0);
    }

    if (dep != null)
        deployable.prepare(dep);

    if (log.isDebugEnabled())
        log.debug("Prepared grid cache deployable [dep=" + dep + ", deployable=" + deployable + ']');
}
/**
 * Finds the first cached deployment whose sender is still in topology and
 * which has at least one live participant.
 *
 * @return First global deployment, or {@code null} if none qualifies.
 */
@Nullable public GridDeploymentInfoBean globalDeploymentInfo() {
    assert depEnabled;

    // Do not return info if mode is CONTINUOUS.
    // In this case deployment info will be set by GridCacheMessage.prepareObject().
    if (cctx.gridConfig().getDeploymentMode() == CONTINUOUS)
        return null;

    for (CachedDeploymentInfo<K, V> d : deps.values()) {
        // Sender has left - skip this deployment.
        if (cctx.discovery().node(d.senderId()) == null)
            continue;

        Map<UUID, IgniteUuid> participants = d.participants();

        if (participants == null)
            continue;

        for (UUID id : participants.keySet()) {
            // At least one participant is still in the grid.
            if (cctx.discovery().node(id) != null)
                return new GridDeploymentInfoBean(d.loaderId(), d.userVersion(), d.mode(), participants);
        }
    }

    return null;
}
/** {@inheritDoc} */
@Override public void printMemoryStats() {
    // Dumps sizes of the internal bookkeeping collections for diagnostics.
    X.println(">>> ");
    X.println(">>> Cache deployment manager memory stats [igniteInstanceName=" + cctx.igniteInstanceName() + ']');
    X.println(">>> Undeploys: " + undeploys.size());
    X.println(">>> Cached deployments: " + deps.size());
    X.println(">>> All participants: " + allParticipants.size());
}
/**
 * @param ldr Class loader to get ID for.
 * @return ID for given class loader or {@code null} if given loader is not
 *      a grid deployment class loader (or {@code ldr} itself is {@code null}).
 */
@Nullable public IgniteUuid getClassLoaderId(@Nullable ClassLoader ldr) {
    return ldr != null ? cctx.gridDeploy().getClassLoaderId(ldr) : null;
}
/**
 * @param ldrId Class loader ID.
 * @return Class loader for the given ID, or {@code null} if the deployment was not found.
 */
@Nullable public ClassLoader getClassLoader(IgniteUuid ldrId) {
    assert ldrId != null;

    GridDeployment deployment = cctx.gridDeploy().getDeployment(ldrId);

    if (deployment == null)
        return null;

    return deployment.classLoader();
}
/**
 * @return {@code True} if the current thread's context class loader is global.
 */
public boolean isGlobalLoader() {
    ClassLoader ctxLdr = Thread.currentThread().getContextClassLoader();

    return cctx.gridDeploy().isGlobalLoader(ctxLdr);
}
/**
 * Cache class loader: resolves classes first from the local deployment, then
 * from any cached remote deployment in {@code deps}, and finally from the
 * parent loader.
 */
private class CacheClassLoader extends ClassLoader implements CacheClassLoaderMarker {
    /** Local class-path exclude patterns for P2P loading (may be {@code null}). */
    private final String[] p2pExclude;

    /**
     * Sets context class loader as parent.
     */
    private CacheClassLoader() {
        this(U.detectClassLoader(GridCacheDeploymentManager.class));
    }

    /**
     * Sets context class loader.
     * If user's class loader is null then will be used default class loader.
     *
     * @param classLdr User's class loader.
     */
    private CacheClassLoader(ClassLoader classLdr) {
        super(classLdr != null ? classLdr : U.detectClassLoader(GridCacheDeploymentManager.class));

        p2pExclude = cctx.gridConfig().getPeerClassLoadingLocalClassPathExclude();
    }

    /** {@inheritDoc} */
    @Override public Class<?> loadClass(String name) throws ClassNotFoundException {
        // Always delegate to deployment manager.
        return findClass(name);
    }

    /** {@inheritDoc} */
    @Override protected Class<?> findClass(String name) throws ClassNotFoundException {
        // Try local deployment first.
        if (!isLocallyExcluded(name)) {
            GridDeployment d = cctx.gridDeploy().getLocalDeployment(name);

            if (d != null) {
                // Raw 'Class' replaced with 'Class<?>' (no behavior change).
                Class<?> cls = d.deployedClass(name);

                if (cls != null)
                    return cls;
            }
        }

        // Fall back to cached remote deployments.
        for (CachedDeploymentInfo<K, V> t : deps.values()) {
            UUID sndId = t.senderId();
            IgniteUuid ldrId = t.loaderId();
            String userVer = t.userVersion();
            DeploymentMode mode = t.mode();
            Map<UUID, IgniteUuid> participants = t.participants();

            GridDeployment d = cctx.gridDeploy().getGlobalDeployment(
                mode,
                name,
                name,
                userVer,
                sndId,
                ldrId,
                participants,
                F.<ClusterNode>alwaysTrue());

            if (d != null) {
                Class<?> cls = d.deployedClass(name);

                if (cls != null)
                    return cls;
            }
        }

        // Last resort: parent loader. loadClass normally throws
        // ClassNotFoundException rather than returning null, so the null check
        // below is defensive only.
        Class<?> cls = getParent().loadClass(name);

        if (cls != null)
            return cls;

        throw new ClassNotFoundException("Failed to load class [name=" + name+ ", ctx=" + deps + ']');
    }

    /**
     * @param name Name of the class.
     * @return {@code True} if locally excluded.
     */
    private boolean isLocallyExcluded(String name) {
        if (p2pExclude != null) {
            for (String path : p2pExclude) {
                // Remove star (*) at the end.
                if (path.endsWith("*"))
                    path = path.substring(0, path.length() - 1);

                if (name.startsWith(path))
                    return true;
            }
        }

        return false;
    }
}
/**
 * Cached deployment info for a single remote class loader: immutable loader
 * metadata plus a mutable participant map maintained as nodes join and leave.
 */
private static class CachedDeploymentInfo<K, V> {
    /** Sender node ID. */
    private final UUID sndId;

    /** Class loader ID. */
    private final IgniteUuid ldrId;

    /** User version. */
    private final String userVer;

    /** Deployment mode. */
    private final DeploymentMode depMode;

    /** Participants map; {@code null} when there are none. */
    @GridToStringInclude
    private Map<UUID, IgniteUuid> participants;

    /** Read write lock for adding and removing participants. */
    private final ReadWriteLock participantsLock = new ReentrantReadWriteLock();

    /**
     * @param sndId Sender.
     * @param ldrId Loader ID.
     * @param userVer User version.
     * @param depMode Deployment mode.
     * @param participants Participants.
     */
    private CachedDeploymentInfo(UUID sndId, IgniteUuid ldrId, String userVer, DeploymentMode depMode,
        Map<UUID, IgniteUuid> participants) {
        assert sndId.equals(ldrId.globalId()) || participants != null;

        this.sndId = sndId;
        this.ldrId = ldrId;
        this.userVer = userVer;
        this.depMode = depMode;
        this.participants = participants == null || participants.isEmpty() ? null :
            new ConcurrentLinkedHashMap<>(participants);
    }

    /**
     * @param newParticipants Participants to add.
     * @param cctx Cache context.
     * @return {@code True} if cached info is valid.
     */
    boolean addParticipants(Map<UUID, IgniteUuid> newParticipants, GridCacheSharedContext<K, V> cctx) {
        // Read lock allows concurrent additions while excluding removeParticipant;
        // the participants map itself is a concurrent map, so mutation here is safe.
        participantsLock.readLock().lock();

        try {
            // Empty (but non-null) map signals the entry has been invalidated.
            if (participants != null && participants.isEmpty())
                return false;

            for (Map.Entry<UUID, IgniteUuid> e : newParticipants.entrySet()) {
                assert e.getKey().equals(e.getValue().globalId());

                // BUG FIX: skip entries whose node is NOT in topology, i.e. the
                // node has left. The previous '!= null' check was inverted and
                // skipped every live participant instead.
                if (cctx.discovery().node(e.getKey()) == null)
                    // Participant has left.
                    continue;

                if (participants == null)
                    participants = new ConcurrentLinkedHashMap<>();

                if (!participants.containsKey(e.getKey()))
                    participants.put(e.getKey(), e.getValue());
            }

            return true;
        }
        finally {
            participantsLock.readLock().unlock();
        }
    }

    /**
     * @param leftNodeId Left node ID.
     * @return {@code True} if participant has been removed and there are no participants left.
     */
    boolean removeParticipant(UUID leftNodeId) {
        assert leftNodeId != null;

        participantsLock.writeLock().lock();

        try {
            return participants != null && participants.remove(leftNodeId) != null && participants.isEmpty();
        }
        finally {
            participantsLock.writeLock().unlock();
        }
    }

    /**
     * @return Participants.
     */
    Map<UUID, IgniteUuid> participants() {
        return participants;
    }

    /**
     * @return Sender ID.
     */
    UUID senderId() {
        return sndId;
    }

    /**
     * @return Class loader ID.
     */
    IgniteUuid loaderId() {
        return ldrId;
    }

    /**
     * @return User version.
     */
    String userVersion() {
        return userVer;
    }

    /**
     * @return Deployment mode.
     */
    public DeploymentMode mode() {
        return depMode;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(CachedDeploymentInfo.class, this);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.operators.collect;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.accumulators.SerializedListAccumulator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.coordination.CoordinationRequestGateway;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * A fetcher which fetches query results from sink and provides exactly-once semantics.
 *
 * <p>While the job runs, results are requested from the sink's operator coordinator;
 * once the job terminates, the final results are read back from the job's
 * accumulator. {@link ResultBuffer} implements the offset/version protocol.
 */
public class CollectResultFetcher<T> {

    /** Default sleep between two fetch attempts when no data is available. */
    private static final int DEFAULT_RETRY_MILLIS = 100;

    /** Timeout for fetching accumulator-backed final results. */
    private static final long DEFAULT_ACCUMULATOR_GET_MILLIS = 10000;

    private static final Logger LOG = LoggerFactory.getLogger(CollectResultFetcher.class);

    /** Completed once the collect sink operator's ID is known. */
    private final CompletableFuture<OperatorID> operatorIdFuture;

    /** Name of the accumulator holding the final serialized results. */
    private final String accumulatorName;

    /** Sleep time between retries; non-positive disables sleeping. */
    private final int retryMillis;

    private ResultBuffer buffer;

    // Both are set together by setJobClient() and must be configured before first use.
    @Nullable
    private JobClient jobClient;
    @Nullable
    private CoordinationRequestGateway gateway;

    /** True once termination was detected and accumulator results were drained. */
    private boolean jobTerminated;

    /** True after close(); next() then always returns null. */
    private boolean closed;

    public CollectResultFetcher(
            CompletableFuture<OperatorID> operatorIdFuture,
            TypeSerializer<T> serializer,
            String accumulatorName) {
        this(
            operatorIdFuture,
            serializer,
            accumulatorName,
            DEFAULT_RETRY_MILLIS);
    }

    CollectResultFetcher(
            CompletableFuture<OperatorID> operatorIdFuture,
            TypeSerializer<T> serializer,
            String accumulatorName,
            int retryMillis) {
        this.operatorIdFuture = operatorIdFuture;
        this.accumulatorName = accumulatorName;
        this.retryMillis = retryMillis;

        this.buffer = new ResultBuffer(serializer);

        this.jobTerminated = false;
        this.closed = false;
    }

    /**
     * Registers the job client used for coordination requests and status queries.
     * The client must also implement {@link CoordinationRequestGateway}.
     */
    public void setJobClient(JobClient jobClient) {
        Preconditions.checkArgument(
            jobClient instanceof CoordinationRequestGateway,
            "Job client must be a CoordinationRequestGateway. This is a bug.");
        this.jobClient = jobClient;
        this.gateway = (CoordinationRequestGateway) jobClient;
    }

    /**
     * Returns the next user-visible result, retrying until one is available.
     * Returns {@code null} once the job has terminated and all results were
     * consumed, or when this fetcher is closed.
     *
     * @throws IOException If accumulator results cannot be fetched or deserialized.
     */
    public T next() throws IOException {
        if (closed) {
            return null;
        }

        // this is to avoid sleeping before first try
        boolean beforeFirstTry = true;
        do {
            T res = buffer.next();
            if (res != null) {
                // we still have user-visible results, just use them
                return res;
            } else if (jobTerminated) {
                // no user-visible results, but job has terminated, we have to return
                return null;
            } else if (!beforeFirstTry) {
                // no results but job is still running, sleep before retry
                sleepBeforeRetry();
            }
            beforeFirstTry = false;

            if (isJobTerminated()) {
                // job terminated, read results from accumulator
                jobTerminated = true;
                Tuple2<Long, CollectCoordinationResponse<T>> accResults = getAccumulatorResults();
                buffer.dealWithResponse(accResults.f1, accResults.f0);
                buffer.complete();
            } else {
                // job still running, try to fetch some results
                long requestOffset = buffer.offset;
                CollectCoordinationResponse<T> response;
                try {
                    response = sendRequest(buffer.version, requestOffset);
                } catch (Exception e) {
                    LOG.warn("An exception occurs when fetching query results", e);
                    continue;
                }
                // the response will contain data (if any) starting exactly from requested offset
                buffer.dealWithResponse(response, requestOffset);
            }
        } while (true);
    }

    /**
     * Cancels the job (if still running) and marks this fetcher closed.
     * Idempotent.
     */
    public void close() {
        if (closed) {
            return;
        }

        cancelJob();
        closed = true;
    }

    /**
     * Sends a fetch request for results starting at {@code offset} to the sink's
     * operator coordinator and blocks for the response.
     */
    @SuppressWarnings("unchecked")
    private CollectCoordinationResponse<T> sendRequest(
            String version,
            long offset) throws InterruptedException, ExecutionException {
        checkJobClientConfigured();

        OperatorID operatorId = operatorIdFuture.getNow(null);
        Preconditions.checkNotNull(operatorId, "Unknown operator ID. This is a bug.");

        CollectCoordinationRequest request = new CollectCoordinationRequest(version, offset);
        return (CollectCoordinationResponse<T>) gateway.sendCoordinationRequest(operatorId, request).get();
    }

    /**
     * Reads the final results from the job's accumulator after termination.
     *
     * @return Tuple of (offset, final response) as serialized by the sink.
     * @throws IOException If the execution result or accumulator cannot be read.
     */
    private Tuple2<Long, CollectCoordinationResponse<T>> getAccumulatorResults() throws IOException {
        checkJobClientConfigured();

        JobExecutionResult executionResult;
        try {
            // this timeout is sort of hack, see comments in isJobTerminated for explanation
            executionResult = jobClient.getJobExecutionResult(getClass().getClassLoader()).get(
                DEFAULT_ACCUMULATOR_GET_MILLIS, TimeUnit.MILLISECONDS);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            throw new IOException("Failed to fetch job execution result", e);
        }

        ArrayList<byte[]> accResults = executionResult.getAccumulatorResult(accumulatorName);
        if (accResults == null) {
            // job terminates abnormally
            throw new IOException("Job terminated abnormally, no job execution result can be fetched");
        }

        try {
            List<byte[]> serializedResults =
                SerializedListAccumulator.deserializeList(accResults, BytePrimitiveArraySerializer.INSTANCE);
            byte[] serializedResult = serializedResults.get(0);
            return CollectSinkFunction.deserializeAccumulatorResult(serializedResult);
        } catch (ClassNotFoundException | IOException e) {
            // this is impossible
            throw new IOException("Failed to deserialize accumulator results", e);
        }
    }

    /**
     * Best-effort check whether the job has reached a globally terminal state.
     * On any error this conservatively reports {@code true} (see comment below).
     */
    private boolean isJobTerminated() {
        checkJobClientConfigured();

        try {
            JobStatus status = jobClient.getJobStatus().get();
            return status.isGloballyTerminalState();
        } catch (Exception e) {
            // TODO
            // This is sort of hack.
            // Currently different execution environment will have different behaviors
            // when fetching a finished job status.
            // For example, standalone session cluster will return a normal FINISHED,
            // while mini cluster will throw IllegalStateException,
            // and yarn per job will throw ApplicationNotFoundException.
            // We have to assume that job has finished in this case.
            // Change this when these behaviors are unified.
            LOG.warn("Failed to get job status so we assume that the job has terminated. Some data might be lost.", e);
            return true;
        }
    }

    /** Cancels the job if it has not yet reached a terminal state. */
    private void cancelJob() {
        checkJobClientConfigured();

        if (!isJobTerminated()) {
            jobClient.cancel();
        }
    }

    /** Sleeps {@code retryMillis} between fetch attempts; no-op if non-positive. */
    private void sleepBeforeRetry() {
        if (retryMillis <= 0) {
            return;
        }

        try {
            // TODO a more proper retry strategy?
            Thread.sleep(retryMillis);
        } catch (InterruptedException e) {
            LOG.warn("Interrupted when sleeping before a retry", e);
        }
    }

    /** Fails fast if setJobClient() has not been called yet. */
    private void checkJobClientConfigured() {
        Preconditions.checkNotNull(jobClient, "Job client must be configured before first use.");
        Preconditions.checkNotNull(gateway, "Coordination request gateway must be configured before first use.");
    }

    /**
     * A buffer which encapsulates the logic of dealing with the response from the {@link CollectSinkFunction}.
     * See Java doc of {@link CollectSinkFunction} for explanation of this communication protocol.
     */
    private class ResultBuffer {

        private static final String INIT_VERSION = "";

        /** Results fetched but not yet consumed by the user. */
        private final LinkedList<T> buffer;
        private final TypeSerializer<T> serializer;

        // for detailed explanation of the following 3 variables, see Java doc of CollectSinkFunction
        // `version` is to check if the sink restarts
        private String version;
        // `offset` is the offset of the next result we want to fetch
        private long offset;

        // userVisibleHead <= user visible results offset < userVisibleTail
        private long userVisibleHead;
        private long userVisibleTail;

        private ResultBuffer(TypeSerializer<T> serializer) {
            this.buffer = new LinkedList<>();
            this.serializer = serializer;

            this.version = INIT_VERSION;
            this.offset = 0;

            this.userVisibleHead = 0;
            this.userVisibleTail = 0;
        }

        /** Returns the next user-visible result, or {@code null} if none is visible yet. */
        private T next() {
            if (userVisibleHead == userVisibleTail) {
                return null;
            }
            T ret = buffer.removeFirst();
            userVisibleHead++;

            sanityCheck();
            return ret;
        }

        /**
         * Merges a coordinator response into the buffer: handles sink restarts
         * (version change), advances the user-visible window to the last
         * checkpointed offset, and appends any new results.
         */
        private void dealWithResponse(CollectCoordinationResponse<T> response, long responseOffset) throws IOException {
            String responseVersion = response.getVersion();
            long responseLastCheckpointedOffset = response.getLastCheckpointedOffset();
            List<T> results = response.getResults(serializer);

            // we first check version in the response to decide whether we should throw away dirty results
            if (!version.equals(responseVersion)) {
                // sink restarted, we revert back to where the sink tells us
                for (long i = 0; i < offset - responseLastCheckpointedOffset; i++) {
                    buffer.removeLast();
                }
                version = responseVersion;
                offset = responseLastCheckpointedOffset;
            }

            // we now check if more results can be seen by the user
            if (responseLastCheckpointedOffset > userVisibleTail) {
                // lastCheckpointedOffset increases, this means that more results have been
                // checkpointed, and we can give these results to the user
                userVisibleTail = responseLastCheckpointedOffset;
            }

            if (!results.isEmpty()) {
                // response contains some data, add them to buffer
                int addStart = (int) (offset - responseOffset);
                List<T> addedResults = results.subList(addStart, results.size());
                buffer.addAll(addedResults);
                offset += addedResults.size();
            }

            sanityCheck();
        }

        /** Makes every buffered result user-visible (used after job termination). */
        private void complete() {
            userVisibleTail = offset;
        }

        /** Verifies the invariant userVisibleHead <= userVisibleTail <= offset. */
        private void sanityCheck() {
            Preconditions.checkState(
                userVisibleHead <= userVisibleTail,
                "userVisibleHead should not be larger than userVisibleTail. This is a bug.");
            Preconditions.checkState(
                userVisibleTail <= offset,
                "userVisibleTail should not be larger than offset. This is a bug.");
        }
    }
}
| |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.transport.wsn;
import static java.util.concurrent.TimeUnit.SECONDS;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.security.KeyStore;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.mina.core.service.IoHandler;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.ResourceAddressFactory;
import org.kaazing.gateway.transport.BridgeServiceFactory;
import org.kaazing.gateway.transport.IoHandlerAdapter;
import org.kaazing.gateway.transport.TransportFactory;
import org.kaazing.gateway.transport.http.HttpAcceptor;
import org.kaazing.gateway.transport.http.HttpConnector;
import org.kaazing.gateway.transport.nio.internal.socket.NioSocketAcceptor;
import org.kaazing.gateway.transport.nio.internal.socket.NioSocketConnector;
import org.kaazing.gateway.transport.ssl.SslAcceptor;
import org.kaazing.gateway.transport.ssl.bridge.filter.SslCertificateSelectionFilter;
import org.kaazing.gateway.transport.ws.WsAcceptor;
import org.kaazing.gateway.util.scheduler.SchedulerProvider;
import org.kaazing.mina.core.future.UnbindFuture;
/**
 * Verifies that repeated bind/unbind cycles on a wsn (and wsn+ssl) acceptor
 * leave the binding maps of every transport layer (wsn, http, ssl, tcp) empty.
 */
public class WsnBindingsTest {

    private SchedulerProvider schedulerProvider;
    private ResourceAddressFactory addressFactory;
    private NioSocketConnector tcpConnector;
    private HttpConnector httpConnector;
    private WsnConnector wsnConnector;
    private NioSocketAcceptor tcpAcceptor;
    private SslAcceptor sslAcceptor;
    private HttpAcceptor httpAcceptor;
    private WsnAcceptor wsnAcceptor;

    private KeyStore keyStore;
    private String keyStoreFile;
    private String keyStoreFilePath;
    private String keyStorePassword;
    private String keyStorePasswordFile;

    private KeyStore trustStore;
    private String trustStoreFile;
    private String trustStoreFilePath;

    /**
     * Reads the first line of the given classpath resource as a password and
     * records the resolved file path in {@code keyStorePasswordFile}.
     */
    private String getPassword(String file)
        throws Exception {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        File f = new File(loader.getResource(file).toURI());
        keyStorePasswordFile = f.getPath();

        // try-with-resources guarantees the reader (and underlying FileReader) is
        // closed even if readLine throws; the original closed it only on success.
        try (BufferedReader br = new BufferedReader(new FileReader(f))) {
            return br.readLine();
        }
    }

    /**
     * Loads the JCEKS key store from the given classpath resource using
     * {@code keyStorePassword}, which must already be initialized.
     */
    private KeyStore getKeyStore(String file) throws Exception {
        KeyStore ks = KeyStore.getInstance("JCEKS");

        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        File f = new File(loader.getResource(file).toURI());
        keyStoreFile = f.getPath();
        keyStoreFilePath = f.getAbsolutePath();

        // try-with-resources replaces the manual try/finally close.
        try (FileInputStream fis = new FileInputStream(f)) {
            ks.load(fis, keyStorePassword.toCharArray());
        }

        return ks;
    }

    /**
     * Loads the JCEKS trust store (no integrity password) from the given
     * classpath resource.
     */
    private KeyStore getTrustStore(String file) throws Exception {
        KeyStore ks = KeyStore.getInstance("JCEKS");

        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        File f = new File(loader.getResource(file).toURI());
        trustStoreFile = f.getPath();
        trustStoreFilePath = f.getAbsolutePath();

        try (FileInputStream fis = new FileInputStream(f)) {
            ks.load(fis, null);
        }

        return ks;
    }

    /** Builds the security context from the key/trust stores loaded in init(). */
    private TestSecurityContext getSecurityContext() throws Exception {
        return new TestSecurityContext(keyStore, keyStoreFile, keyStoreFilePath, keyStorePassword.toCharArray(),
            keyStorePasswordFile, trustStore, trustStoreFile, trustStoreFilePath, null);
    }

    @After
    public void tearDown() throws Exception {
        // For reasons unknown, tcpAcceptor.unbind does not actually free up the bound port until dispose is called.
        // This causes the next test method to fail to bind.
        // NOTE(review): tcpConnector and tcpAcceptor are disposed again in
        // disposeConnector(); dispose appears to be treated as idempotent here -
        // confirm before removing either call.
        tcpConnector.dispose();
        sslAcceptor.dispose();
        tcpAcceptor.dispose();
        schedulerProvider.shutdownNow();
    }

    @Rule
    public TestRule timeout = new DisableOnDebug(new Timeout(5, SECONDS));

    @Before
    public void init() throws Exception {
        keyStorePassword = getPassword("keystore.pw");
        keyStore = getKeyStore("keystore.db");
        trustStore = getTrustStore("truststore-JCEKS.db");

        SslCertificateSelectionFilter.setCurrentSession(null, true);
        SslCertificateSelectionFilter.setCurrentSession(null, false);

        TestSecurityContext securityContext = getSecurityContext();

        schedulerProvider = new SchedulerProvider();
        addressFactory = ResourceAddressFactory.newResourceAddressFactory();

        // Collections.emptyMap() avoids the raw-type use of Collections.EMPTY_MAP.
        TransportFactory transportFactory = TransportFactory.newTransportFactory(Collections.emptyMap());
        BridgeServiceFactory serviceFactory = new BridgeServiceFactory(transportFactory);

        tcpAcceptor = (NioSocketAcceptor)transportFactory.getTransport("tcp").getAcceptor();
        tcpAcceptor.setResourceAddressFactory(addressFactory);
        tcpAcceptor.setBridgeServiceFactory(serviceFactory);
        tcpAcceptor.setSchedulerProvider(schedulerProvider);

        tcpConnector = (NioSocketConnector)transportFactory.getTransport("tcp").getConnector();
        tcpConnector.setResourceAddressFactory(addressFactory);
        tcpConnector.setBridgeServiceFactory(serviceFactory);

        sslAcceptor = (SslAcceptor)transportFactory.getTransport("ssl").getAcceptor();
        sslAcceptor.setSecurityContext(securityContext);
        sslAcceptor.setBridgeServiceFactory(serviceFactory);

        httpAcceptor = (HttpAcceptor)transportFactory.getTransport("http").getAcceptor();
        httpAcceptor.setBridgeServiceFactory(serviceFactory);
        httpAcceptor.setResourceAddressFactory(addressFactory);
        httpAcceptor.setSchedulerProvider(schedulerProvider);

        httpConnector = (HttpConnector)transportFactory.getTransport("http").getConnector();
        httpConnector.setBridgeServiceFactory(serviceFactory);
        httpConnector.setResourceAddressFactory(addressFactory);

        wsnAcceptor = (WsnAcceptor)transportFactory.getTransport("wsn").getAcceptor();
        wsnAcceptor.setBridgeServiceFactory(serviceFactory);
        wsnAcceptor.setResourceAddressFactory(addressFactory);
        wsnAcceptor.setSchedulerProvider(schedulerProvider);

        WsAcceptor wsAcceptor = (WsAcceptor) transportFactory.getTransport("ws").getAcceptor();
        wsAcceptor.setWsnAcceptor(wsnAcceptor);

        wsnConnector = (WsnConnector)transportFactory.getTransport("wsn").getConnector();
        wsnConnector.setBridgeServiceFactory(serviceFactory);
        wsnConnector.setResourceAddressFactory(addressFactory);
    }

    @After
    public void disposeConnector() {
        // Null checks guard against a failure part-way through init().
        if (tcpAcceptor != null) {
            tcpAcceptor.dispose();
        }
        if (httpAcceptor != null) {
            httpAcceptor.dispose();
        }
        if (wsnAcceptor != null) {
            wsnAcceptor.dispose();
        }
        if (tcpConnector != null) {
            tcpConnector.dispose();
        }
        if (httpConnector != null) {
            httpConnector.dispose();
        }
        if (wsnConnector != null) {
            wsnConnector.dispose();
        }
    }

    @Test
    public void shouldBindAndUnbindLeavingEmptyBindingsMaps() throws Exception {
        Map<String, Object> acceptOptions = new HashMap<>();

        final String connectURIString = "wsn://localhost:8004/echo";
        final ResourceAddress bindAddress =
            addressFactory.newResourceAddress(
                connectURIString,
                acceptOptions);

        final IoHandler ioHandler = new IoHandlerAdapter();

        // Bind the same address repeatedly, then unbind the same number of times;
        // every transport layer's binding map must end up empty.
        // NOTE(review): the 10s await exceeds the 5s class-level Timeout rule, so
        // the rule would fire first on a hung unbind - confirm this is intended.
        for (int i = 0; i < 10; i++) {
            wsnAcceptor.bind(bindAddress, ioHandler, null);
        }
        for (int j = 0; j < 10; j++) {
            UnbindFuture future = wsnAcceptor.unbind(bindAddress);
            org.junit.Assert.assertTrue("Unbind failed", future.await(10, TimeUnit.SECONDS));
        }

        org.junit.Assert.assertTrue(wsnAcceptor.emptyBindings());
        org.junit.Assert.assertTrue(httpAcceptor.emptyBindings());
        org.junit.Assert.assertTrue(tcpAcceptor.emptyBindings());
    }

    @Test
    public void shouldBindAndUnbindSecureAddressesLeavingEmptyBindingsMaps() throws Exception {
        Map<String, Object> acceptOptions = new HashMap<>();

        final String connectURIString = "wsn+ssl://localhost:8005/echo";
        final ResourceAddress bindAddress =
            addressFactory.newResourceAddress(
                connectURIString,
                acceptOptions);

        final IoHandler ioHandler = new IoHandlerAdapter();

        // Same as above, but over ssl so the ssl acceptor's bindings are checked too.
        for (int i = 0; i < 10; i++) {
            wsnAcceptor.bind(bindAddress, ioHandler, null);
        }
        for (int j = 0; j < 10; j++) {
            UnbindFuture future = wsnAcceptor.unbind(bindAddress);
            org.junit.Assert.assertTrue("Unbind failed", future.await(10, TimeUnit.SECONDS));
        }

        org.junit.Assert.assertTrue(wsnAcceptor.emptyBindings());
        org.junit.Assert.assertTrue(sslAcceptor.emptyBindings());
        org.junit.Assert.assertTrue(httpAcceptor.emptyBindings());
        org.junit.Assert.assertTrue(tcpAcceptor.emptyBindings());
    }
}
| |
package outland.feature;
import com.google.common.collect.Sets;
import java.util.stream.IntStream;
import org.junit.Test;
import outland.feature.proto.Feature;
import outland.feature.proto.FeatureOption;
import outland.feature.proto.NamespaceFeature;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
public class FeatureRecordTest {
@Test
public void testSelectDefault() {
    // An off feature selected in the default namespace yields its control option.
    final FeatureRecord record = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-string-off-default-on-namespace-off-namespace.json"));

    assertEquals("an off feature returns its control option",
        "green", record.select(ServerConfiguration.DEFAULT_NAMESPACE));
}
@Test
public void testSelectOptions() {
    // The IntStream loops stress the weighted sampler across many draws.
    final FeatureRecord stringFeature = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-string-off-default-on-namespace-off-namespace.json"));

    IntStream.range(0, 100).forEach(ignored -> {
        assertEquals("an off feature returns its control option",
            "green", stringFeature.select());

        // production namespace: off, control red; red's weight is 0 so it can't be sampled
        assertEquals("an off namespace feature returns its control option",
            "red", stringFeature.select("production"));

        // development namespace: on, control blue; red weight 10000 => sample is always red
        assertEquals("an on namespace feature returns its weighted option",
            "red", stringFeature.select("development"));
    });

    final FeatureRecord alwaysTrueBool = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-bool-off-default-on-namespace-always-true.json"));

    IntStream.range(0, 100).forEach(ignored -> {
        // off by default, so the control option "false" is returned
        assertEquals("an off bool returns its control option",
            "false", alwaysTrueBool.select());

        // weighted to 10K true with a false control, so an on select must be true
        assertEquals("an on namespace feature returns its weighted option",
            "true", alwaysTrueBool.select("development"));
    });

    final FeatureRecord alwaysFalseBool = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-bool-on-default-off-namespace-always-false.json"));

    IntStream.range(0, 100).forEach(ignored -> {
        // false control weighted to 10K true, so a select can't return anything but true
        assertEquals("an on bool returns its weighted option",
            "true", alwaysFalseBool.select());

        /*
         * Slightly surprising case: the namespace is off and the weights are 100%
         * false, but the control is true, so the off path selects into "true".
         */
        assertEquals("an off namespace feature returns its control option, even if "
                + "that control is 'true'",
            "true", alwaysFalseBool.select("development"));
    });
}
@Test
public void testSelectBoolOptions() {
    // Mirrors testSelectOptions but exercises the selectBoolean wrapper instead.
    final FeatureRecord alwaysTrueBool = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-bool-off-default-on-namespace-always-true.json"));

    IntStream.range(0, 100).forEach(ignored -> {
        assertFalse("an off bool returns its control option",
            alwaysTrueBool.selectBoolean());

        assertTrue("an on namespace feature returns its weighted option",
            alwaysTrueBool.selectBoolean("development"));
    });

    final FeatureRecord alwaysFalseBool = FeatureRecord.build(TestSupport.loadFeature(
        "json/feature-select-bool-on-default-off-namespace-always-false.json"));

    IntStream.range(0, 100).forEach(ignored -> {
        assertTrue("an on bool returns its weighted option",
            alwaysFalseBool.selectBoolean());

        assertTrue("an off namespace feature returns its control option, even if "
                + "that control is 'true'",
            alwaysFalseBool.selectBoolean("development"));
    });
}
@Test
public void testEnabledFlag() {
    final String namespace = "staging";
    // Flag whose default state is on but whose "staging" namespace is off.
    final Feature flagOnFeature =
            TestSupport.loadFeature("json/feature-flag-on-default-off-namespace.json");
    final FeatureRecord flagOnRecord = FeatureRecord.build(flagOnFeature);
    assertTrue("true when default state is on",
            flagOnRecord.enabled());
    assertTrue("true when namespace missing and falls back to default",
            flagOnRecord.enabled("missing"));
    assertFalse("false when an existing namespace state is off",
            flagOnRecord.enabled(namespace));
    // Flag whose default state is off but whose "staging" namespace is on.
    final Feature flagOffFeature =
            TestSupport.loadFeature("json/feature-flag-off-default-on-namespace.json");
    final FeatureRecord flagOffRecord = FeatureRecord.build(flagOffFeature);
    assertFalse("false when default state is off",
            flagOffRecord.enabled());
    assertFalse("false when namespace missing and falls back to default",
            flagOffRecord.enabled("missing"));
    assertTrue("true when an existing namespace state is on",
            flagOffRecord.enabled(namespace));
}
@Test
public void testEnabledString() {
    /*
     State-level enablement of a string option: for enabled() we only look at the
     on/off states of the feature, never at selecting the option for a value.
    */
    final Feature stringFeature =
            TestSupport.loadFeature("json/feature-string-off-default-on-namespace.json");
    final FeatureRecord stringRecord = FeatureRecord.build(stringFeature);
    final String namespace = "development";
    assertFalse("false when default state is off",
            stringRecord.enabled());
    assertFalse("false when namespace missing and falls back to default",
            stringRecord.enabled("missing"));
    assertTrue("true when an existing namespace state is on",
            stringRecord.enabled(namespace));
}
@Test
public void testEnabledBool() {
    /*
     State-level enablement of a bool option: enabled() only consults the on/off
     states, never the weighted values. Weights are only factored in by the
     selection calls.
    */
    final String namespace = "development";
    final Feature alwaysFalseFeature =
            TestSupport.loadFeature("json/feature-bool-off-default-on-namespace-always-false.json");
    final FeatureRecord alwaysFalseRecord = FeatureRecord.build(alwaysFalseFeature);
    assertFalse("false when default state is off",
            alwaysFalseRecord.enabled());
    assertFalse("false when namespace missing and falls back to default",
            alwaysFalseRecord.enabled("missing"));
    assertTrue("true when an existing namespace state is on but the weights 100% biased false",
            alwaysFalseRecord.enabled(namespace));
    final Feature alwaysTrueFeature =
            TestSupport.loadFeature("json/feature-bool-off-default-on-namespace-always-true.json");
    final FeatureRecord alwaysTrueRecord = FeatureRecord.build(alwaysTrueFeature);
    assertEquals(alwaysTrueFeature, alwaysTrueRecord.feature());
    assertFalse("false when default state is off",
            alwaysTrueRecord.enabled());
    assertFalse("false when namespace missing and falls back to default",
            alwaysTrueRecord.enabled("missing"));
    assertTrue("true when an existing namespace state is on and the weight 100% biased true",
            alwaysTrueRecord.enabled(namespace));
}
@Test
public void testPrepare() {
    final Feature feature = TestSupport.loadFeature("json/feature-1.json");
    final FeatureRecord record = FeatureRecord.build(feature);
    assertEquals(feature, record.feature());
    // Default-namespace control option is materialized by build().
    final FeatureOption controlFeatureOption = record.controlFeatureOption();
    assertNotNull(controlFeatureOption);
    assertEquals("option-green", controlFeatureOption.getKey());
    assertEquals("green", controlFeatureOption.getValue());
    assertEquals(3000, controlFeatureOption.getWeight());
    final NamespaceFeature staging = record.namespace("staging");
    assertNotNull(staging);
    final FeatureOption stagingControlOptionRecord = record.controlFeatureOption("staging");
    assertNotNull(stagingControlOptionRecord);
    // The prepared control option must match the one named by the raw data.
    final String control = staging.getFeature().getOptions().getControl();
    final FeatureOption stagingControlOptionData =
            staging.getFeature().getOptions().getItemsList()
                    .stream()
                    .filter(featureOption -> featureOption.getKey().equals(control))
                    .findFirst()
                    .orElse(null);
    assertEquals(stagingControlOptionData, stagingControlOptionRecord);
    assertNotNull(record.optionSelectorWeighted());
    assertNotNull(record.optionSelectorWeighted("staging"));
}
@Test
public void testEquals() {
    final Feature feature = TestSupport.loadFeature(
            "json/feature-select-string-off-default-on-namespace-off-namespace.json");
    final Feature feature1 = TestSupport.loadFeature(
            "json/feature-select-string-off-default-on-namespace-off-namespace.json");
    final Feature feature2 = TestSupport.loadFeature("json/feature-1.json");
    assertEquals(feature, feature1);
    assertNotEquals(feature, feature2);
    // Was assertSame(2, ...): assertSame compares boxed Integer references and only
    // passed because small Integer values are cached. The intent is a value check.
    assertEquals(2, Sets.newHashSet(feature, feature1, feature2).size());
    assertEquals(feature.toString(), feature1.toString());
}
}
| |
package tcas;
import gov.nasa.jpf.symbc.Symbolic;
/*
 * NOTE(review): the "_V30" suffix and the @Symbolic annotations indicate this is
 * a variant of the Siemens TCAS benchmark used in symbolic-execution (JPF/SPF)
 * experiments. Such variants typically carry a deliberately seeded fault, so the
 * logic below should be kept byte-identical to the distributed version; the
 * review notes only document, they do not "fix".
 */
public class TCAS_V30 {
    private static final int OLEV = 600;/* in feets/minute */
    private static final int MAXALTDIFF = 600; /* max altitude difference in feet */
    private static final int MINSEP = 300; /* min separation in feet */
    private static final int NOZCROSS = 100; /* in feet */
    // Fields marked @Symbolic("true") are treated as symbolic inputs by the analyzer.
    @Symbolic("true")
    int Cur_Vertical_Sep;
    @Symbolic("true")
    boolean High_Confidence;
    @Symbolic("true")
    boolean Two_of_Three_Reports_Valid;
    @Symbolic("true")
    int Own_Tracked_Alt;
    @Symbolic("true")
    int Own_Tracked_Alt_Rate;
    @Symbolic("true")
    int Other_Tracked_Alt;
    @Symbolic("true")
    int Alt_Layer_Value; /* 0, 1, 2, 3 */
    // Positive-RA altitude thresholds, one per altitude layer; filled by initialize().
    int[] Positive_RA_Alt_Thresh = new int[4];
    @Symbolic("true")
    int Up_Separation;
    @Symbolic("true")
    int Down_Separation;
    /* state variables */
    @Symbolic("true")
    int Other_RAC; /* NO_INTENT, DO_NOT_CLIMB, DO_NOT_DESCEND */
    private static final int NO_INTENT = 0;
    private static final int DO_NOT_CLIMB = 1;
    private static final int DO_NOT_DESCEND = 2;
    @Symbolic("true")
    int Other_Capability; /* TCAS_TA, OTHER */
    private static final int TCAS_TA = 1;
    private static final int OTHER = 2;
    // NOTE(review): Climb_Inhibit is assigned (startTcas/main) but never read in
    // this class - see the note on Inhibit_Biased_Climb() below.
    @Symbolic("true")
    boolean Climb_Inhibit; /* true/false */
    /* Resolution-advisory results returned by alt_sep_test(). */
    private static final int UNRESOLVED = 0;
    private static final int UPWARD_RA = 1;
    private static final int DOWNWARD_RA = 2;
    /* Fills the positive-RA altitude thresholds (feet), one per altitude layer. */
    void initialize(){
        Positive_RA_Alt_Thresh[0] = 400;
        Positive_RA_Alt_Thresh[1] = 500;
        Positive_RA_Alt_Thresh[2] = 640;
        Positive_RA_Alt_Thresh[3] = 740;
    }
    /* Threshold (feet) for the current altitude layer. */
    private int ALIM (){
        return Positive_RA_Alt_Thresh[Alt_Layer_Value];
    }
    /*
     * NOTE(review): the reference TCAS implementation returns
     * (Climb_Inhibit ? Up_Separation + NOZCROSS : Up_Separation); here the
     * Climb_Inhibit flag is ignored (it is never read anywhere in this class),
     * which is presumably the seeded fault of this variant - confirm against the
     * base version before changing anything.
     */
    private int Inhibit_Biased_Climb (){
        return (Up_Separation + NOZCROSS);
    }
    /* True when an upward RA does not cross the intruder's altitude unsafely. */
    private boolean Non_Crossing_Biased_Climb(){
        boolean upward_preferred;
        int upward_crossing_situation;
        boolean result;
        upward_preferred = Inhibit_Biased_Climb() > Down_Separation;
        if (upward_preferred){
            result = !(Own_Below_Threat()) || ((Own_Below_Threat()) && (!(Down_Separation >= ALIM())));
        }else{
            result = Own_Above_Threat() && (Cur_Vertical_Sep >= MINSEP) && (Up_Separation >= ALIM());
        }
        return result;
    }
    /* True when a downward RA does not cross the intruder's altitude unsafely. */
    private boolean Non_Crossing_Biased_Descend(){
        boolean upward_preferred;
        int upward_crossing_situation;
        boolean result;
        upward_preferred = Inhibit_Biased_Climb() > Down_Separation;
        if (upward_preferred){
            result = Own_Below_Threat() && (Cur_Vertical_Sep >= MINSEP) && (Down_Separation >= ALIM());
        }else{
            result = !(Own_Above_Threat()) || ((Own_Above_Threat()) && (Up_Separation >= ALIM()));
        }
        return result;
    }
    /* Own aircraft is below the intruder ("threat"). */
    private boolean Own_Below_Threat(){
        return (Own_Tracked_Alt < Other_Tracked_Alt);
    }
    /* Own aircraft is above the intruder ("threat"). */
    private boolean Own_Above_Threat(){
        return (Other_Tracked_Alt < Own_Tracked_Alt);
    }
    /*
     * Core decision procedure: returns UNRESOLVED, UPWARD_RA or DOWNWARD_RA
     * based on the current (symbolic) inputs. Advisories are only issued when
     * the reports are high-confidence and either the intruder has no TCAS or
     * its intent is unknown.
     */
    int alt_sep_test(){
        boolean enabled, tcas_equipped, intent_not_known;
        boolean need_upward_RA, need_downward_RA;
        int alt_sep;
        enabled = High_Confidence && (Own_Tracked_Alt_Rate <= OLEV) && (Cur_Vertical_Sep > MAXALTDIFF);
        tcas_equipped = Other_Capability == TCAS_TA;
        intent_not_known = Two_of_Three_Reports_Valid && Other_RAC == NO_INTENT;
        alt_sep = UNRESOLVED;
        if (enabled && ((tcas_equipped && intent_not_known) || !tcas_equipped)){
            need_upward_RA = Non_Crossing_Biased_Climb() && Own_Below_Threat();
            need_downward_RA = Non_Crossing_Biased_Descend() && Own_Above_Threat();
            if (need_upward_RA && need_downward_RA)
                /* unreachable: requires Own_Below_Threat and Own_Above_Threat
                   to both be true - that requires Own_Tracked_Alt < Other_Tracked_Alt
                   and Other_Tracked_Alt < Own_Tracked_Alt, which isn't possible */
                alt_sep = UNRESOLVED;
            else if (need_upward_RA)
                alt_sep = UPWARD_RA;
            else if (need_downward_RA)
                alt_sep = DOWNWARD_RA;
            else
                alt_sep = UNRESOLVED;
        }
        return alt_sep;
    }
    //alternate entry point for test purposes
    /* Sets all inputs from parameters, initializes the thresholds and runs the test. */
    public int startTcas(int cvs, boolean hc, boolean ttrv, int ota, int otar,
                         int otTa, int alv, int upS, int dS, int oRAC, int oc, boolean ci){
        Cur_Vertical_Sep = cvs;
        High_Confidence = hc;
        Two_of_Three_Reports_Valid = ttrv;
        Own_Tracked_Alt = ota;
        Own_Tracked_Alt_Rate = otar;
        Other_Tracked_Alt = otTa;
        Alt_Layer_Value = alv;
        Up_Separation = upS;
        Down_Separation = dS;
        Other_RAC = oRAC;
        Other_Capability = oc;
        Climb_Inhibit = ci;
        initialize();
        return alt_sep_test();
    }
    /*
     * Command-line entry point mirroring the original C driver below: expects
     * exactly 12 arguments in the same order as startTcas (booleans passed as
     * "0"/non-"0"); with zero arguments it runs on default field values; any
     * other count is reported as invalid (but execution still proceeds with
     * whatever defaults the fields hold).
     */
    public static void main (String[] args) {
        TCAS_V30 tcas = new TCAS_V30();
        if (args.length == 12){
            tcas.Cur_Vertical_Sep = Integer.parseInt(args[0]);
            if (args[1].equalsIgnoreCase("0"))
                tcas.High_Confidence = false;
            else
                tcas.High_Confidence = true;
            if (args[2].equalsIgnoreCase("0"))
                tcas.Two_of_Three_Reports_Valid = false;
            else
                tcas.Two_of_Three_Reports_Valid = true;
            tcas.Own_Tracked_Alt = Integer.parseInt((args[3]));
            tcas.Own_Tracked_Alt_Rate = Integer.parseInt(args[4]);
            tcas.Other_Tracked_Alt = Integer.parseInt(args[5]);
            tcas.Alt_Layer_Value = Integer.parseInt(args[6]);
            tcas.Up_Separation = Integer.parseInt(args[7]);
            tcas.Down_Separation = Integer.parseInt(args[8]);
            tcas.Other_RAC = Integer.parseInt(args[9]);
            tcas.Other_Capability = Integer.parseInt(args[10]);
            if (args[11].equalsIgnoreCase("0"))
                tcas.Climb_Inhibit = false;
            else
                tcas.Climb_Inhibit = true;
        }else if (args.length == 0){
        }else{
            System.out.println("Invalid number of args");
        }
        tcas.initialize();
        int res = tcas.alt_sep_test();
        System.out.println(">>>>>>results: " + res);
    }
    /* Original C driver kept for reference:
    main(argc, argv)
    int argc;
    char *argv[];
    {
        if(argc < 13)
        {
            fprintf(stdout, "Error: Command line arguments are\n");
            fprintf(stdout, "Cur_Vertical_Sep, High_Confidence, Two_of_Three_Reports_Valid\n");
            fprintf(stdout, "Own_Tracked_Alt, Own_Tracked_Alt_Rate, Other_Tracked_Alt\n");
            fprintf(stdout, "Alt_Layer_Value, Up_Separation, Down_Separation\n");
            fprintf(stdout, "Other_RAC, Other_Capability, Climb_Inhibit\n");
            exit(1);
        }
        initialize();
        Cur_Vertical_Sep = atoi(argv[1]);
        High_Confidence = atoi(argv[2]);
        Two_of_Three_Reports_Valid = atoi(argv[3]);
        Own_Tracked_Alt = atoi(argv[4]);
        Own_Tracked_Alt_Rate = atoi(argv[5]);
        Other_Tracked_Alt = atoi(argv[6]);
        Alt_Layer_Value = atoi(argv[7]);
        Up_Separation = atoi(argv[8]);
        Down_Separation = atoi(argv[9]);
        Other_RAC = atoi(argv[10]);
        Other_Capability = atoi(argv[11]);
        Climb_Inhibit = atoi(argv[12]);
        fprintf(stdout, "%d\n", alt_sep_test());
        exit(0);
    }*/
}
| |
/*
* Copyright 2019 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.method.MethodMatchers.instanceMethod;
import static com.google.errorprone.matchers.method.MethodMatchers.staticMethod;
import com.google.errorprone.BugPattern;
import com.google.errorprone.BugPattern.SeverityLevel;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.VariableTreeMatcher;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.util.ASTHelpers;
import com.google.errorprone.util.ASTHelpers.TargetType;
import com.sun.source.tree.AnnotationTree;
import com.sun.source.tree.AssignmentTree;
import com.sun.source.tree.CompoundAssignmentTree;
import com.sun.source.tree.EnhancedForLoopTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.IdentifierTree;
import com.sun.source.tree.MemberReferenceTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.ReturnTree;
import com.sun.source.tree.StatementTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.Tree.Kind;
import com.sun.source.tree.VariableTree;
import com.sun.source.util.TreePath;
import com.sun.source.util.TreePathScanner;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.TypeTag;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import javax.lang.model.element.ElementKind;
/**
 * Finds and fixes unnecessarily boxed variables.
 *
 * @author awturner@google.com (Andy Turner)
 */
@BugPattern(
    name = "UnnecessaryBoxedVariable",
    summary = "It is unnecessary for this variable to be boxed. Use the primitive instead.",
    explanation =
        "This variable is of boxed type, but equivalent semantics can be achieved using the"
            + " corresponding primitive type, which avoids the cost of constructing an unnecessary"
            + " object.",
    severity = SeverityLevel.SUGGESTION)
public class UnnecessaryBoxedVariable extends BugChecker implements VariableTreeMatcher {
  private static final Matcher<ExpressionTree> VALUE_OF_MATCHER =
      staticMethod().onClass(UnnecessaryBoxedVariable::isBoxableType).named("valueOf");

  @Override
  public Description matchVariable(VariableTree tree, VisitorState state) {
    Optional<Type> unboxed = unboxed(tree, state);
    if (!unboxed.isPresent()) {
      return Description.NO_MATCH;
    }
    VarSymbol varSymbol = ASTHelpers.getSymbol(tree);
    if (varSymbol == null) {
      return Description.NO_MATCH;
    }
    switch (varSymbol.getKind()) {
      case PARAMETER:
        if (!canChangeMethodSignature(state, (MethodSymbol) varSymbol.getEnclosingElement())) {
          return Description.NO_MATCH;
        }
        // Fall through.
      case LOCAL_VARIABLE:
        if (!variableMatches(tree, state)) {
          return Description.NO_MATCH;
        }
        break;
      default:
        return Description.NO_MATCH;
    }
    Optional<TreePath> enclosingMethod = getEnclosingMethod(state.getPath());
    if (!enclosingMethod.isPresent()) {
      return Description.NO_MATCH;
    }
    TreePath path = enclosingMethod.get();
    FindBoxedUsagesScanner scanner = new FindBoxedUsagesScanner(varSymbol, path, state);
    scanner.scan(path, null);
    if (scanner.boxedUsageFound) {
      return Description.NO_MATCH;
    }
    if (!scanner.used && varSymbol.getKind() == ElementKind.PARAMETER) {
      // If it isn't used and it is a parameter, don't fix it, because this could introduce a new
      // NPE.
      return Description.NO_MATCH;
    }
    SuggestedFix.Builder fixBuilder = SuggestedFix.builder();
    fixBuilder.replace(tree.getType(), unboxed.get().tsym.getSimpleName().toString());
    fixMethodInvocations(scanner.fixableSimpleMethodInvocations, fixBuilder, state);
    fixNullCheckInvocations(scanner.fixableNullCheckInvocations, fixBuilder, state);
    fixCastingInvocations(
        scanner.fixableCastMethodInvocations, enclosingMethod.get(), fixBuilder, state);
    // Remove @Nullable annotation, if present.
    AnnotationTree nullableAnnotation =
        ASTHelpers.getAnnotationWithSimpleName(tree.getModifiers().getAnnotations(), "Nullable");
    if (nullableAnnotation != null) {
      fixBuilder.replace(nullableAnnotation, "");
      return buildDescription(tree)
          .setMessage(
              "All usages of this @Nullable variable would result in a NullPointerException when it"
                  + " actually is null. Use the primitive type if this variable should never be"
                  + " null, or else fix the code to avoid unboxing or invoking its instance"
                  + " methods.")
          .addFix(fixBuilder.build())
          .build();
    } else {
      return describeMatch(tree, fixBuilder.build());
    }
  }

  /**
   * Returns the unboxed counterpart of {@code tree}'s type, or empty when the type is not a
   * boxed primitive (this deliberately excludes {@code java.lang.Void}).
   */
  private static Optional<Type> unboxed(Tree tree, VisitorState state) {
    Type type = ASTHelpers.getType(tree);
    if (type == null || !type.isReference()) {
      return Optional.empty();
    }
    Type unboxed = state.getTypes().unboxedType(type);
    if (unboxed == null
        || unboxed.getTag() == TypeTag.NONE
        // Don't match java.lang.Void.
        || unboxed.getTag() == TypeTag.VOID) {
      return Optional.empty();
    }
    return Optional.of(unboxed);
  }

  /**
   * Rewrites null-check invocations (checkNotNull etc.): deletes the check when it is a bare
   * statement, otherwise replaces it with its first argument.
   */
  private static void fixNullCheckInvocations(
      List<TreePath> nullCheckInvocations, SuggestedFix.Builder fixBuilder, VisitorState state) {
    for (TreePath pathForTree : nullCheckInvocations) {
      checkArgument(pathForTree.getLeaf() instanceof MethodInvocationTree);
      MethodInvocationTree methodInvocation = (MethodInvocationTree) pathForTree.getLeaf();
      TargetType targetType = ASTHelpers.targetType(state.withPath(pathForTree));
      if (targetType == null) {
        // If the check is the only thing in a statement, remove the statement.
        StatementTree statementTree =
            ASTHelpers.findEnclosingNode(pathForTree, StatementTree.class);
        if (statementTree != null) {
          fixBuilder.delete(statementTree);
        }
      } else {
        // If it's an expression, we can replace simply with the first argument.
        fixBuilder.replace(
            methodInvocation, state.getSourceForNode(methodInvocation.getArguments().get(0)));
      }
    }
  }

  /** Rewrites v.hashCode()/v.toString() as Boxed.hashCode(v)/Boxed.toString(v). */
  private static void fixMethodInvocations(
      List<MethodInvocationTree> simpleMethodInvocations,
      SuggestedFix.Builder fixBuilder,
      VisitorState state) {
    for (MethodInvocationTree methodInvocation : simpleMethodInvocations) {
      ExpressionTree receiver = ASTHelpers.getReceiver(methodInvocation);
      Type receiverType = ASTHelpers.getType(receiver);
      MemberSelectTree methodSelect = (MemberSelectTree) methodInvocation.getMethodSelect();
      fixBuilder.replace(
          methodInvocation,
          String.format(
              "%s.%s(%s)",
              receiverType.tsym.getSimpleName(),
              methodSelect.getIdentifier(),
              state.getSourceForNode(receiver)));
    }
  }

  /** Rewrites v.intValue() etc. as v or (primitive) v, deleting bare-statement uses. */
  private static void fixCastingInvocations(
      List<MethodInvocationTree> castMethodInvocations,
      TreePath enclosingMethod,
      SuggestedFix.Builder fixBuilder,
      VisitorState state) {
    for (MethodInvocationTree castInvocation : castMethodInvocations) {
      ExpressionTree receiver = ASTHelpers.getReceiver(castInvocation);
      Type expressionType = ASTHelpers.getType(castInvocation);
      TreePath castPath = TreePath.getPath(enclosingMethod, castInvocation);
      if (castPath.getParentPath() != null
          && castPath.getParentPath().getLeaf().getKind() == Kind.EXPRESSION_STATEMENT) {
        // If we were to replace X.intValue(); with (int) x;, the code wouldn't compile because
        // that's not a statement. Instead, just delete.
        fixBuilder.delete(castPath.getParentPath().getLeaf());
      } else {
        Type unboxedReceiverType = state.getTypes().unboxedType(ASTHelpers.getType(receiver));
        if (unboxedReceiverType.getTag() == expressionType.getTag()) {
          // someInteger.intValue() can just become someInt.
          fixBuilder.replace(castInvocation, state.getSourceForNode(receiver));
        } else {
          // someInteger.otherPrimitiveValue() can become (otherPrimitive) someInt.
          fixBuilder.replace(
              castInvocation,
              String.format(
                  "(%s) %s",
                  expressionType.tsym.getSimpleName(), state.getSourceForNode(receiver)));
        }
      }
    }
  }

  /**
   * Check to see if the variable should be considered for replacement, i.e.
   *
   * <ul>
   *   <li>A variable without an initializer
   *   <li>Enhanced for loop variables can be replaced if they are loops over primitive arrays
   *   <li>A variable initialized with a primitive value (which is then auto-boxed)
   *   <li>A variable initialized with an invocation of {@code Boxed.valueOf}, since that can be
   *       replaced with {@code Boxed.parseBoxed}.
   * </ul>
   */
  private static boolean variableMatches(VariableTree tree, VisitorState state) {
    ExpressionTree expression = tree.getInitializer();
    if (expression == null) {
      Tree leaf = state.getPath().getParentPath().getLeaf();
      if (!(leaf instanceof EnhancedForLoopTree)) {
        return true;
      }
      EnhancedForLoopTree node = (EnhancedForLoopTree) leaf;
      Type expressionType = ASTHelpers.getType(node.getExpression());
      if (expressionType == null) {
        return false;
      }
      Type elemtype = state.getTypes().elemtype(expressionType);
      // Be conservative - if elemtype is null, treat it as if it is a loop over a wrapped type.
      return elemtype != null && elemtype.isPrimitive();
    }
    Type initializerType = ASTHelpers.getType(expression);
    if (initializerType == null) {
      return false;
    }
    if (initializerType.isPrimitive()) {
      return true;
    }
    // Don't count X.valueOf(...) as a boxed usage, since it can be replaced with X.parseX.
    return VALUE_OF_MATCHER.matches(expression, state);
  }

  /**
   * Walks up from {@code path} to the nearest enclosing method, stopping (empty) at a class or
   * lambda boundary since usages outside the method cannot be tracked.
   */
  private static Optional<TreePath> getEnclosingMethod(TreePath path) {
    while (path != null
        && path.getLeaf().getKind() != Kind.CLASS
        && path.getLeaf().getKind() != Kind.LAMBDA_EXPRESSION) {
      if (path.getLeaf().getKind() == Kind.METHOD) {
        return Optional.of(path);
      }
      path = path.getParentPath();
    }
    return Optional.empty();
  }

  /** True when {@code type} has a primitive (unboxed) counterpart. */
  private static boolean isBoxableType(Type type, VisitorState state) {
    Type unboxedType = state.getTypes().unboxedType(type);
    return unboxedType != null && unboxedType.getTag() != TypeTag.NONE;
  }

  /** A parameter's type may only change when no super/sub class depends on the signature. */
  private static boolean canChangeMethodSignature(VisitorState state, MethodSymbol methodSymbol) {
    return !ASTHelpers.methodCanBeOverridden(methodSymbol)
        && ASTHelpers.findSuperMethods(methodSymbol, state.getTypes()).isEmpty();
  }

  /** Scans a method body for usages of the variable that require it to stay boxed. */
  private static class FindBoxedUsagesScanner extends TreePathScanner<Void, Void> {
    // Method invocations like V.hashCode() can be replaced with TypeOfV.hashCode(v).
    private static final Matcher<ExpressionTree> SIMPLE_METHOD_MATCH =
        instanceMethod().anyClass().namedAnyOf("hashCode", "toString");

    // Method invocations like V.intValue() can be replaced with (int) v.
    private static final Matcher<ExpressionTree> CAST_METHOD_MATCH =
        instanceMethod()
            .onClass(UnnecessaryBoxedVariable::isBoxableType)
            .namedAnyOf(
                "byteValue",
                "shortValue",
                "intValue",
                "longValue",
                "floatValue",
                "doubleValue",
                "booleanValue");

    // Method invocations that check (and throw) if the value is potentially null.
    private static final Matcher<ExpressionTree> NULL_CHECK_MATCH =
        anyOf(
            staticMethod().onClass("com.google.common.base.Preconditions").named("checkNotNull"),
            // Guava's method is verifyNotNull; the previous name "verifyNonNull" does not
            // exist, so Verify-based null checks were silently never matched.
            staticMethod().onClass("com.google.common.base.Verify").named("verifyNotNull"),
            staticMethod().onClass("java.util.Objects").named("requireNonNull"));

    private final VarSymbol varSymbol;
    private final TreePath path;
    private final VisitorState state;
    private final List<MethodInvocationTree> fixableSimpleMethodInvocations = new ArrayList<>();
    private final List<TreePath> fixableNullCheckInvocations = new ArrayList<>();
    private final List<MethodInvocationTree> fixableCastMethodInvocations = new ArrayList<>();

    private boolean boxedUsageFound;
    private boolean used;

    FindBoxedUsagesScanner(VarSymbol varSymbol, TreePath path, VisitorState state) {
      this.varSymbol = varSymbol;
      this.path = path;
      this.state = state;
    }

    @Override
    public Void scan(Tree tree, Void unused) {
      // Short-circuit: once a disqualifying usage is found there is nothing left to learn.
      if (boxedUsageFound) {
        return null;
      }
      return super.scan(tree, unused);
    }

    @Override
    public Void visitAssignment(AssignmentTree node, Void unused) {
      Symbol nodeSymbol = ASTHelpers.getSymbol(node.getVariable());
      if (!Objects.equals(nodeSymbol, varSymbol)) {
        return super.visitAssignment(node, unused);
      }
      used = true;
      // The variable of interest is being assigned. Check if the expression is non-primitive,
      // and go on to scan the expression.
      if (!checkAssignmentExpression(node.getExpression())) {
        return scan(node.getExpression(), unused);
      }
      boxedUsageFound = true;
      return null;
    }

    private boolean checkAssignmentExpression(ExpressionTree expression) {
      Type expressionType = ASTHelpers.getType(expression);
      if (expressionType.isPrimitive()) {
        return false;
      }
      // If the value is assigned a non-primitive value, we need to keep it non-primitive.
      // Unless it's an invocation of Boxed.valueOf or new Boxed, in which case it doesn't need to
      // be kept boxed since we know the result of valueOf is non-null.
      return !VALUE_OF_MATCHER.matches(
              expression, state.withPath(TreePath.getPath(path, expression)))
          && expression.getKind() != Kind.NEW_CLASS;
    }

    @Override
    public Void visitIdentifier(IdentifierTree node, Void unused) {
      Symbol nodeSymbol = ASTHelpers.getSymbol(node);
      if (Objects.equals(nodeSymbol, varSymbol)) {
        used = true;
        TreePath identifierPath = TreePath.getPath(path, node);
        VisitorState identifierState = state.withPath(identifierPath);
        TargetType targetType = ASTHelpers.targetType(identifierState);
        if (targetType != null && !targetType.type().isPrimitive()) {
          boxedUsageFound = true;
          return null;
        }
      }
      return super.visitIdentifier(node, unused);
    }

    @Override
    public Void visitCompoundAssignment(CompoundAssignmentTree node, Void unused) {
      // Don't count the LHS of compound assignments as boxed usages, because they have to be
      // unboxed. Just visit the expression.
      return scan(node.getExpression(), unused);
    }

    @Override
    public Void visitMethodInvocation(MethodInvocationTree node, Void unused) {
      if (NULL_CHECK_MATCH.matches(node, state)) {
        Symbol firstArgSymbol =
            ASTHelpers.getSymbol(ASTHelpers.stripParentheses(node.getArguments().get(0)));
        if (Objects.equals(firstArgSymbol, varSymbol)) {
          used = true;
          fixableNullCheckInvocations.add(getCurrentPath());
          return null;
        }
      }
      Tree receiver = ASTHelpers.getReceiver(node);
      if (receiver != null && Objects.equals(ASTHelpers.getSymbol(receiver), varSymbol)) {
        used = true;
        if (SIMPLE_METHOD_MATCH.matches(node, state)) {
          fixableSimpleMethodInvocations.add(node);
          return null;
        }
        if (CAST_METHOD_MATCH.matches(node, state)) {
          fixableCastMethodInvocations.add(node);
          return null;
        }
        // Any other instance method requires the receiver to stay boxed.
        boxedUsageFound = true;
        return null;
      }
      return super.visitMethodInvocation(node, unused);
    }

    @Override
    public Void visitReturn(ReturnTree node, Void unused) {
      Symbol nodeSymbol = ASTHelpers.getSymbol(ASTHelpers.stripParentheses(node.getExpression()));
      if (!Objects.equals(nodeSymbol, varSymbol)) {
        return super.visitReturn(node, unused);
      }
      used = true;
      // Don't count a return value as a boxed usage, except if we are returning a parameter, and
      // the method's return type is boxed.
      if (varSymbol.getKind() == ElementKind.PARAMETER) {
        MethodTree enclosingMethod =
            ASTHelpers.findEnclosingNode(getCurrentPath(), MethodTree.class);
        Type returnType = ASTHelpers.getType(enclosingMethod.getReturnType());
        if (!returnType.isPrimitive()) {
          boxedUsageFound = true;
        }
      }
      return null;
    }

    @Override
    public Void visitMemberReference(MemberReferenceTree node, Void unused) {
      // A method reference qualified by the variable (v::foo) needs the boxed receiver.
      ExpressionTree qualifierExpression = node.getQualifierExpression();
      if (qualifierExpression.getKind() == Kind.IDENTIFIER) {
        Symbol symbol = ASTHelpers.getSymbol(qualifierExpression);
        if (Objects.equals(symbol, varSymbol)) {
          boxedUsageFound = true;
          used = true;
          return null;
        }
      }
      return super.visitMemberReference(node, unused);
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.crs4.pydoop.mapreduce.pipes;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import javax.crypto.SecretKey;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.conf.Configuration;
/*
FIXME org.apache.hadoop.mapred.TaskLog is clearly not what it is expected to
be used with org.apache.hadoop.mapreduce.*
For the time being, we use the following as a stand-in.
it.crs4.pydoop.mapreduce.pipes.TaskLog;
*/
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
/**
* This class is responsible for launching and communicating with the child
* process.
*/
class Application<K1 extends Writable, V1 extends Writable,
K2 extends WritableComparable, V2 extends Writable> {
private static final Log LOG = LogFactory.getLog(Application.class.getName());
// Server socket the child process connects back to; its port is exported via the env.
private ServerSocket serverSocket;
// The launched child (pipes executable) process.
private Process process;
// Connection accepted from the child after it starts.
private Socket clientSocket;
// Receives keys/values and status coming up from the child.
private OutputHandler<K2, V2> handler;
// Sends configuration, input splits and records down to the child.
private DownwardProtocol<K1, V1> downlink;
static final boolean WINDOWS
    = System.getProperty("os.name").startsWith("Windows");
/**
 * Start the child process to handle the task for us.
 *
 * <p>Opens a server socket for the child to connect back to, exports the port
 * and the job-token password file location through the child's environment,
 * launches the (optionally interpreted) executable with stdout/stderr captured
 * to the task logs, then performs the digest-based authentication handshake
 * before starting the downlink protocol.
 *
 * @param context task context supplying configuration, credentials and the
 *     output key/value classes
 * @param input record reader handed to the output handler
 * @throws IOException if socket setup, chmod, or process launch fails
 * @throws InterruptedException if interrupted while authenticating
 */
Application(TaskInputOutputContext<K1,V1,K2,V2> context,
            DummyRecordReader input)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  serverSocket = new ServerSocket(0);
  Map<String, String> env = new HashMap<String,String>();
  // add TMPDIR environment variable with the value of java.io.tmpdir
  env.put("TMPDIR", System.getProperty("java.io.tmpdir"));
  env.put(Submitter.PORT, Integer.toString(serverSocket.getLocalPort()));
  //Add token to the environment if security is enabled
  Token<JobTokenIdentifier> jobToken =
      TokenCache.getJobToken(context.getCredentials());
  // This password is used as shared secret key between this application and
  // child pipes process
  byte[] password = jobToken.getPassword();
  String localPasswordFile = new File(".") + Path.SEPARATOR
      + "jobTokenPassword";
  writePasswordToLocalFile(localPasswordFile, password, conf);
  // FIXME why is this not Submitter.SECRET_LOCATION ?
  env.put("hadoop.pipes.shared.secret.location", localPasswordFile);
  List<String> cmd = new ArrayList<String>();
  String interpretor = conf.get(Submitter.INTERPRETOR);
  if (interpretor != null) {
    cmd.add(interpretor);
  }
  String executable = context.getLocalCacheFiles()[0].toString();
  if (!(new File(executable).canExecute())) {
    // LinuxTaskController sets +x permissions on all distcache files already.
    // In case of DefaultTaskController, set permissions here.
    FileUtil.chmod(executable, "u+x");
  }
  cmd.add(executable);
  // wrap the command in a stdout/stderr capture
  // we are starting map/reduce task of the pipes job. this is not a cleanup
  // attempt.
  TaskAttemptID taskid = context.getTaskAttemptID();
  File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
  File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
  long logLength = TaskLog.getTaskLogLength(conf);
  cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength,
                                   false);
  process = runClient(cmd, env);
  clientSocket = serverSocket.accept();
  // Challenge/response handshake: both sides prove knowledge of the job-token
  // password without ever sending the password itself over the socket.
  String challenge = getSecurityChallenge();
  String digestToSend = createDigest(password, challenge);
  String digestExpected = createDigest(password, digestToSend);
  handler = new OutputHandler<K2, V2>(context, input, digestExpected);
  K2 outputKey = (K2)
      ReflectionUtils.newInstance(context.getOutputKeyClass(), conf);
  V2 outputValue = (V2)
      ReflectionUtils.newInstance(context.getOutputValueClass(), conf);
  downlink = new BinaryProtocol<K1, V1, K2, V2>(clientSocket, handler,
                                                outputKey, outputValue, conf);
  downlink.authenticate(digestToSend, challenge);
  waitForAuthentication();
  LOG.debug("Authentication succeeded");
  downlink.start();
  downlink.setJobConf(conf);
}
private String getSecurityChallenge() {
Random rand = new Random(System.currentTimeMillis());
//Use 4 random integers so as to have 16 random bytes.
StringBuilder strBuilder = new StringBuilder();
strBuilder.append(rand.nextInt(0x7fffffff));
strBuilder.append(rand.nextInt(0x7fffffff));
strBuilder.append(rand.nextInt(0x7fffffff));
strBuilder.append(rand.nextInt(0x7fffffff));
return strBuilder.toString();
}
private void writePasswordToLocalFile(String localPasswordFile,
byte[] password,
Configuration conf) throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
Path localPath = new Path(localPasswordFile);
FSDataOutputStream out = FileSystem.create(localFs, localPath,
new FsPermission("400"));
out.write(password);
out.close();
}
/**
* Get the downward protocol object that can send commands down to the
* application.
* @return the downlink proxy
*/
DownwardProtocol<K1, V1> getDownlink() {
return downlink;
}
/**
* Wait for authentication response.
* @throws IOException
* @throws InterruptedException
*/
void waitForAuthentication() throws IOException,
InterruptedException {
downlink.flush();
LOG.debug("Waiting for authentication response");
handler.waitForAuthentication();
}
/**
* Wait for the application to finish
* @return did the application finish correctly?
* @throws Throwable
*/
boolean waitForFinish() throws Throwable {
downlink.flush();
return handler.waitForFinish();
}
/**
* Abort the application and wait for it to finish.
* @param t the exception that signalled the problem
* @throws IOException A wrapper around the exception that was passed in
*/
void abort(Throwable t) throws IOException {
LOG.info("Aborting because of " + StringUtils.stringifyException(t));
try {
downlink.abort();
downlink.flush();
} catch (IOException e) {
// IGNORE cleanup problems
}
try {
handler.waitForFinish();
} catch (Throwable ignored) {
process.destroy();
}
IOException wrapper = new IOException("pipe child exception");
wrapper.initCause(t);
throw wrapper;
}
/**
* Clean up the child procress and socket.
* @throws IOException
*/
void cleanup() throws IOException {
serverSocket.close();
try {
downlink.close();
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
}
/**
* Run a given command in a subprocess, including threads to copy its stdout
* and stderr to our stdout and stderr.
* @param command the command and its arguments
* @param env the environment to run the process in
* @return a handle on the process
* @throws IOException
*/
static Process runClient(List<String> command,
Map<String, String> env) throws IOException {
ProcessBuilder builder = new ProcessBuilder(command);
if (env != null) {
builder.environment().putAll(env);
}
Process result = builder.start();
return result;
}
public static String createDigest(byte[] password, String data)
throws IOException {
SecretKey key = JobTokenSecretManager.createSecretKey(password);
return SecureShuffleUtils.hashFromString(data, key);
}
}
| |
package com.empresa.marco.services;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.PersistenceException;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.empresa.marco.data.ObjectDefinationData;
import com.empresa.marco.entity.ObjectDefination;
import com.empresa.marco.exceptions.DataAccessException;
import com.empresa.marco.repository.GenericRepository;
@Service("ObjectDefinationService")
public class ObjectDefinationServiceImpl implements ObjectDefinationService {

    private static Logger logger = Logger
            .getLogger(ObjectDefinationServiceImpl.class);

    @Autowired
    GenericRepository repository;

    /**
     * Copies a persistent {@link ObjectDefination} entity into a detached
     * {@link ObjectDefinationData} transfer object. Shared by all read
     * methods (previously this mapping was copy-pasted four times).
     *
     * @param entity non-null entity to copy
     * @return a freshly populated transfer object
     */
    private ObjectDefinationData toData(ObjectDefination entity) {
        ObjectDefinationData data = new ObjectDefinationData();
        data.setObjectId(entity.getObjectId());
        data.setName(entity.getName());
        data.setSchemaName(entity.getSchemaName());
        data.setDataBaseName(entity.getDataBaseName());
        data.setUserDefinedName(entity.getUserDefinedName());
        data.setCamelCaseName(entity.getCamelCaseName());
        data.setEmbaddable(entity.getEmbaddable());
        data.setUpdated(entity.getUpdated());
        data.setPrimaryKey(entity.getPrimaryKey());
        if (entity.getProject() != null) {
            data.setProjectId(entity.getProject().getProjectId());
        }
        if (entity.getProjectVersion() != null) {
            data.setProjectVersionId(entity.getProjectVersion()
                    .getProjectVersionId());
        }
        data.setCreatedBy(entity.getCreatedBy());
        data.setCreatedDate(entity.getCreatedDate());
        data.setUpdatedBy(entity.getUpdatedBy());
        data.setUpdatedDate(entity.getUpdatedDate());
        return data;
    }

    /**
     * Copies the caller-supplied transfer object onto an entity; project and
     * project version are resolved through the repository when ids are given.
     * Used by both save and update.
     *
     * @param target entity to populate (objectId is NOT touched here)
     * @param source transfer object carrying the new field values
     */
    private void applyData(ObjectDefination target, ObjectDefinationData source) {
        target.setName(source.getName());
        target.setSchemaName(source.getSchemaName());
        target.setDataBaseName(source.getDataBaseName());
        target.setUserDefinedName(source.getUserDefinedName());
        target.setCamelCaseName(source.getCamelCaseName());
        target.setEmbaddable(source.getEmbaddable());
        target.setUpdated(source.getUpdated());
        target.setPrimaryKey(source.getPrimaryKey());
        if (source.getProjectId() != null) {
            target.setProject(repository.findProjectById(source.getProjectId()));
        }
        if (source.getProjectVersionId() != null) {
            target.setProjectVersion(repository
                    .findProjectVersionById(source.getProjectVersionId()));
        }
        target.setCreatedBy(source.getCreatedBy());
        target.setCreatedDate(source.getCreatedDate());
        target.setUpdatedBy(source.getUpdatedBy());
        target.setUpdatedDate(source.getUpdatedDate());
    }

    /**
     * Looks up a single object definition by primary key.
     *
     * @param objectId primary key to look up
     * @return populated data, or an empty data object when no entity matches
     *         (preserves the historical contract of never returning null)
     * @throws DataAccessException on any persistence failure
     */
    @Override
    public ObjectDefinationData getObjectDefinationById(Integer objectId)
            throws DataAccessException {
        try {
            ObjectDefination entity = repository
                    .findObjectDefinationById(objectId);
            return entity != null ? toData(entity) : new ObjectDefinationData();
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }

    /**
     * Lists all object definitions belonging to the given project/version.
     *
     * @throws DataAccessException on any persistence failure
     */
    @Override
    public List<ObjectDefinationData> getAllObjectDefination(Integer projectId,
            Integer projectVersionId) throws DataAccessException {
        List<ObjectDefinationData> objectDefinationDataList =
                new ArrayList<ObjectDefinationData>();
        try {
            // BUG FIX: previously queried with hard-coded ids (1, 1) and
            // ignored the caller-supplied projectId/projectVersionId.
            List<ObjectDefination> objectDefinationList = repository
                    .findAllObjectDefination(projectId, projectVersionId);
            if (objectDefinationList != null) {
                for (ObjectDefination entity : objectDefinationList) {
                    objectDefinationDataList.add(toData(entity));
                }
            }
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
        return objectDefinationDataList;
    }

    /**
     * Looks up a single object definition by name within a project/version.
     *
     * @return populated data, or an empty data object when nothing matches
     * @throws DataAccessException on any persistence failure
     */
    @Override
    public ObjectDefinationData getObjectDefinationByName(String name,
            Integer projectId, Integer projectVersionId)
            throws DataAccessException {
        try {
            ObjectDefination entity = repository.findObjectDefinationByName(
                    name, projectId, projectVersionId);
            // BUG FIX: the populated copy is now actually returned; the old
            // code filled a local variable and returned an always-empty
            // objectDefinationData instead.
            return entity != null ? toData(entity) : new ObjectDefinationData();
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }

    /**
     * Lists object definitions filtered by the embeddable flag.
     *
     * @throws DataAccessException on any persistence failure
     */
    @Override
    public List<ObjectDefinationData> getObjectDefinationByEmbaddable(
            Boolean embaddable, Integer projectId, Integer projectVersionId)
            throws DataAccessException {
        List<ObjectDefinationData> objectDefinationDataList =
                new ArrayList<ObjectDefinationData>();
        try {
            List<ObjectDefination> objectDefinationList = repository
                    .findObjectDefinationByEmbaddable(embaddable, projectId,
                            projectVersionId);
            if (objectDefinationList != null) {
                for (ObjectDefination entity : objectDefinationList) {
                    objectDefinationDataList.add(toData(entity));
                }
            }
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
        return objectDefinationDataList;
    }

    /**
     * Persists a new object definition built from the given transfer object.
     *
     * @return {@code true} on success (failures are reported via exception)
     * @throws DataAccessException on any persistence failure
     */
    @Override
    @Transactional
    public Boolean saveObjectDefination(
            ObjectDefinationData objectDefinationData)
            throws DataAccessException {
        try {
            ObjectDefination objectDefination = new ObjectDefination();
            objectDefination.setObjectId(objectDefinationData.getObjectId());
            applyData(objectDefination, objectDefinationData);
            // BUG FIX: removed the unconditional overwrite of project and
            // project version with hard-coded id 1, which silently discarded
            // the ids supplied by the caller above.
            repository.saveData(objectDefination);
            return true;
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }

    /**
     * Updates an existing object definition identified by its objectId.
     * NOTE(review): when objectId is null or non-positive this silently
     * returns {@code true} without touching anything — preserved from the
     * original contract; confirm callers expect that.
     *
     * @throws DataAccessException if the entity does not exist or persistence fails
     */
    @Override
    public Boolean updateObjectDefination(
            ObjectDefinationData objectDefinationData)
            throws DataAccessException {
        try {
            if (objectDefinationData.getObjectId() != null
                    && objectDefinationData.getObjectId() > 0) {
                ObjectDefination objectDefination = repository
                        .findObjectDefinationById(objectDefinationData
                                .getObjectId());
                if (objectDefination == null) {
                    logger.error("ObjectDefination Does Not Exist");
                    throw new DataAccessException(
                            "ObjectDefination Does Not Exist");
                }
                applyData(objectDefination, objectDefinationData);
                repository.updateData(objectDefination);
            }
            return true;
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }

    /**
     * Deletes the object definition referenced by the transfer object's id.
     *
     * @throws DataAccessException if the entity does not exist or persistence fails
     */
    @Override
    public Boolean deleteObjectDefination(
            ObjectDefinationData objectDefinationData)
            throws DataAccessException {
        try {
            ObjectDefination objectDefination = null;
            if (objectDefinationData.getObjectId() != null
                    && objectDefinationData.getObjectId() > 0) {
                objectDefination = repository
                        .findObjectDefinationById(objectDefinationData
                                .getObjectId());
            }
            if (objectDefination == null) {
                logger.error("ObjectDefination Does Not Exist");
                throw new DataAccessException("ObjectDefination Does Not Exist");
            }
            repository.deleteData(objectDefination);
            return true;
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }

    /**
     * Deletes the object definition with the given primary key.
     *
     * @throws DataAccessException if the entity does not exist or persistence fails
     */
    @Override
    public Boolean deleteObjectDefinationById(Integer objectId)
            throws DataAccessException {
        try {
            ObjectDefination objectDefination = repository
                    .findObjectDefinationById(objectId);
            if (objectDefination == null) {
                logger.error("ObjectDefination Does Not Exist");
                throw new DataAccessException("ObjectDefination Does Not Exist");
            }
            repository.deleteData(objectDefination);
            return true;
        } catch (PersistenceException e) {
            logger.error(e);
            throw new DataAccessException(e);
        }
    }
}
| |
/*
* Copyright (C) 2013 Brian Muramatsu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.btmura.android.reddit.app;
import android.content.Context;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.content.Loader;
import android.text.TextUtils;
import com.btmura.android.reddit.content.MessageThingLoader;
import com.btmura.android.reddit.database.Kinds;
import com.btmura.android.reddit.provider.Provider;
import com.btmura.android.reddit.widget.MessageListAdapter;
/**
 * Controller backing a list of private messages: owns the
 * {@link MessageListAdapter}, builds the cursor loader, and relays
 * selection/pagination state between the hosting fragment and the adapter.
 * The exact ordering inside {@link #swapCursor} (clear moreId, swap, then
 * capture extras) is relied upon by the load/append cycle.
 */
class MessageThingListController
        implements ThingListController<MessageListAdapter> {

    // Keys for the Bundle of arguments passed to the constructor and for
    // saved instance state.
    static final String EXTRA_ACCOUNT_NAME = "accountName";
    static final String EXTRA_MESSAGE_USER = "messageUser";
    static final String EXTRA_FILTER = "filter";
    static final String EXTRA_SINGLE_CHOICE = "singleChoice";
    static final String EXTRA_CURSOR_EXTRAS = "cursorExtras";

    private final Context context;
    private final String accountName;
    private final String messageUser;
    private final MessageListAdapter adapter;

    // Mutable paging state: current filter, the "more" id to append from,
    // the expected row count, and extras captured from the last cursor.
    private int filter;
    private String moreId;
    private int count;
    private Bundle cursorExtras;

    /**
     * Builds the controller and its adapter from the fragment's argument
     * Bundle (see the EXTRA_* keys above).
     */
    MessageThingListController(Context context, Bundle args) {
        this.context = context;
        this.accountName = getAccountNameExtra(args);
        this.messageUser = getMessageUserExtra(args);
        this.filter = getFilterExtra(args);
        this.adapter = new MessageListAdapter(context, accountName,
                getSingleChoiceExtra(args));
    }

    @Override
    public void restoreInstanceState(Bundle savedInstanceState) {
        cursorExtras = savedInstanceState.getBundle(EXTRA_CURSOR_EXTRAS);
    }

    @Override
    public void saveInstanceState(Bundle outState) {
        outState.putBundle(EXTRA_CURSOR_EXTRAS, cursorExtras);
    }

    // Loader related methods.

    @Override
    public Loader<Cursor> createLoader() {
        // moreId/count select the append page; cursorExtras carries session
        // state restored across configuration changes.
        return new MessageThingLoader(context, accountName, filter, moreId, count,
                cursorExtras);
    }

    @Override
    public void swapCursor(Cursor cursor) {
        // Reset the pending "more" request before handing the new data to the
        // adapter, then remember the cursor's extras for the next loader.
        moreId = null;
        adapter.swapCursor(cursor);
        cursorExtras = cursor != null ? cursor.getExtras() : null;
    }

    @Override
    public ThingBundle getThingBundle(int position) {
        Cursor c = adapter.getCursor();
        if (c != null && c.moveToPosition(position)) {
            return adapter.getThingBundle(position);
        }
        return null;
    }

    /** Returns the user whose messages are being listed. */
    public String getMessageUser() {
        return messageUser;
    }

    // Actions

    @Override
    public void onThingSelected(int position) {
        // Selecting an unread message marks it read asynchronously.
        if (adapter.isNew(position)) {
            Provider.readMessageAsync(context, accountName, getThingId(position),
                    true);
        }
    }

    // More complex getters.

    @Override
    public String getNextMoreId() {
        // The trailing row, when of kind MORE, carries the id to append from.
        Cursor c = adapter.getCursor();
        if (c != null && c.moveToLast()) {
            if (c.getInt(MessageThingLoader.INDEX_KIND) == Kinds.KIND_MORE) {
                return c.getString(MessageThingLoader.INDEX_THING_ID);
            }
        }
        return null;
    }

    @Override
    public boolean hasNextMoreId() {
        return !TextUtils.isEmpty(getNextMoreId());
    }

    @Override
    public int getNextCount() {
        // NOTE(review): subtracts one, presumably to exclude the trailing
        // "more" placeholder row; returns -1 for an empty cursor — confirm
        // callers only use this when a MORE row exists.
        Cursor c = adapter.getCursor();
        if (c != null) {
            return c.getCount() - 1;
        }
        return 0;
    }

    // Getters

    @Override
    public String getAccountName() {
        return accountName;
    }

    @Override
    public MessageListAdapter getAdapter() {
        return adapter;
    }

    @Override
    public int getFilter() {
        return filter;
    }

    @Override
    public String getMoreId() {
        return moreId;
    }

    @Override
    public int getCount() {
        return count;
    }

    @Override
    public boolean isSingleChoice() {
        return adapter.isSingleChoice();
    }

    @Override
    public int getSwipeAction() {
        // Messages cannot be hidden/dismissed by swiping.
        return SWIPE_ACTION_NONE;
    }

    // Simple setters.

    @Override
    public void setFilter(int filter) {
        this.filter = filter;
    }

    @Override
    public void setMoreId(String moreId) {
        this.moreId = moreId;
    }

    @Override
    public void setCount(int count) {
        this.count = count;
    }

    @Override
    public void setSelectedPosition(int position) {
        adapter.setSelectedPosition(position);
    }

    @Override
    public void setSelectedThing(String thingId, String linkId) {
        adapter.setSelectedThing(thingId, linkId);
    }

    @Override
    public void setThingBodyWidth(int thingBodyWidth) {
        adapter.setThingBodyWidth(thingBodyWidth);
    }

    // Simple adapter getters.

    /** Reads the thing id column for the row at {@code position}. */
    private String getThingId(int position) {
        return adapter.getString(position, MessageThingLoader.INDEX_THING_ID);
    }

    // Getters for extras.

    private static String getAccountNameExtra(Bundle extras) {
        return extras.getString(EXTRA_ACCOUNT_NAME);
    }

    private static String getMessageUserExtra(Bundle extras) {
        return extras.getString(EXTRA_MESSAGE_USER);
    }

    private static int getFilterExtra(Bundle extras) {
        return extras.getInt(EXTRA_FILTER);
    }

    private static boolean getSingleChoiceExtra(Bundle extras) {
        return extras.getBoolean(EXTRA_SINGLE_CHOICE);
    }

    // TODO(btmura): Remove the need for these methods.
    // The following are no-op/null implementations required by the
    // ThingListController interface but meaningless for message lists.

    @Override
    public String getQuery() {
        return null;
    }

    @Override
    public String getSubreddit() {
        return null;
    }

    @Override
    public void setParentSubreddit(String parentSubreddit) {
    }

    @Override
    public void setSubreddit(String subreddit) {
    }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.VersionStringComparator;
import com.facebook.buck.util.environment.Platform;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Represents a platform to target for Android. Eventually, it should be possible to construct an
* arbitrary platform target, but currently, we only recognize a fixed set of targets.
*/
public class AndroidPlatformTarget {
public static final String DEFAULT_ANDROID_PLATFORM_TARGET = "Google Inc.:Google APIs:21";
public static final String ANDROID_VERSION_PREFIX = "android-";
/**
* {@link Supplier} for an {@link AndroidPlatformTarget} that always throws a
* {@link NoAndroidSdkException}.
*/
public static final Supplier<AndroidPlatformTarget> explodingAndroidPlatformTargetSupplier =
new Supplier<AndroidPlatformTarget>() {
@Override
public AndroidPlatformTarget get() {
throw new NoAndroidSdkException();
}
};
@VisibleForTesting
static final Pattern PLATFORM_TARGET_PATTERN = Pattern.compile(
"(?:Google Inc\\.:Google APIs:|android-)(\\d+)");
private final String name;
private final Path androidJar;
private final List<Path> bootclasspathEntries;
private final Path aaptExecutable;
private final Path adbExecutable;
private final Path aidlExecutable;
private final Path zipalignExecutable;
private final Path dxExecutable;
private final Path androidFrameworkIdlFile;
private final Path proguardJar;
private final Path proguardConfig;
private final Path optimizedProguardConfig;
private final AndroidDirectoryResolver androidDirectoryResolver;
  /**
   * Constructs a platform target from fully resolved tool and library paths.
   * Private: instances are created through {@code getTargetForId()} and the
   * nested factories, which know how to locate these paths inside an SDK.
   */
  private AndroidPlatformTarget(
      String name,
      Path androidJar,
      List<Path> bootclasspathEntries,
      Path aaptExecutable,
      Path adbExecutable,
      Path aidlExecutable,
      Path zipalignExecutable,
      Path dxExecutable,
      Path androidFrameworkIdlFile,
      Path proguardJar,
      Path proguardConfig,
      Path optimizedProguardConfig,
      AndroidDirectoryResolver androidDirectoryResolver) {
    this.name = name;
    this.androidJar = androidJar;
    // Defensive snapshot so a caller-held list cannot mutate our state.
    this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries);
    this.aaptExecutable = aaptExecutable;
    this.adbExecutable = adbExecutable;
    this.aidlExecutable = aidlExecutable;
    this.zipalignExecutable = zipalignExecutable;
    this.dxExecutable = dxExecutable;
    this.androidFrameworkIdlFile = androidFrameworkIdlFile;
    this.proguardJar = proguardJar;
    this.proguardConfig = proguardConfig;
    this.optimizedProguardConfig = optimizedProguardConfig;
    this.androidDirectoryResolver = androidDirectoryResolver;
  }
  /**
   * This is likely something like {@code "Google Inc.:Google APIs:21"}.
   */
  public String getName() {
    return name;
  }

  @Override
  public String toString() {
    return getName();
  }

  /** @return path to this platform's {@code android.jar}. */
  public Path getAndroidJar() {
    return androidJar;
  }

  /**
   * @return bootclasspath entries as absolute {@link Path}s
   */
  public List<Path> getBootclasspathEntries() {
    return bootclasspathEntries;
  }

  /** @return path to the {@code aapt} resource-packaging tool. */
  public Path getAaptExecutable() {
    return aaptExecutable;
  }

  /** @return path to the {@code adb} debug-bridge tool. */
  public Path getAdbExecutable() {
    return adbExecutable;
  }

  /** @return path to the {@code aidl} interface compiler. */
  public Path getAidlExecutable() {
    return aidlExecutable;
  }

  /** @return path to the {@code zipalign} tool. */
  public Path getZipalignExecutable() {
    return zipalignExecutable;
  }

  /** @return path to the {@code dx} dexer (``dx.bat`` on Windows). */
  public Path getDxExecutable() {
    return dxExecutable;
  }

  /** @return path to the platform's {@code framework.aidl} file. */
  public Path getAndroidFrameworkIdlFile() {
    return androidFrameworkIdlFile;
  }

  /** @return path to the bundled {@code proguard.jar}. */
  public Path getProguardJar() {
    return proguardJar;
  }

  /** @return path to the default ProGuard configuration file. */
  public Path getProguardConfig() {
    return proguardConfig;
  }

  /** @return path to the optimizing ProGuard configuration file. */
  public Path getOptimizedProguardConfig() {
    return optimizedProguardConfig;
  }

  /** @return the NDK directory, if one can be resolved. */
  public Optional<Path> getNdkDirectory() {
    return androidDirectoryResolver.findAndroidNdkDir();
  }
/**
* @param platformId for the platform, such as "Google Inc.:Google APIs:16"
*/
public static Optional<AndroidPlatformTarget> getTargetForId(
String platformId,
AndroidDirectoryResolver androidDirectoryResolver,
Optional<Path> aaptOverride) {
Matcher platformMatcher = PLATFORM_TARGET_PATTERN.matcher(platformId);
if (platformMatcher.matches()) {
try {
int apiLevel = Integer.parseInt(platformMatcher.group(1));
Factory platformTargetFactory;
if (platformId.contains("Google APIs")) {
platformTargetFactory = new AndroidWithGoogleApisFactory();
} else {
platformTargetFactory = new AndroidWithoutGoogleApisFactory();
}
return Optional.of(
platformTargetFactory.newInstance(androidDirectoryResolver, apiLevel, aaptOverride));
} catch (NumberFormatException e) {
return Optional.absent();
}
} else {
return Optional.absent();
}
}
public static AndroidPlatformTarget getDefaultPlatformTarget(
AndroidDirectoryResolver androidDirectoryResolver,
Optional<Path> aaptOverride) {
return getTargetForId(DEFAULT_ANDROID_PLATFORM_TARGET, androidDirectoryResolver, aaptOverride)
.get();
}
private static interface Factory {
public AndroidPlatformTarget newInstance(
AndroidDirectoryResolver androidDirectoryResolver,
int apiLevel,
Optional<Path> aaptOverride);
}
/**
* Given the path to the Android SDK as well as the platform path within the Android SDK,
* find all the files needed to create the {@link AndroidPlatformTarget}, assuming that the
* organization of the Android SDK conforms to the ordinary directory structure.
*/
@VisibleForTesting
static AndroidPlatformTarget createFromDefaultDirectoryStructure(
String name,
AndroidDirectoryResolver androidDirectoryResolver,
String platformDirectoryPath,
Set<Path> additionalJarPaths,
Optional<Path> aaptOverride) {
Path androidSdkDir = androidDirectoryResolver.findAndroidSdkDir();
if (!androidSdkDir.isAbsolute()) {
throw new HumanReadableException(
"Path to Android SDK must be absolute but was: %s.",
androidSdkDir);
}
Path platformDirectory = androidSdkDir.resolve(platformDirectoryPath);
Path androidJar = platformDirectory.resolve("android.jar");
LinkedList<Path> bootclasspathEntries = Lists.newLinkedList(additionalJarPaths);
// Make sure android.jar is at the front of the bootclasspath.
bootclasspathEntries.addFirst(androidJar);
Path buildToolsDir = androidSdkDir.resolve("build-tools");
// This is the relative path under the Android SDK directory to the directory that contains the
// dx script, jack, jill, and binaries.
String buildToolsPath;
// This is the relative path under the Android SDK directory to the directory that contains the
// aapt, aidl, and zipalign binaries. Before Android SDK Build-tools 23.0.0_rc1, this was the
// same as buildToolsPath above.
String buildToolsBinPath;
if (buildToolsDir.toFile().isDirectory()) {
// In older versions of the ADT that have been upgraded via the SDK manager, the build-tools
// directory appears to contain subfolders of the form "17.0.0". However, newer versions of
// the ADT that are downloaded directly from http://developer.android.com/ appear to have
// subfolders of the form android-4.2.2. We need to support both of these scenarios.
File[] directories = buildToolsDir.toFile().listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.isDirectory();
}
});
if (directories.length == 0) {
throw new HumanReadableException(
Joiner.on(System.getProperty("line.separator")).join(
"%s was empty, but should have contained a subdirectory with build tools.",
"Install them using the Android SDK Manager (%s)."),
buildToolsDir,
androidSdkDir.resolve("tools").resolve("android"));
} else {
File newestBuildToolsDir = pickNewestBuildToolsDir(ImmutableSet.copyOf(directories));
buildToolsPath = "build-tools/" + newestBuildToolsDir.getName();
if (androidSdkDir.resolve(buildToolsPath).resolve("bin").toFile().exists()) {
// Android SDK Build-tools >= 23.0.0_rc1 have executables under a new bin directory.
buildToolsBinPath = buildToolsPath + "/bin";
} else {
// Android SDK Build-tools < 23.0.0_rc1 have executables under the build-tools directory.
buildToolsBinPath = buildToolsPath;
}
}
} else {
buildToolsPath = "platform-tools";
buildToolsBinPath = buildToolsPath;
}
Path zipAlignExecutable = androidSdkDir.resolve("tools/zipalign").toAbsolutePath();
if (!zipAlignExecutable.toFile().exists()) {
// Android SDK Build-tools >= 19.1.0 have zipalign under the build-tools directory.
zipAlignExecutable =
androidSdkDir.resolve(buildToolsBinPath).resolve("zipalign").toAbsolutePath();
}
Path androidFrameworkIdlFile = platformDirectory.resolve("framework.aidl");
Path proguardJar = androidSdkDir.resolve("tools/proguard/lib/proguard.jar");
Path proguardConfig = androidSdkDir.resolve("tools/proguard/proguard-android.txt");
Path optimizedProguardConfig =
androidSdkDir.resolve("tools/proguard/proguard-android-optimize.txt");
return new AndroidPlatformTarget(
name,
androidJar.toAbsolutePath(),
bootclasspathEntries,
aaptOverride.or(androidSdkDir.resolve(buildToolsBinPath).resolve("aapt").toAbsolutePath()),
androidSdkDir.resolve("platform-tools/adb").toAbsolutePath(),
androidSdkDir.resolve(buildToolsBinPath).resolve("aidl").toAbsolutePath(),
zipAlignExecutable,
androidSdkDir.resolve(buildToolsPath).resolve(
Platform.detect() == Platform.WINDOWS ? "dx.bat" : "dx").toAbsolutePath(),
androidFrameworkIdlFile,
proguardJar,
proguardConfig,
optimizedProguardConfig,
androidDirectoryResolver);
}
private static File pickNewestBuildToolsDir(Set<File> directories) {
if (directories.size() == 1) {
return Iterables.getOnlyElement(directories);
}
List<File> apiVersionDirectories = Lists.newArrayList();
List<File> androidVersionDirectories = Lists.newArrayList();
for (File dir : directories) {
if (dir.getName().startsWith(ANDROID_VERSION_PREFIX)) {
androidVersionDirectories.add(dir);
} else {
apiVersionDirectories.add(dir);
}
}
final VersionStringComparator comparator = new VersionStringComparator();
// API version directories are downloaded by the package manager, whereas Android version
// directories are bundled with the SDK when it's unpacked. So API version directories will
// presumably be newer.
if (!apiVersionDirectories.isEmpty()) {
Collections.sort(apiVersionDirectories, new Comparator<File>() {
@Override
public int compare(File a, File b) {
String versionA = a.getName();
String versionB = b.getName();
return comparator.compare(versionA, versionB);
}
});
// Return the last element in the list.
return apiVersionDirectories.get(apiVersionDirectories.size() - 1);
} else {
Collections.sort(androidVersionDirectories, new Comparator<File>() {
@Override
public int compare(File a, File b) {
String versionA = a.getName().substring(ANDROID_VERSION_PREFIX.length());
String versionB = b.getName().substring(ANDROID_VERSION_PREFIX.length());
return comparator.compare(versionA, versionB);
}
});
// Return the last element in the list.
return androidVersionDirectories.get(androidVersionDirectories.size() - 1);
}
}
  /**
   * Factory to build an AndroidPlatformTarget that corresponds to a given Google API level.
   */
  private static class AndroidWithGoogleApisFactory implements Factory {

    // Optional revision suffix on an add-on directory, e.g. "addon-google_apis-google-21-1";
    // group(1) captures the trailing revision number when present.
    private static final String API_DIR_SUFFIX = "(?:-([0-9]+))*";

    @Override
    public AndroidPlatformTarget newInstance(
        final AndroidDirectoryResolver androidDirectoryResolver,
        final int apiLevel,
        Optional<Path> aaptOverride) {
      // TODO(natthu): Use Paths instead of Strings everywhere in this file.
      Path androidSdkDir = androidDirectoryResolver.findAndroidSdkDir();
      File addonsParentDir = androidSdkDir.resolve("add-ons").toFile();
      String apiDirPrefix = String.format("addon-google_apis-google-%d", apiLevel);
      final Pattern apiDirPattern = Pattern.compile(apiDirPrefix + API_DIR_SUFFIX);
      if (addonsParentDir.isDirectory()) {
        // Collect only the add-on directories matching this API level (any revision).
        String[] addonsApiDirs = addonsParentDir.list(
            new FilenameFilter() {
              @Override
              public boolean accept(File dir, String name) {
                return apiDirPattern.matcher(name).matches();
              }
            });
        // Sort ascending by revision number; a directory without a revision suffix sorts as 0.
        Arrays.sort(addonsApiDirs, new Comparator<String>() {
          @Override
          public int compare(String o1, String o2) {
            return getVersion(o1) - getVersion(o2);
          }

          // Extracts the numeric revision suffix, defaulting to 0 when absent.
          private int getVersion(String dirName) {
            Matcher matcher = apiDirPattern.matcher(dirName);
            Preconditions.checkState(matcher.matches());
            if (matcher.group(1) != null) {
              return Integer.parseInt(matcher.group(1));
            }
            return 0;
          }
        });
        ImmutableSet.Builder<Path> additionalJarPaths = ImmutableSet.builder();
        // NOTE(review): because the dirs are sorted ascending and we return on the first one
        // that contains add-on jars, the LOWEST revision with a populated libs/ dir wins —
        // confirm this is intended (picking the newest would require iterating in reverse).
        for (String dir : addonsApiDirs) {
          File libsDir = new File(addonsParentDir, dir + "/libs");
          String[] addonFiles;
          if (libsDir.isDirectory() &&
              (addonFiles = libsDir.list(new AddonFilter())) != null &&
              addonFiles.length != 0) {
            // Sort for a deterministic classpath order.
            Arrays.sort(addonFiles);
            for (String addonJar : addonFiles) {
              additionalJarPaths.add(libsDir.toPath().resolve(addonJar));
            }
            return createFromDefaultDirectoryStructure(
                String.format("Google Inc.:Google APIs:%d", apiLevel),
                androidDirectoryResolver,
                String.format("platforms/android-%d", apiLevel),
                additionalJarPaths.build(),
                aaptOverride);
          }
        }
      }
      // No usable add-on directory found: tell the user how to install the Google APIs.
      throw new HumanReadableException(
          "Google APIs not found in %s.\n" +
          "Please run '%s/tools/android sdk' and select both 'SDK Platform' and " +
          "'Google APIs' under Android (API %d)",
          new File(addonsParentDir, apiDirPrefix + "/libs").getAbsolutePath(),
          androidSdkDir,
          apiLevel);
    }
  }
private static class AndroidWithoutGoogleApisFactory implements Factory {
@Override
public AndroidPlatformTarget newInstance(
final AndroidDirectoryResolver androidDirectoryResolver,
final int apiLevel,
Optional<Path> aaptOverride) {
return createFromDefaultDirectoryStructure(
String.format("android-%d", apiLevel),
androidDirectoryResolver,
String.format("platforms/android-%d", apiLevel),
/* additionalJarPaths */ ImmutableSet.<Path>of(),
aaptOverride);
}
}
private static class AddonFilter implements FilenameFilter {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".jar");
}
}
}
| |
/*
* Copyright (c) 2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.apimgt.impl.utils;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.impl.APIConstants;
import javax.xml.namespace.QName;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class APIDescriptionGenUtil {
/**
* Class Logger
*/
private static Log log = LogFactory.getLog(APIDescriptionGenUtil.class);
private static final String DESCRIPTION = "Allows [1] request(s) per minute.";
public static String generateDescriptionFromPolicy(OMElement policy) throws APIManagementException {
//Here as the method is about extracting some info from the policy. And it's not concern on compliance to
// specification. So it just extract the required element.
OMElement maxCount;
OMElement timeUnit;
long requestPerMinute;
try {
maxCount = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).getFirstChildWithName
(APIConstants
.THROTTLE_CONTROL_ELEMENT).getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT);
timeUnit = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).getFirstChildWithName
(APIConstants
.THROTTLE_CONTROL_ELEMENT).getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_UNIT_TIME_ELEMENT);
//Here we will assume time unit provided as milli second and do calculation to get requests per minute.
if (maxCount.getText().isEmpty() || timeUnit.getText().isEmpty()) {
String msg = APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT + " or "
+ APIConstants.THROTTLE_UNIT_TIME_ELEMENT + " element data found empty in " +
"the policy.";
log.warn(msg);
throw new APIManagementException(msg);
}
requestPerMinute = (Long.parseLong(maxCount.getText().trim()) * 60000) / (Long.parseLong(timeUnit.getText().trim()));
if (requestPerMinute >= 1) {
return DESCRIPTION.replaceAll("\\[1\\]", Long.toString(requestPerMinute));
}
return DESCRIPTION;
} catch (NullPointerException npe) {
String msg = "Policy could not be parsed correctly based on http://schemas.xmlsoap.org/ws/2004/09/policy " +
"specification";
log.error(msg, npe);
throw new APIManagementException(msg, npe);
}
}
/**
* This method is used to get Allowed Requests count for a tier
*
* @param policy tier policy
* @return Allowed Requests Count
* @throws APIManagementException if error occurs when processing XML
*/
public static long getAllowedRequestCount(OMElement policy) throws APIManagementException {
try {
OMElement maxCount = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_CONTROL_ELEMENT).
getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT);
if (maxCount.getText().isEmpty()) {
String message = APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT.getLocalPart()
+ " element data not found empty in the policy.";
log.warn(message);
throw new APIManagementException(message);
}
return Long.parseLong(maxCount.getText().trim());
} catch (OMException e) {
//We capture the runtime exception here.
String errorMessage = "Policy could not be parsed correctly based on " +
"http://schemas.xmlsoap.org/ws/2004/09/policy specification";
log.error(errorMessage, e);
throw new APIManagementException(errorMessage + e.getMessage());
} catch (NumberFormatException e) {
log.error("Error in retrieving request count in tier xml.", e);
throw new APIManagementException("Error in retrieving request count in tier xml." + e.getMessage());
}
}
/**
* The method to extract the tier attributes from each tier level policy definitions
* @param policy Tier level policy
* @return Attributes map
* @throws APIManagementException
*/
public static Map<String, Object> getTierAttributes(OMElement policy) throws APIManagementException {
Map<String, Object> attributesMap = new HashMap<String, Object>();
OMElement attributes = null;
try {
OMElement tier = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).getFirstChildWithName
(APIConstants.THROTTLE_CONTROL_ELEMENT).getFirstChildWithName(APIConstants.POLICY_ELEMENT)
.getFirstChildWithName(APIConstants.POLICY_ELEMENT);
if (tier != null) {
attributes = tier.getFirstChildWithName(APIConstants.THROTTLE_ATTRIBUTES_ELEMENT);
}
if (attributes == null) {
return attributesMap;
} else {
for (Iterator childElements = attributes.getChildElements(); childElements.hasNext(); ) {
OMElement element = (OMElement) childElements.next();
String displayName = element.getAttributeValue(
new QName(APIConstants.THROTTLE_ATTRIBUTE_DISPLAY_NAME));
String localName = element.getLocalName();
String attrName = (displayName != null ? displayName : localName); //If displayName not defined,
// use the attribute name
String attrValue = element.getText();
attributesMap.put(attrName, attrValue);
}
}
} catch (NullPointerException e) {
String errorMessage = "Policy could not be parsed correctly based on " +
"http://schemas.xmlsoap.org/ws/2004/09/policy specification";
log.error(errorMessage, e);
throw new APIManagementException(errorMessage + e.getMessage());
}
return attributesMap;
}
/**
* This method gives the allowed request count for a minute
*
* @param policy The tier level policy
* @return The request count for a minute((maxCount * 60000)/timeDuration)
* @throws APIManagementException if policy or parsing error occurs
*/
public static long getAllowedCountPerMinute(OMElement policy) throws APIManagementException {
//Here as the method is about extracting some info from the policy. And it's not concern on compliance to
// specification. So it just extract the required element.
OMElement maxCount;
OMElement timeUnit;
long requestPerMinute;
try {
maxCount = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).getFirstChildWithName
(APIConstants
.THROTTLE_CONTROL_ELEMENT).getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT);
timeUnit = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).getFirstChildWithName
(APIConstants
.THROTTLE_CONTROL_ELEMENT).getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_UNIT_TIME_ELEMENT);
//Here we will assume time unit provided as milli second and do calculation to get requests per minute.
if (maxCount.getText().isEmpty() || timeUnit.getText().isEmpty()) {
String errorMessage = APIConstants.THROTTLE_MAXIMUM_COUNT_ELEMENT + "or"
+ APIConstants.THROTTLE_UNIT_TIME_ELEMENT + " element data found empty in " +
"the policy.";
log.warn(errorMessage);
throw new APIManagementException(errorMessage);
}
return (Long.parseLong(maxCount.getText().trim()) * 60000) /
(Long.parseLong(timeUnit.getText().trim()));
} catch (NullPointerException e) {
String errorMessage = "Policy could not be parsed correctly based on " +
"http://schemas.xmlsoap.org/ws/2004/09/policy specification";
log.error(errorMessage, e);
throw new APIManagementException(errorMessage + e.getMessage());
}
}
/**
* This method is used to get time duration of a tier
*
* @param policy tier policy
* @return time duration for requests
* @throws APIManagementException if error occurs when processing XML
*/
public static long getTimeDuration(OMElement policy) throws APIManagementException {
try {
OMElement duration = policy.getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_CONTROL_ELEMENT).
getFirstChildWithName(APIConstants.POLICY_ELEMENT).
getFirstChildWithName(APIConstants.THROTTLE_UNIT_TIME_ELEMENT);
if (duration.getText().isEmpty()) {
String message = APIConstants.THROTTLE_UNIT_TIME_ELEMENT.getLocalPart() +
" element data not found empty in the policy.";
log.warn(message);
throw new APIManagementException(message);
}
// We return the milliseconds value as it is.
// Reason - We need the ability to do fine grained throttling configurations.
return Long.parseLong(duration.getText().trim());
} catch (OMException e) {
String errorMessage = "Policy could not be parsed correctly based on " +
"http://schemas.xmlsoap.org/ws/2004/09/policy specification";
log.error(errorMessage, e);
throw new APIManagementException(errorMessage, e);
} catch (NumberFormatException e) {
String message = "Error in retrieving time duration from the tiers xml";
log.error(message, e);
throw new APIManagementException(message, e);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.utils.actors;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.locks.LockSupport;
import java.util.function.Consumer;
import org.jboss.logging.Logger;
/**
 * Base class for single-threaded, ordered task processors. Tasks are appended to a
 * concurrent queue and drained by at most one thread at a time; the {@code state} field
 * (driven through {@link AtomicIntegerFieldUpdater} CAS operations) guarantees that only
 * one drain loop runs concurrently.
 *
 * @param <T> the type of task this processor consumes via {@link #doTask(Object)}
 */
public abstract class ProcessorBase<T> extends HandlerBase {

   private static final Logger logger = Logger.getLogger(ProcessorBase.class);

   // Lifecycle states held in `state`; transitions happen via stateUpdater CAS.
   public static final int STATE_NOT_RUNNING = 0;
   public static final int STATE_RUNNING = 1;
   public static final int STATE_FORCED_SHUTDOWN = 2;

   // Pending work; also used as the monitor guarding the shutdownNow()/task() drain race.
   protected final Queue<T> tasks = new ConcurrentLinkedQueue<>();

   // Executor that actually runs the drain loop.
   private final Executor delegate;

   /**
    * Using a method reference instead of an inner classes allows the caller to reduce the pointer chasing
    * when accessing ProcessorBase.this fields/methods.
    */
   private final Runnable task = this::executePendingTasks;

   // used by stateUpdater
   @SuppressWarnings("unused")
   private volatile int state = STATE_NOT_RUNNING;

   // Request of forced shutdown
   private volatile boolean requestedForcedShutdown = false;
   // Request of educated shutdown:
   private volatile boolean requestedShutdown = false;

   private static final AtomicIntegerFieldUpdater<ProcessorBase> stateUpdater = AtomicIntegerFieldUpdater.newUpdater(ProcessorBase.class, "state");

   // Drain loop executed on the delegate executor; at most one instance runs at a time.
   private void executePendingTasks() {
      do {
         //if there is no thread active and is not already dead then we run
         if (stateUpdater.compareAndSet(this, STATE_NOT_RUNNING, STATE_RUNNING)) {
            enter();
            try {
               T task;
               //while the queue is not empty we process in order:
               //if requestedForcedShutdown==true than no new tasks will be drained from the tasks q.
               while (!requestedForcedShutdown && (task = tasks.poll()) != null) {
                  doTask(task);
               }
            } finally {
               leave();
               //set state back to not running if possible: shutdownNow could be called by doTask(task).
               //If a shutdown has happened there is no need to continue polling tasks
               if (!stateUpdater.compareAndSet(this, STATE_RUNNING, STATE_NOT_RUNNING)) {
                  return;
               }
            }
         } else {
            return;
         }
         //we loop again based on tasks not being empty. Otherwise there is a window where the state is running,
         //but poll() has returned null, so a submitting thread will believe that it does not need re-execute.
         //this check fixes the issue
      }
      while (!tasks.isEmpty() && !requestedShutdown);
   }

   /**
    * It will shutdown and wait 30 seconds for timeout.
    */
   public void shutdown() {
      shutdown(30, TimeUnit.SECONDS);
   }

   /**
    * Requests a graceful shutdown and, unless called from within the handler thread itself,
    * waits up to the given timeout for in-flight work to drain.
    */
   public void shutdown(long timeout, TimeUnit unit) {
      requestedShutdown = true;

      if (!inHandler()) {
         // if it's in handler.. we just return
         flush(timeout, unit);
      }
   }

   /**
    * It will wait the current execution (if there is one) to finish
    * but will not complete any further executions
    *
    * @param onPendingItem callback invoked for every task still queued at shutdown time
    * @return the number of pending tasks handed to {@code onPendingItem}
    */
   public int shutdownNow(Consumer<? super T> onPendingItem) {
      //alert anyone that has been requested (at least) an immediate shutdown
      requestedForcedShutdown = true;
      requestedShutdown = true;

      if (inHandler()) {
         stateUpdater.set(this, STATE_FORCED_SHUTDOWN);
      } else {
         //it could take a very long time depending on the current executing task
         do {
            //alert the ExecutorTask (if is running) to just drain the current backlog of tasks
            final int startState = stateUpdater.get(this);
            if (startState == STATE_FORCED_SHUTDOWN) {
               //another thread has completed a forced shutdown: let it to manage the tasks cleanup
               break;
            }
            if (startState == STATE_RUNNING) {
               //wait 100 ms to avoid burning CPU while waiting and
               //give other threads a chance to make progress
               LockSupport.parkNanos(100_000_000L);
            }
         }
         while (!stateUpdater.compareAndSet(this, STATE_NOT_RUNNING, STATE_FORCED_SHUTDOWN));
         //this could happen just one time: the forced shutdown state is the last one and
         //can be set by just one caller.
         //As noted on the execute method there is a small chance that some tasks would be enqueued
      }
      int pendingItems = 0;
      //there is a small chance that execute() could race with this cleanup: the lock allow an all-or-nothing behaviour between them
      synchronized (tasks) {
         T item;
         while ((item = tasks.poll()) != null) {
            onPendingItem.accept(item);
            pendingItems++;
         }
      }
      return pendingItems;
   }

   // Subclass hook: processes a single task; runs on the delegate executor, in order.
   protected abstract void doTask(T task);

   public ProcessorBase(Executor parent) {
      this.delegate = parent;
   }

   // True when no drain loop is currently running (quick, racy snapshot).
   public final boolean isFlushed() {
      return this.state == STATE_NOT_RUNNING;
   }

   /**
    * WARNING: This will only flush when all the activity is suspended.
    * don't expect success on this call if another thread keeps feeding the queue
    * this is only valid on situations where you are not feeding the queue,
    * like in shutdown and failover situations.
    *
    * @return true when the processor reached the not-running state within the timeout
    */
   public final boolean flush(long timeout, TimeUnit unit) {
      if (this.state == STATE_NOT_RUNNING) {
         // quick test, most of the time it will be empty anyways
         return true;
      }

      long timeLimit = System.currentTimeMillis() + unit.toMillis(timeout);
      try {
         while (this.state == STATE_RUNNING && timeLimit > System.currentTimeMillis()) {

            if (tasks.isEmpty()) {
               return true;
            }

            Thread.sleep(10);
         }
      } catch (InterruptedException e) {
         // ignored
      }

      return this.state == STATE_NOT_RUNNING;
   }

   // Enqueues a task and kicks the drain loop if it is not already running.
   protected void task(T command) {
      if (requestedShutdown) {
         logAddOnShutdown();
      }
      //The shutdown process could finish right after the above check: shutdownNow can drain the remaining tasks
      tasks.add(command);
      //cache locally the state to avoid multiple volatile loads
      final int state = stateUpdater.get(this);
      if (state != STATE_RUNNING) {
         onAddedTaskIfNotRunning(state);
      }
   }

   /**
    * This has to be called on the assumption that state!=STATE_RUNNING.
    * It is packed separately from {@link #task(Object)} just for performance reasons: it
    * handles the uncommon execution cases for bursty scenarios i.e. the slowest execution path.
    */
   private void onAddedTaskIfNotRunning(int state) {
      if (state == STATE_NOT_RUNNING) {
         //startPoller could be deleted but is maintained because is inherited
         delegate.execute(task);
      } else if (state == STATE_FORCED_SHUTDOWN) {
         //help the GC by draining any task just submitted: it helps to cover the case of a shutdownNow finished before tasks.add
         synchronized (tasks) {
            tasks.clear();
         }
      }
   }

   private static void logAddOnShutdown() {
      if (logger.isDebugEnabled()) {
         logger.debug("Ordered executor has been gently shutdown at", new Exception("debug"));
      }
   }

   /**
    * Returns the remaining items to be processed.
    * <p>
    * This method is safe to be called by different threads and its accuracy is subject to concurrent modifications.<br>
    * It is meant to be used only for test purposes, because of its {@code O(n)} cost.
    */
   public final int remaining() {
      return tasks.size();
   }

   public final int status() {
      //avoid using the updater because in older version of JDK 8 isn't optimized as a vanilla volatile get
      return this.state;
   }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.cosmos.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.cosmos.fluent.CollectionPartitionsClient;
import com.azure.resourcemanager.cosmos.fluent.models.PartitionMetricInner;
import com.azure.resourcemanager.cosmos.fluent.models.PartitionUsageInner;
import com.azure.resourcemanager.cosmos.models.PartitionMetricListResult;
import com.azure.resourcemanager.cosmos.models.PartitionUsagesResult;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in CollectionPartitionsClient. */
public final class CollectionPartitionsClientImpl implements CollectionPartitionsClient {
private final ClientLogger logger = new ClientLogger(CollectionPartitionsClientImpl.class);
/** The proxy service used to perform REST calls. */
private final CollectionPartitionsService service;
/** The service client containing this operation class. */
private final CosmosDBManagementClientImpl client;
/**
* Initializes an instance of CollectionPartitionsClientImpl.
*
* @param client the instance of the service client containing this operation class.
*/
CollectionPartitionsClientImpl(CosmosDBManagementClientImpl client) {
this.service =
RestProxy
.create(CollectionPartitionsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
this.client = client;
}
    /**
     * The interface defining all the services for CosmosDBManagementClientCollectionPartitions to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "CosmosDBManagementCl")
    private interface CollectionPartitionsService {
        // GET .../partitions/metrics — partition-level metrics matching the $filter expression.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB"
                + "/databaseAccounts/{accountName}/databases/{databaseRid}/collections/{collectionRid}/partitions"
                + "/metrics")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<PartitionMetricListResult>> listMetrics(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("accountName") String accountName,
            @PathParam("databaseRid") String databaseRid,
            @PathParam("collectionRid") String collectionRid,
            @QueryParam("api-version") String apiVersion,
            @QueryParam("$filter") String filter,
            Context context);

        // GET .../partitions/usages — partition-level usage data; $filter is optional here.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB"
                + "/databaseAccounts/{accountName}/databases/{databaseRid}/collections/{collectionRid}/partitions"
                + "/usages")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<PartitionUsagesResult>> listUsages(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("accountName") String accountName,
            @PathParam("databaseRid") String databaseRid,
            @PathParam("collectionRid") String collectionRid,
            @QueryParam("api-version") String apiVersion,
            @QueryParam("$filter") String filter,
            Context context);
    }
    /**
     * Retrieves the metrics determined by the given filter for the given collection, split by partition.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param collectionRid Cosmos DB collection rid.
     * @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
     *     filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
     *     timeGrain. The supported operator is eq.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response to a list partition metrics request.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PartitionMetricInner>> listMetricsSinglePageAsync(
        String resourceGroupName, String accountName, String databaseRid, String collectionRid, String filter) {
        // Validate each required parameter eagerly, surfacing failures as error signals
        // rather than thrown exceptions so subscribers see them on the reactive path.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (accountName == null) {
            return Mono.error(new IllegalArgumentException("Parameter accountName is required and cannot be null."));
        }
        if (databaseRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter databaseRid is required and cannot be null."));
        }
        if (collectionRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter collectionRid is required and cannot be null."));
        }
        if (filter == null) {
            return Mono.error(new IllegalArgumentException("Parameter filter is required and cannot be null."));
        }
        // API version is pinned by the code generator for this client.
        final String apiVersion = "2019-08-01";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listMetrics(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            accountName,
                            databaseRid,
                            collectionRid,
                            apiVersion,
                            filter,
                            context))
            // The service returns a plain list; wrap it as a single page with no continuation token.
            .<PagedResponse<PartitionMetricInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null))
            // Propagate the client's shared Reactor context to the subscription.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Retrieves the metrics determined by the given filter for the given collection, split by partition.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param collectionRid Cosmos DB collection rid.
     * @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
     *     filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
     *     timeGrain. The supported operator is eq.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response to a list partition metrics request.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PartitionMetricInner>> listMetricsSinglePageAsync(
        String resourceGroupName,
        String accountName,
        String databaseRid,
        String collectionRid,
        String filter,
        Context context) {
        // Same validation as the context-free overload: fail via error signals, not throws.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (accountName == null) {
            return Mono.error(new IllegalArgumentException("Parameter accountName is required and cannot be null."));
        }
        if (databaseRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter databaseRid is required and cannot be null."));
        }
        if (collectionRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter collectionRid is required and cannot be null."));
        }
        if (filter == null) {
            return Mono.error(new IllegalArgumentException("Parameter filter is required and cannot be null."));
        }
        // API version is pinned by the code generator for this client.
        final String apiVersion = "2019-08-01";
        // Merge the caller's context with the client's defaults before issuing the call.
        context = this.client.mergeContext(context);
        return service
            .listMetrics(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                accountName,
                databaseRid,
                collectionRid,
                apiVersion,
                filter,
                context)
            // The service returns a plain list; wrap it as a single page with no continuation token.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null));
    }
/**
* Retrieves the metrics determined by the given filter for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
* filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
* timeGrain. The supported operator is eq.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition metrics request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<PartitionMetricInner> listMetricsAsync(
String resourceGroupName, String accountName, String databaseRid, String collectionRid, String filter) {
return new PagedFlux<>(
() -> listMetricsSinglePageAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter));
}
/**
* Retrieves the metrics determined by the given filter for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
* filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
* timeGrain. The supported operator is eq.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition metrics request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<PartitionMetricInner> listMetricsAsync(
String resourceGroupName,
String accountName,
String databaseRid,
String collectionRid,
String filter,
Context context) {
return new PagedFlux<>(
() ->
listMetricsSinglePageAsync(
resourceGroupName, accountName, databaseRid, collectionRid, filter, context));
}
/**
* Retrieves the metrics determined by the given filter for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
* filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
* timeGrain. The supported operator is eq.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition metrics request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<PartitionMetricInner> listMetrics(
String resourceGroupName, String accountName, String databaseRid, String collectionRid, String filter) {
return new PagedIterable<>(
listMetricsAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter));
}
/**
* Retrieves the metrics determined by the given filter for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of metrics to return. The parameters that can be
* filtered are name.value (name of the metric, can have an or of multiple names), startTime, endTime, and
* timeGrain. The supported operator is eq.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition metrics request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<PartitionMetricInner> listMetrics(
String resourceGroupName,
String accountName,
String databaseRid,
String collectionRid,
String filter,
Context context) {
return new PagedIterable<>(
listMetricsAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter, context));
}
    /**
     * Retrieves the usages (most recent storage data) for the given collection, split by partition.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param collectionRid Cosmos DB collection rid.
     * @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is
     *     name.value (name of the metric, can have an or of multiple names).
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response to a list partition level usage request.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PartitionUsageInner>> listUsagesSinglePageAsync(
        String resourceGroupName, String accountName, String databaseRid, String collectionRid, String filter) {
        // Fail fast (as a Mono error, not a thrown exception) on missing client configuration or parameters.
        // Checks run in order, so the first missing value determines which error is reported.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (accountName == null) {
            return Mono.error(new IllegalArgumentException("Parameter accountName is required and cannot be null."));
        }
        if (databaseRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter databaseRid is required and cannot be null."));
        }
        if (collectionRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter collectionRid is required and cannot be null."));
        }
        // Service API version this operation was generated against.
        final String apiVersion = "2019-08-01";
        // FluxUtil.withContext supplies the reactor Context to the service call; the client's shared
        // context is merged in via subscriberContext below. The result is adapted into a PagedResponse
        // with a null continuation token, i.e. all results arrive in a single page.
        // NOTE(review): subscriberContext is deprecated in newer Reactor releases — presumably fine for the
        // Reactor version this generated client targets; confirm before upgrading Reactor.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listUsages(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            accountName,
                            databaseRid,
                            collectionRid,
                            apiVersion,
                            filter,
                            context))
            .<PagedResponse<PartitionUsageInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Retrieves the usages (most recent storage data) for the given collection, split by partition.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param collectionRid Cosmos DB collection rid.
     * @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is
     *     name.value (name of the metric, can have an or of multiple names).
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response to a list partition level usage request.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PartitionUsageInner>> listUsagesSinglePageAsync(
        String resourceGroupName,
        String accountName,
        String databaseRid,
        String collectionRid,
        String filter,
        Context context) {
        // Fail fast (as a Mono error, not a thrown exception) on missing client configuration or parameters.
        // Checks run in order, so the first missing value determines which error is reported.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (accountName == null) {
            return Mono.error(new IllegalArgumentException("Parameter accountName is required and cannot be null."));
        }
        if (databaseRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter databaseRid is required and cannot be null."));
        }
        if (collectionRid == null) {
            return Mono.error(new IllegalArgumentException("Parameter collectionRid is required and cannot be null."));
        }
        // Service API version this operation was generated against.
        final String apiVersion = "2019-08-01";
        // Merge the caller's context with the client's shared context before invoking the service.
        context = this.client.mergeContext(context);
        // Adapt the raw response into a PagedResponse with a null continuation token,
        // i.e. all results arrive in a single page.
        return service
            .listUsages(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                accountName,
                databaseRid,
                collectionRid,
                apiVersion,
                filter,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null));
    }
/**
* Retrieves the usages (most recent storage data) for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is
* name.value (name of the metric, can have an or of multiple names).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition level usage request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<PartitionUsageInner> listUsagesAsync(
String resourceGroupName, String accountName, String databaseRid, String collectionRid, String filter) {
return new PagedFlux<>(
() -> listUsagesSinglePageAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter));
}
/**
* Retrieves the usages (most recent storage data) for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition level usage request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<PartitionUsageInner> listUsagesAsync(
String resourceGroupName, String accountName, String databaseRid, String collectionRid) {
final String filter = null;
return new PagedFlux<>(
() -> listUsagesSinglePageAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter));
}
/**
* Retrieves the usages (most recent storage data) for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is
* name.value (name of the metric, can have an or of multiple names).
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition level usage request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<PartitionUsageInner> listUsagesAsync(
String resourceGroupName,
String accountName,
String databaseRid,
String collectionRid,
String filter,
Context context) {
return new PagedFlux<>(
() ->
listUsagesSinglePageAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter, context));
}
/**
* Retrieves the usages (most recent storage data) for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is
* name.value (name of the metric, can have an or of multiple names).
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition level usage request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<PartitionUsageInner> listUsages(
String resourceGroupName,
String accountName,
String databaseRid,
String collectionRid,
String filter,
Context context) {
return new PagedIterable<>(
listUsagesAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter, context));
}
/**
* Retrieves the usages (most recent storage data) for the given collection, split by partition.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Cosmos DB database account name.
* @param databaseRid Cosmos DB database rid.
* @param collectionRid Cosmos DB collection rid.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a list partition level usage request.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<PartitionUsageInner> listUsages(
String resourceGroupName, String accountName, String databaseRid, String collectionRid) {
final String filter = null;
return new PagedIterable<>(listUsagesAsync(resourceGroupName, accountName, databaseRid, collectionRid, filter));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.vectorreader;
import java.math.BigDecimal;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.CarbonDictionary;
import org.apache.carbondata.core.scan.scanner.LazyPageLoader;
import org.apache.spark.sql.CarbonVectorProxy;
import org.apache.spark.sql.carbondata.execution.datasources.CarbonSparkDataSourceUtil;
import org.apache.spark.sql.types.Decimal;
/**
 * Fills the vector directly, without considering any deleted rows.
 *
 * <p>Thin adapter that forwards every put/get call for a single column ordinal to the
 * underlying Spark {@link CarbonVectorProxy.ColumnVectorProxy}.
 */
class ColumnarVectorWrapperDirect implements CarbonColumnVector {
  /**
   * It is one column vector adapter class. All single-column writes are delegated to it.
   */
  protected CarbonVectorProxy.ColumnVectorProxy sparkColumnVectorProxy;
  /**
   * It is adapter class of complete ColumnarBatch. Kept to size/construct child vectors
   * (see {@link #reserveDictionaryIds()}).
   */
  protected CarbonVectorProxy carbonVectorProxy;
  // Column position of this vector within the batch.
  protected int ordinal;
  // When true, putInt writes dictionary ids instead of decoded values (see putInt).
  protected boolean isDictionary;
  // Data type of the underlying block, as set by the caller via setBlockDataType.
  private DataType blockDataType;
  // Child wrapper holding dictionary ids; created lazily by reserveDictionaryIds, may be null.
  private CarbonColumnVector dictionaryVector;
  ColumnarVectorWrapperDirect(CarbonVectorProxy writableColumnVector, int ordinal) {
    this.sparkColumnVectorProxy = writableColumnVector.getColumnVector(ordinal);
    this.carbonVectorProxy = writableColumnVector;
    this.ordinal = ordinal;
  }
  @Override
  public void putBoolean(int rowId, boolean value) {
    sparkColumnVectorProxy.putBoolean(rowId, value);
  }
  @Override
  public void putFloat(int rowId, float value) {
    sparkColumnVectorProxy.putFloat(rowId, value);
  }
  @Override
  public void putShort(int rowId, short value) {
    sparkColumnVectorProxy.putShort(rowId, value);
  }
  @Override
  public void putShorts(int rowId, int count, short value) {
    sparkColumnVectorProxy.putShorts(rowId, count, value);
  }
  @Override
  public void putInt(int rowId, int value) {
    // For dictionary-encoded columns the int is a dictionary id, not the decoded value,
    // so it is routed to the proxy's dictionary-id vector.
    if (isDictionary) {
      sparkColumnVectorProxy.putDictionaryInt(rowId, value);
    } else {
      sparkColumnVectorProxy.putInt(rowId, value);
    }
  }
  @Override
  public void putInts(int rowId, int count, int value) {
    sparkColumnVectorProxy.putInts(rowId, count, value);
  }
  @Override
  public void putLong(int rowId, long value) {
    sparkColumnVectorProxy.putLong(rowId, value);
  }
  @Override
  public void putLongs(int rowId, int count, long value) {
    sparkColumnVectorProxy.putLongs(rowId, count, value);
  }
  @Override
  public void putDecimal(int rowId, BigDecimal value, int precision) {
    // Convert java.math.BigDecimal to Spark's Decimal before writing.
    Decimal toDecimal = Decimal.apply(value);
    sparkColumnVectorProxy.putDecimal(rowId, toDecimal, precision);
  }
  @Override
  public void putDecimals(int rowId, int count, BigDecimal value, int precision) {
    // Writes the same decimal into `count` consecutive rows starting at rowId.
    Decimal decimal = Decimal.apply(value);
    for (int i = 0; i < count; i++) {
      sparkColumnVectorProxy.putDecimal(rowId, decimal, precision);
      rowId++;
    }
  }
  @Override
  public void putDouble(int rowId, double value) {
    sparkColumnVectorProxy.putDouble(rowId, value);
  }
  @Override
  public void putDoubles(int rowId, int count, double value) {
    sparkColumnVectorProxy.putDoubles(rowId, count, value);
  }
  @Override
  public void putByteArray(int rowId, byte[] value) {
    // Full-array write: offset 0, length = value.length.
    sparkColumnVectorProxy.putByteArray(rowId, value, 0, value.length);
  }
  @Override
  public void putByteArray(int rowId, int count, byte[] value) {
    // Writes the same byte array into `count` consecutive rows starting at rowId.
    for (int i = 0; i < count; i++) {
      sparkColumnVectorProxy.putByteArray(rowId, value);
      rowId++;
    }
  }
  @Override
  public void putByteArray(int rowId, int offset, int length, byte[] value) {
    sparkColumnVectorProxy.putByteArray(rowId, value, offset, length);
  }
  @Override
  public void putNull(int rowId) {
    sparkColumnVectorProxy.putNull(rowId);
  }
  @Override
  public void putNulls(int rowId, int count) {
    sparkColumnVectorProxy.putNulls(rowId, count);
  }
  @Override
  public void putNotNull(int rowId) {
    sparkColumnVectorProxy.putNotNull(rowId);
  }
  @Override
  public void putNotNull(int rowId, int count) {
    sparkColumnVectorProxy.putNotNulls(rowId, count);
  }
  @Override
  public boolean isNull(int rowId) {
    return sparkColumnVectorProxy.isNullAt(rowId);
  }
  @Override
  public void putObject(int rowId, Object obj) {
    //TODO handle complex types
  }
  @Override
  public Object getData(int rowId) {
    //TODO handle complex types
    return null;
  }
  @Override
  public void reset() {
    // Only the dictionary child is reset here.
    // NOTE(review): presumably the proxy vector itself is reset by the surrounding batch — confirm.
    if (null != dictionaryVector) {
      dictionaryVector.reset();
    }
  }
  @Override
  public DataType getType() {
    // Maps the Spark-side data type of the backing vector to the Carbon DataType.
    return CarbonSparkDataSourceUtil
        .convertSparkToCarbonDataType(sparkColumnVectorProxy.dataType());
  }
  @Override
  public DataType getBlockDataType() {
    return blockDataType;
  }
  @Override
  public void setBlockDataType(DataType blockDataType) {
    this.blockDataType = blockDataType;
  }
  @Override
  public void setDictionary(CarbonDictionary dictionary) {
    sparkColumnVectorProxy.setDictionary(dictionary);
  }
  @Override
  public boolean hasDictionary() {
    return sparkColumnVectorProxy.hasDictionary();
  }
  /**
   * Reserves the proxy's dictionary-id storage for the whole batch and creates a child
   * wrapper (flagged as dictionary) that writes ids into it via putInt.
   */
  public void reserveDictionaryIds() {
    sparkColumnVectorProxy.reserveDictionaryIds(carbonVectorProxy.numRows());
    dictionaryVector = new ColumnarVectorWrapperDirect(carbonVectorProxy, ordinal);
    ((ColumnarVectorWrapperDirect) dictionaryVector).isDictionary = true;
  }
  @Override
  public CarbonColumnVector getDictionaryVector() {
    return dictionaryVector;
  }
  @Override
  public void putByte(int rowId, byte value) {
    sparkColumnVectorProxy.putByte(rowId, value);
  }
  @Override
  public void setFilteredRowsExist(boolean filteredRowsExist) {
    // No-op: this wrapper fills vectors directly, without considering filtered/deleted rows.
  }
  @Override
  public void putFloats(int rowId, int count, float[] src, int srcIndex) {
    sparkColumnVectorProxy.putFloats(rowId, count, src, srcIndex);
  }
  @Override
  public void putShorts(int rowId, int count, short[] src, int srcIndex) {
    sparkColumnVectorProxy.putShorts(rowId, count, src, srcIndex);
  }
  @Override
  public void putInts(int rowId, int count, int[] src, int srcIndex) {
    sparkColumnVectorProxy.putInts(rowId, count, src, srcIndex);
  }
  @Override
  public void putLongs(int rowId, int count, long[] src, int srcIndex) {
    sparkColumnVectorProxy.putLongs(rowId, count, src, srcIndex);
  }
  @Override
  public void putDoubles(int rowId, int count, double[] src, int srcIndex) {
    sparkColumnVectorProxy.putDoubles(rowId, count, src, srcIndex);
  }
  @Override
  public void putBytes(int rowId, int count, byte[] src, int srcIndex) {
    sparkColumnVectorProxy.putBytes(rowId, count, src, srcIndex);
  }
  @Override
  public void setLazyPage(LazyPageLoader lazyPage) {
    sparkColumnVectorProxy.setLazyPage(lazyPage);
  }
  @Override
  public void putArray(int rowId, int offset, int length) {
    sparkColumnVectorProxy.putArray(rowId, offset, length);
  }
  @Override
  public void putAllByteArray(byte[] data, int offset, int length) {
    sparkColumnVectorProxy.putAllByteArray(data, offset, length);
  }
}
| |
/*
* Copyright (c) 2016 Nike, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nike.cerberus.domain;
import com.nike.cerberus.validation.UniqueIamRolePermissions;
import com.nike.cerberus.validation.UniqueOwner;
import com.nike.cerberus.validation.UniqueUserGroupPermissions;
import com.nike.cerberus.validation.group.Updatable;
import org.hibernate.validator.constraints.Length;
import org.hibernate.validator.constraints.NotBlank;
import javax.validation.Valid;
import javax.validation.groups.Default;
import java.time.OffsetDateTime;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
/**
 * Represents a logical grouping of secrets.
 *
 * <p>Bean-validation annotations enforce field constraints on create (Default group)
 * and update (Updatable group). Equality is value-based over all fields.
 */
@UniqueOwner(groups = {Default.class, Updatable.class})
public class SafeDepositBoxV1 implements SafeDepositBox {
    private String id;
    @NotBlank(message = "SDB_CATEGORY_ID_INVALID")
    private String categoryId;
    @NotBlank(message = "SDB_NAME_BLANK")
    @Length(max = 100, message = "SDB_NAME_TOO_LONG")
    private String name;
    @Length(max = 1000, message = "SDB_DESCRIPTION_TOO_LONG", groups = {Default.class, Updatable.class})
    private String description;
    private String path;
    // Audit metadata: creation/update timestamps and principals.
    private OffsetDateTime createdTs;
    private OffsetDateTime lastUpdatedTs;
    private String createdBy;
    private String lastUpdatedBy;
    @NotBlank(message = "SDB_OWNER_BLANK", groups = {Default.class, Updatable.class})
    @Length(max = 255, message = "SDB_OWNER_TOO_LONG", groups = {Default.class, Updatable.class})
    private String owner;
    @Valid
    @UniqueUserGroupPermissions(groups = {Default.class, Updatable.class})
    private Set<UserGroupPermission> userGroupPermissions = new HashSet<>();
    @Valid
    @UniqueIamRolePermissions(groups = {Default.class, Updatable.class})
    private Set<IamRolePermission> iamRolePermissions = new HashSet<>();
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getCategoryId() {
        return categoryId;
    }
    public void setCategoryId(String categoryId) {
        this.categoryId = categoryId;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getPath() {
        return path;
    }
    public void setPath(String path) {
        this.path = path;
    }
    public OffsetDateTime getCreatedTs() {
        return createdTs;
    }
    public void setCreatedTs(OffsetDateTime createdTs) {
        this.createdTs = createdTs;
    }
    public OffsetDateTime getLastUpdatedTs() {
        return lastUpdatedTs;
    }
    public void setLastUpdatedTs(OffsetDateTime lastUpdatedTs) {
        this.lastUpdatedTs = lastUpdatedTs;
    }
    public String getCreatedBy() {
        return createdBy;
    }
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }
    public String getLastUpdatedBy() {
        return lastUpdatedBy;
    }
    public void setLastUpdatedBy(String lastUpdatedBy) {
        this.lastUpdatedBy = lastUpdatedBy;
    }
    public String getOwner() {
        return owner;
    }
    public void setOwner(String owner) {
        this.owner = owner;
    }
    public Set<UserGroupPermission> getUserGroupPermissions() {
        return userGroupPermissions;
    }
    public void setUserGroupPermissions(Set<UserGroupPermission> userGroupPermissions) {
        this.userGroupPermissions = userGroupPermissions;
    }
    public Set<IamRolePermission> getIamRolePermissions() {
        return iamRolePermissions;
    }
    public void setIamRolePermissions(Set<IamRolePermission> iamRolePermissions) {
        this.iamRolePermissions = iamRolePermissions;
    }
    /**
     * Value equality over all fields, using null-safe {@link Objects#equals} instead of
     * hand-rolled ternary chains. Semantics are identical to the previous implementation.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SafeDepositBoxV1 that = (SafeDepositBoxV1) o;
        return Objects.equals(id, that.id)
                && Objects.equals(categoryId, that.categoryId)
                && Objects.equals(name, that.name)
                && Objects.equals(description, that.description)
                && Objects.equals(path, that.path)
                && Objects.equals(createdTs, that.createdTs)
                && Objects.equals(lastUpdatedTs, that.lastUpdatedTs)
                && Objects.equals(createdBy, that.createdBy)
                && Objects.equals(lastUpdatedBy, that.lastUpdatedBy)
                && Objects.equals(owner, that.owner)
                && Objects.equals(userGroupPermissions, that.userGroupPermissions)
                && Objects.equals(iamRolePermissions, that.iamRolePermissions);
    }
    /**
     * 31-based hash over the same fields as {@link #equals(Object)}.
     * {@link Objects#hashCode(Object)} returns 0 for null, so the produced values are
     * byte-for-byte identical to the previous manual null checks.
     */
    @Override
    public int hashCode() {
        int result = Objects.hashCode(id);
        result = 31 * result + Objects.hashCode(categoryId);
        result = 31 * result + Objects.hashCode(name);
        result = 31 * result + Objects.hashCode(description);
        result = 31 * result + Objects.hashCode(path);
        result = 31 * result + Objects.hashCode(createdTs);
        result = 31 * result + Objects.hashCode(lastUpdatedTs);
        result = 31 * result + Objects.hashCode(createdBy);
        result = 31 * result + Objects.hashCode(lastUpdatedBy);
        result = 31 * result + Objects.hashCode(owner);
        result = 31 * result + Objects.hashCode(userGroupPermissions);
        result = 31 * result + Objects.hashCode(iamRolePermissions);
        return result;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.elasticsearch;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.KeyStore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nullable;
import javax.net.ssl.SSLContext;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.entity.ContentType;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.nio.entity.NStringEntity;
import org.apache.http.ssl.SSLContexts;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
/**
* Transforms for reading and writing data from/to Elasticsearch.
*
* <h3>Reading from Elasticsearch</h3>
*
* <p>{@link ElasticsearchIO#read ElasticsearchIO.read()} returns a bounded {@link PCollection
* PCollection<String>} representing JSON documents.
*
* <p>To configure the {@link ElasticsearchIO#read}, you have to provide a connection configuration
* containing the HTTP address of the instances, an index name and a type. The following example
* illustrates options for configuring the source:
*
* <pre>{@code
* pipeline.apply(ElasticsearchIO.read().withConnectionConfiguration(
* ElasticsearchIO.ConnectionConfiguration.create("http://host:9200", "my-index", "my-type")
* )
*
* }</pre>
*
* <p>The connection configuration also accepts optional configuration: {@code withUsername()} and
* {@code withPassword()}.
*
* <p>You can also specify a query on the {@code read()} using {@code withQuery()}.
*
* <h3>Writing to Elasticsearch</h3>
*
* <p>To write documents to Elasticsearch, use {@link ElasticsearchIO#write
* ElasticsearchIO.write()}, which writes JSON documents from a {@link PCollection
* PCollection<String>} (which can be bounded or unbounded).
*
* <p>To configure {@link ElasticsearchIO#write ElasticsearchIO.write()}, similar to the read, you
* have to provide a connection configuration. For instance:
*
* <pre>{@code
* pipeline
* .apply(...)
* .apply(ElasticsearchIO.write().withConnectionConfiguration(
* ElasticsearchIO.ConnectionConfiguration.create("http://host:9200", "my-index", "my-type")
* )
*
* }</pre>
*
* <p>Optionally, you can provide {@code withBatchSize()} and {@code withBatchSizeBytes()} to
* specify the size of the write batch in number of documents or in bytes.
*
* <p>Optionally, you can provide an {@link ElasticsearchIO.Write.FieldValueExtractFn} using {@code
* withIdFn()} that will be run to extract the id value out of the provided document rather than
* using the document id auto-generated by Elasticsearch.
*
* <p>Optionally, you can provide {@link ElasticsearchIO.Write.FieldValueExtractFn} using {@code
* withIndexFn()} or {@code withTypeFn()} to enable per-document routing to the target
* Elasticsearch index and type.
*/
@Experimental(Experimental.Kind.SOURCE_SINK)
public class ElasticsearchIO {
public static Read read() {
// default scrollKeepalive = 5m as a majorant for un-predictable time between 2 start/read calls
// default batchSize to 100 as recommended by ES dev team as a safe value when dealing
// with big documents and still a good compromise for performances
return new AutoValue_ElasticsearchIO_Read.Builder()
.setScrollKeepalive("5m")
.setBatchSize(100L)
.build();
}
public static Write write() {
return new AutoValue_ElasticsearchIO_Write.Builder()
// advised default starting batch size in ES docs
.setMaxBatchSize(1000L)
// advised default starting batch size in ES docs
.setMaxBatchSizeBytes(5L * 1024L * 1024L)
.build();
}
private ElasticsearchIO() {}
private static final ObjectMapper mapper = new ObjectMapper();
@VisibleForTesting
static JsonNode parseResponse(Response response) throws IOException {
return mapper.readValue(response.getEntity().getContent(), JsonNode.class);
}
static void checkForErrors(Response response, int backendVersion) throws IOException {
JsonNode searchResult = parseResponse(response);
boolean errors = searchResult.path("errors").asBoolean();
if (errors) {
StringBuilder errorMessages =
new StringBuilder(
"Error writing to Elasticsearch, some elements could not be inserted:");
JsonNode items = searchResult.path("items");
//some items present in bulk might have errors, concatenate error messages
for (JsonNode item : items) {
String errorRootName = "";
if (backendVersion == 2) {
errorRootName = "create";
} else if (backendVersion == 5) {
errorRootName = "index";
}
JsonNode errorRoot = item.path(errorRootName);
JsonNode error = errorRoot.get("error");
if (error != null) {
String type = error.path("type").asText();
String reason = error.path("reason").asText();
String docId = errorRoot.path("_id").asText();
errorMessages.append(String.format("%nDocument id %s: %s (%s)", docId, reason, type));
JsonNode causedBy = error.get("caused_by");
if (causedBy != null) {
String cbReason = causedBy.path("reason").asText();
String cbType = causedBy.path("type").asText();
errorMessages.append(String.format("%nCaused by: %s (%s)", cbReason, cbType));
}
}
}
throw new IOException(errorMessages.toString());
}
}
  /** A POJO describing a connection configuration to Elasticsearch. */
  @AutoValue
  public abstract static class ConnectionConfiguration implements Serializable {
    public abstract List<String> getAddresses();
    // Optional basic-auth credentials; both remain null when authentication is disabled.
    @Nullable
    public abstract String getUsername();
    @Nullable
    public abstract String getPassword();
    // Optional client keystore for SSL/TLS mutual authentication; null when not used.
    @Nullable
    public abstract String getKeystorePath();
    @Nullable
    public abstract String getKeystorePassword();
    public abstract String getIndex();
    public abstract String getType();
    abstract Builder builder();
    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setAddresses(List<String> addresses);
      abstract Builder setUsername(String username);
      abstract Builder setPassword(String password);
      abstract Builder setKeystorePath(String keystorePath);
      abstract Builder setKeystorePassword(String password);
      abstract Builder setIndex(String index);
      abstract Builder setType(String type);
      abstract ConnectionConfiguration build();
    }
    /**
     * Creates a new Elasticsearch connection configuration.
     *
     * @param addresses list of addresses of Elasticsearch nodes
     * @param index the index toward which the requests will be issued
     * @param type the document type toward which the requests will be issued
     * @return the connection configuration object
     */
    public static ConnectionConfiguration create(String[] addresses, String index, String type) {
      checkArgument(addresses != null, "addresses can not be null");
      checkArgument(addresses.length > 0, "addresses can not be empty");
      checkArgument(index != null, "index can not be null");
      checkArgument(type != null, "type can not be null");
      ConnectionConfiguration connectionConfiguration =
          new AutoValue_ElasticsearchIO_ConnectionConfiguration.Builder()
              .setAddresses(Arrays.asList(addresses))
              .setIndex(index)
              .setType(type)
              .build();
      return connectionConfiguration;
    }
    /**
     * If Elasticsearch authentication is enabled, provide the username.
     *
     * @param username the username used to authenticate to Elasticsearch
     * @return a new configuration with the username set
     */
    public ConnectionConfiguration withUsername(String username) {
      checkArgument(username != null, "username can not be null");
      checkArgument(!username.isEmpty(), "username can not be empty");
      return builder().setUsername(username).build();
    }
    /**
     * If Elasticsearch authentication is enabled, provide the password.
     *
     * @param password the password used to authenticate to Elasticsearch
     * @return a new configuration with the password set
     */
    public ConnectionConfiguration withPassword(String password) {
      checkArgument(password != null, "password can not be null");
      checkArgument(!password.isEmpty(), "password can not be empty");
      return builder().setPassword(password).build();
    }
    /**
     * If Elasticsearch uses SSL/TLS with mutual authentication (via shield),
     * provide the keystore containing the client key.
     *
     * @param keystorePath the location of the keystore containing the client key.
     * @return a new configuration with the keystore path set
     */
    public ConnectionConfiguration withKeystorePath(String keystorePath) {
      checkArgument(keystorePath != null, "keystorePath can not be null");
      checkArgument(!keystorePath.isEmpty(), "keystorePath can not be empty");
      return builder().setKeystorePath(keystorePath).build();
    }
    /**
     * If Elasticsearch uses SSL/TLS with mutual authentication (via shield),
     * provide the password to open the client keystore.
     *
     * @param keystorePassword the password of the client keystore.
     * @return a new configuration with the keystore password set
     */
    public ConnectionConfiguration withKeystorePassword(String keystorePassword) {
      checkArgument(keystorePassword != null, "keystorePassword can not be null");
      return builder().setKeystorePassword(keystorePassword).build();
    }
    private void populateDisplayData(DisplayData.Builder builder) {
      builder.add(DisplayData.item("address", getAddresses().toString()));
      builder.add(DisplayData.item("index", getIndex()));
      builder.add(DisplayData.item("type", getType()));
      builder.addIfNotNull(DisplayData.item("username", getUsername()));
      builder.addIfNotNull(DisplayData.item("keystore.path", getKeystorePath()));
      // The password and keystore password are deliberately never exposed in display data.
    }
@VisibleForTesting
RestClient createClient() throws IOException {
HttpHost[] hosts = new HttpHost[getAddresses().size()];
int i = 0;
for (String address : getAddresses()) {
URL url = new URL(address);
hosts[i] = new HttpHost(url.getHost(), url.getPort(), url.getProtocol());
i++;
}
RestClientBuilder restClientBuilder = RestClient.builder(hosts);
if (getUsername() != null) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(
AuthScope.ANY, new UsernamePasswordCredentials(getUsername(), getPassword()));
restClientBuilder.setHttpClientConfigCallback(
httpAsyncClientBuilder ->
httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
if (getKeystorePath() != null && !getKeystorePath().isEmpty()) {
try {
KeyStore keyStore = KeyStore.getInstance("jks");
try (InputStream is = new FileInputStream(new File(getKeystorePath()))) {
String keystorePassword = getKeystorePassword();
keyStore.load(is, (keystorePassword == null) ? null : keystorePassword.toCharArray());
}
final SSLContext sslContext = SSLContexts.custom()
.loadTrustMaterial(keyStore, new TrustSelfSignedStrategy()).build();
final SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslContext);
restClientBuilder.setHttpClientConfigCallback(
httpClientBuilder ->
httpClientBuilder.setSSLContext(sslContext).setSSLStrategy(sessionStrategy));
} catch (Exception e) {
throw new IOException("Can't load the client certificate from the keystore", e);
}
}
return restClientBuilder.build();
}
}
  /** A {@link PTransform} reading data from Elasticsearch. */
  @AutoValue
  public abstract static class Read extends PTransform<PBegin, PCollection<String>> {
    // Hard upper bound accepted by withBatchSize() for the per-scroll "size" parameter.
    private static final long MAX_BATCH_SIZE = 10000L;
    // Required at expand() time; nullable so an unconfigured Read can still be instantiated.
    @Nullable
    abstract ConnectionConfiguration getConnectionConfiguration();
    // Optional user query; when null, the reader falls back to a match_all query.
    @Nullable
    abstract String getQuery();
    abstract String getScrollKeepalive();
    abstract long getBatchSize();
    abstract Builder builder();
    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
      abstract Builder setQuery(String query);
      abstract Builder setScrollKeepalive(String scrollKeepalive);
      abstract Builder setBatchSize(long batchSize);
      abstract Read build();
    }
    /**
     * Provide the Elasticsearch connection configuration object.
     *
     * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
     * @return the {@link Read} with connection configuration set
     */
    public Read withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
      checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
      return builder().setConnectionConfiguration(connectionConfiguration).build();
    }
    /**
     * Provide a query used while reading from Elasticsearch.
     *
     * @param query the query. See <a
     *     href="https://www.elastic.co/guide/en/elasticsearch/reference/2.4/query-dsl.html">Query
     *     DSL</a>
     * @return the {@link Read} with the query set
     */
    public Read withQuery(String query) {
      checkArgument(query != null, "query can not be null");
      checkArgument(!query.isEmpty(), "query can not be empty");
      return builder().setQuery(query).build();
    }
    /**
     * Provide a scroll keepalive. See <a
     * href="https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-scroll.html">scroll
     * API</a> Default is "5m". Change this only if you get "No search context found" errors.
     *
     * @param scrollKeepalive keepalive duration, e.g. "5m"; must not be "0m"
     * @return the {@link Read} with the scroll keepalive set
     */
    public Read withScrollKeepalive(String scrollKeepalive) {
      checkArgument(scrollKeepalive != null, "scrollKeepalive can not be null");
      checkArgument(!"0m".equals(scrollKeepalive), "scrollKeepalive can not be 0m");
      return builder().setScrollKeepalive(scrollKeepalive).build();
    }
    /**
     * Provide a size for the scroll read. See <a
     * href="https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-scroll.html">
     * scroll API</a> Default is 100. Maximum is 10 000. If documents are small, increasing batch
     * size might improve read performance. If documents are big, you might need to decrease
     * batchSize
     *
     * @param batchSize number of documents read in each scroll read
     * @return the {@link Read} with the batch size set
     */
    public Read withBatchSize(long batchSize) {
      checkArgument(
          batchSize > 0 && batchSize <= MAX_BATCH_SIZE,
          "batchSize must be > 0 and <= %s, but was: %s",
          MAX_BATCH_SIZE,
          batchSize);
      return builder().setBatchSize(batchSize).build();
    }
@Override
public PCollection<String> expand(PBegin input) {
ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
checkState(
connectionConfiguration != null,
"withConnectionConfiguration() is required");
return input.apply(org.apache.beam.sdk.io.Read
.from(new BoundedElasticsearchSource(this, null, null, null)));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
builder.addIfNotNull(DisplayData.item("query", getQuery()));
builder.addIfNotNull(DisplayData.item("batchSize", getBatchSize()));
builder.addIfNotNull(DisplayData.item("scrollKeepalive", getScrollKeepalive()));
getConnectionConfiguration().populateDisplayData(builder);
}
}
  /** A {@link BoundedSource} reading from Elasticsearch. */
  @VisibleForTesting
  public static class BoundedElasticsearchSource extends BoundedSource<String> {
    // Major version of the target cluster (2 or 5); resolved in split() via getBackendVersion().
    private int backendVersion;
    private final Read spec;
    // shardPreference is the shard id where the source will read the documents
    // (used by the ES 2.x per-shard split only).
    @Nullable
    private final String shardPreference;
    // Total number of slices and this source's slice id (used by the ES 5.x slice API only).
    @Nullable
    private final Integer numSlices;
    @Nullable
    private final Integer sliceId;
    // constructor used in split() when we know the backend version
    private BoundedElasticsearchSource(Read spec, @Nullable String shardPreference,
        @Nullable Integer numSlices, @Nullable Integer sliceId, int backendVersion) {
      this.backendVersion = backendVersion;
      this.spec = spec;
      this.shardPreference = shardPreference;
      this.numSlices = numSlices;
      this.sliceId = sliceId;
    }
    // Test-visible constructor; the backend version is resolved later, in split().
    @VisibleForTesting
    BoundedElasticsearchSource(Read spec, @Nullable String shardPreference,
        @Nullable Integer numSlices, @Nullable Integer sliceId) {
      this.spec = spec;
      this.shardPreference = shardPreference;
      this.numSlices = numSlices;
      this.sliceId = sliceId;
    }
    @Override
    public List<? extends BoundedSource<String>> split(
        long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
      ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
      this.backendVersion = getBackendVersion(connectionConfiguration);
      List<BoundedElasticsearchSource> sources = new ArrayList<>();
      if (backendVersion == 2) {
        // 1. We split per shard:
        // unfortunately, Elasticsearch 2.x doesn't provide a way to do parallel reads on a
        // single shard, so we do not use desiredBundleSizeBytes because we cannot split shards.
        // With the slice API in ES 5.0 we are able to use desiredBundleSizeBytes (see the
        // else-branch below): we ask the slice API to return data in
        // nbBundles = estimatedSize / desiredBundleSize chunks, so each Beam source reads
        // around desiredBundleSizeBytes worth of data.
        JsonNode statsJson = BoundedElasticsearchSource.getStats(connectionConfiguration, true);
        JsonNode shardsJson =
            statsJson
                .path("indices")
                .path(connectionConfiguration.getIndex())
                .path("shards");
        // One source per shard, pinned to that shard via the "preference" request parameter.
        Iterator<Map.Entry<String, JsonNode>> shards = shardsJson.fields();
        while (shards.hasNext()) {
          Map.Entry<String, JsonNode> shardJson = shards.next();
          String shardId = shardJson.getKey();
          sources.add(new BoundedElasticsearchSource(spec, shardId, null, null, backendVersion));
        }
        checkArgument(!sources.isEmpty(), "No shard found");
      } else if (backendVersion == 5) {
        long indexSize = BoundedElasticsearchSource.estimateIndexSize(connectionConfiguration);
        float nbBundlesFloat = (float) indexSize / desiredBundleSizeBytes;
        int nbBundles = (int) Math.ceil(nbBundlesFloat);
        // ES slice API imposes that the number of slices is <= 1024 even if it can be overloaded
        if (nbBundles > 1024) {
          nbBundles = 1024;
        }
        // split the index into nbBundles chunks of desiredBundleSizeBytes by creating
        // nbBundles sources each reading a slice of the index
        // (see https://goo.gl/MhtSWz)
        // the slice API allows to split the ES shards
        // to have bundles closer to desiredBundleSizeBytes
        for (int i = 0; i < nbBundles; i++) {
          sources.add(new BoundedElasticsearchSource(spec, null, nbBundles, i, backendVersion));
        }
      }
      return sources;
    }
    @Override
    public long getEstimatedSizeBytes(PipelineOptions options) throws IOException {
      return estimateIndexSize(spec.getConnectionConfiguration());
    }
    /**
     * Estimates the size in bytes of the configured index from the primaries' store stats.
     *
     * @param connectionConfiguration connection to the cluster whose index is measured
     * @return the "size_in_bytes" reported by the indices stats API for the primary shards
     * @throws IOException if the stats request fails
     */
    @VisibleForTesting
    static long estimateIndexSize(ConnectionConfiguration connectionConfiguration)
        throws IOException {
      // we use indices stats API to estimate size and list the shards
      // (https://www.elastic.co/guide/en/elasticsearch/reference/2.4/indices-stats.html)
      // as Elasticsearch 2.x doesn't support any way to do a parallel read inside a shard,
      // the estimated size in bytes is not really used in the split into bundles.
      // However, we implement this method anyway as the runners can use it.
      // NB: Elasticsearch 5.x now provides the slice API.
      // (https://www.elastic.co/guide/en/elasticsearch/reference/5.0/search-request-scroll.html
      // #sliced-scroll)
      JsonNode statsJson = getStats(connectionConfiguration, false);
      JsonNode indexStats =
          statsJson
              .path("indices")
              .path(connectionConfiguration.getIndex())
              .path("primaries");
      JsonNode store = indexStats.path("store");
      return store.path("size_in_bytes").asLong();
    }
    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      spec.populateDisplayData(builder);
      builder.addIfNotNull(DisplayData.item("shard", shardPreference));
      builder.addIfNotNull(DisplayData.item("numSlices", numSlices));
      builder.addIfNotNull(DisplayData.item("sliceId", sliceId));
    }
    @Override
    public BoundedReader<String> createReader(PipelineOptions options) {
      return new BoundedElasticsearchReader(this);
    }
    @Override
    public void validate() {
      // Validation is delegated to the Read transform this source was created from.
      spec.validate(null);
    }
    @Override
    public Coder<String> getOutputCoder() {
      return StringUtf8Coder.of();
    }
    // Calls the indices stats API on the configured index; shardLevel=true requests
    // per-shard statistics in addition to the index-level totals.
    private static JsonNode getStats(ConnectionConfiguration connectionConfiguration,
        boolean shardLevel) throws IOException {
      HashMap<String, String> params = new HashMap<>();
      if (shardLevel) {
        params.put("level", "shards");
      }
      String endpoint = String.format("/%s/_stats", connectionConfiguration.getIndex());
      try (RestClient restClient = connectionConfiguration.createClient()) {
        return parseResponse(
            restClient.performRequest("GET", endpoint, params));
      }
    }
}
  /** A bounded reader that streams documents out of an Elasticsearch scroll context. */
  private static class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> {
    private final BoundedElasticsearchSource source;
    // REST client; created in start(), closed in close().
    private RestClient restClient;
    // Document currently pointed at, or null before start() / after exhaustion.
    private String current;
    // Server-side scroll context id, refreshed after every search/scroll response.
    private String scrollId;
    // Iterator over the documents of the batch fetched by the latest scroll call.
    private ListIterator<String> batchIterator;
    private BoundedElasticsearchReader(BoundedElasticsearchSource source) {
      this.source = source;
    }
    /**
     * Opens the REST client, issues the initial scrolled search (injecting the slice clause for
     * ES 5.x sliced reads, or the shard preference and batch size for ES 2.x), and positions the
     * reader on the first document.
     */
    @Override
    public boolean start() throws IOException {
      restClient = source.spec.getConnectionConfiguration().createClient();
      String query = source.spec.getQuery();
      if (query == null) {
        // No user query: read the whole index/type.
        query = "{\"query\": { \"match_all\": {} }}";
      }
      if (source.backendVersion == 5 && source.numSlices != null && source.numSlices > 1) {
        // if there is more than one slice, add the slice to the user query:
        // replaceFirst inserts the slice clause right after the query's opening brace.
        String sliceQuery = String
            .format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId,
                source.numSlices);
        query = query.replaceFirst("\\{", "{" + sliceQuery + ",");
      }
      Response response;
      String endPoint =
          String.format(
              "/%s/%s/_search",
              source.spec.getConnectionConfiguration().getIndex(),
              source.spec.getConnectionConfiguration().getType());
      Map<String, String> params = new HashMap<>();
      params.put("scroll", source.spec.getScrollKeepalive());
      if (source.backendVersion == 2) {
        // ES 2.x: page by batch size and pin the read to this source's shard, if set.
        params.put("size", String.valueOf(source.spec.getBatchSize()));
        if (source.shardPreference != null) {
          params.put("preference", "_shards:" + source.shardPreference);
        }
      }
      HttpEntity queryEntity = new NStringEntity(query,
          ContentType.APPLICATION_JSON);
      response =
          restClient.performRequest("GET", endPoint, params, queryEntity);
      JsonNode searchResult = parseResponse(response);
      updateScrollId(searchResult);
      return readNextBatchAndReturnFirstDocument(searchResult);
    }
    // Remembers the scroll context id carried by the latest search/scroll response.
    private void updateScrollId(JsonNode searchResult) {
      scrollId = searchResult.path("_scroll_id").asText();
    }
    @Override
    public boolean advance() throws IOException {
      if (batchIterator.hasNext()) {
        // Serve the next document from the batch already held in memory.
        current = batchIterator.next();
        return true;
      } else {
        // Batch exhausted: fetch the next one through the scroll API.
        String requestBody =
            String.format(
                "{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}",
                source.spec.getScrollKeepalive(), scrollId);
        HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
        Response response =
            restClient.performRequest(
                "GET", "/_search/scroll", Collections.emptyMap(), scrollEntity);
        JsonNode searchResult = parseResponse(response);
        updateScrollId(searchResult);
        return readNextBatchAndReturnFirstDocument(searchResult);
      }
    }
private boolean readNextBatchAndReturnFirstDocument(JsonNode searchResult) {
//stop if no more data
JsonNode hits = searchResult.path("hits").path("hits");
if (hits.size() == 0) {
current = null;
batchIterator = null;
return false;
}
// list behind iterator is empty
List<String> batch = new ArrayList<>();
for (JsonNode hit : hits) {
String document = hit.path("_source").toString();
batch.add(document);
}
batchIterator = batch.listIterator();
current = batchIterator.next();
return true;
}
@Override
public String getCurrent() throws NoSuchElementException {
if (current == null) {
throw new NoSuchElementException();
}
return current;
}
@Override
public void close() throws IOException {
// remove the scroll
String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId);
HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
try {
restClient.performRequest("DELETE", "/_search/scroll", Collections.emptyMap(), entity);
} finally {
if (restClient != null) {
restClient.close();
}
}
}
@Override
public BoundedSource<String> getCurrentSource() {
return source;
}
}
  /** A {@link PTransform} writing data to Elasticsearch. */
  @AutoValue
  public abstract static class Write extends PTransform<PCollection<String>, PDone> {
    /**
     * Interface allowing a specific field value to be returned from a parsed JSON document. This is
     * used for using explicit document ids, and for dynamic routing (index/Type) on a document
     * basis. A null response will result in default behaviour and an exception will be propagated
     * as a failure.
     */
    public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {}
    // Required at expand() time; nullable so an unconfigured Write can still be instantiated.
    @Nullable
    abstract ConnectionConfiguration getConnectionConfiguration();
    abstract long getMaxBatchSize();
    abstract long getMaxBatchSizeBytes();
    // Optional per-document extraction functions; null selects the default behaviour.
    @Nullable
    abstract FieldValueExtractFn getIdFn();
    @Nullable
    abstract FieldValueExtractFn getIndexFn();
    @Nullable
    abstract FieldValueExtractFn getTypeFn();
    abstract Builder builder();
    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
      abstract Builder setMaxBatchSize(long maxBatchSize);
      abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes);
      abstract Builder setIdFn(FieldValueExtractFn idFunction);
      abstract Builder setIndexFn(FieldValueExtractFn indexFn);
      abstract Builder setTypeFn(FieldValueExtractFn typeFn);
      abstract Write build();
    }
    /**
     * Provide the Elasticsearch connection configuration object.
     *
     * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
     * @return the {@link Write} with connection configuration set
     */
    public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
      checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
      return builder().setConnectionConfiguration(connectionConfiguration).build();
    }
    /**
     * Provide a maximum size in number of documents for the batch see bulk API
     * (https://www.elastic.co/guide/en/elasticsearch/reference/2.4/docs-bulk.html). Default is 1000
     * docs (like Elasticsearch bulk size advice). See
     * https://www.elastic.co/guide/en/elasticsearch/guide/current/bulk.html Depending on the
     * execution engine, size of bundles may vary, this sets the maximum size. Change this if you
     * need to have smaller ElasticSearch bulks.
     *
     * @param batchSize maximum batch size in number of documents
     * @return the {@link Write} with connection batch size set
     */
    public Write withMaxBatchSize(long batchSize) {
      checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize);
      return builder().setMaxBatchSize(batchSize).build();
    }
    /**
     * Provide a maximum size in bytes for the batch see bulk API
     * (https://www.elastic.co/guide/en/elasticsearch/reference/2.4/docs-bulk.html). Default is 5MB
     * (like Elasticsearch bulk size advice). See
     * https://www.elastic.co/guide/en/elasticsearch/guide/current/bulk.html Depending on the
     * execution engine, size of bundles may vary, this sets the maximum size. Change this if you
     * need to have smaller ElasticSearch bulks.
     *
     * @param batchSizeBytes maximum batch size in bytes
     * @return the {@link Write} with connection batch size in bytes set
     */
    public Write withMaxBatchSizeBytes(long batchSizeBytes) {
      checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes);
      return builder().setMaxBatchSizeBytes(batchSizeBytes).build();
    }
    /**
     * Provide a function to extract the id from the document. This id will be used as the document
     * id in Elasticsearch. Should the function throw an Exception then the batch will fail and the
     * exception propagated.
     *
     * @param idFn to extract the document ID
     * @return the {@link Write} with the function set
     */
    public Write withIdFn(FieldValueExtractFn idFn) {
      checkArgument(idFn != null, "idFn must not be null");
      return builder().setIdFn(idFn).build();
    }
    /**
     * Provide a function to extract the target index from the document allowing for dynamic
     * document routing. Should the function throw an Exception then the batch will fail and the
     * exception propagated.
     *
     * @param indexFn to extract the destination index from
     * @return the {@link Write} with the function set
     */
    public Write withIndexFn(FieldValueExtractFn indexFn) {
      checkArgument(indexFn != null, "indexFn must not be null");
      return builder().setIndexFn(indexFn).build();
    }
    /**
     * Provide a function to extract the target type from the document allowing for dynamic document
     * routing. Should the function throw an Exception then the batch will fail and the exception
     * propagated. Users are encouraged to consider carefully if multiple types are a sensible model
     * <a
     * href="https://www.elastic.co/blog/index-type-parent-child-join-now-future-in-elasticsearch">as
     * discussed in this blog</a>.
     *
     * @param typeFn to extract the destination type from
     * @return the {@link Write} with the function set
     */
    public Write withTypeFn(FieldValueExtractFn typeFn) {
      checkArgument(typeFn != null, "typeFn must not be null");
      return builder().setTypeFn(typeFn).build();
    }
    @Override
    public PDone expand(PCollection<String> input) {
      ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
      checkState(connectionConfiguration != null, "withConnectionConfiguration() is required");
      input.apply(ParDo.of(new WriteFn(this)));
      return PDone.in(input.getPipeline());
    }
    /**
     * {@link DoFn} for the {@link Write} transform: buffers rendered bulk actions and flushes
     * them to Elasticsearch whenever the configured document-count or byte-size limit is hit,
     * and at the end of every bundle.
     */
    @VisibleForTesting
    static class WriteFn extends DoFn<String, Void> {
      private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
      // Major version of the target cluster (2 or 5); resolved in setup().
      private int backendVersion;
      private final Write spec;
      // The REST client is not serializable, hence transient; recreated per worker in setup().
      private transient RestClient restClient;
      // Pending bulk actions, one pre-rendered action-line + document pair per element.
      private ArrayList<String> batch;
      // Running total of the raw document bytes buffered in `batch`.
      private long currentBatchSizeBytes;
      // Encapsulates the elements which form a complete Elasticsearch document address
      @JsonPropertyOrder({ "_index", "_type", "_id" })
      @JsonInclude(JsonInclude.Include.NON_NULL)
      private static class DocumentAddress implements Serializable {
        @JsonProperty("_index")
        final String index;
        @JsonProperty("_type")
        final String type;
        @JsonProperty("_id")
        final String id;
        DocumentAddress(String index, String type, String id) {
          this.index = index;
          this.type = type;
          this.id = id;
        }
      }
      @VisibleForTesting
      WriteFn(Write spec) {
        this.spec = spec;
      }
      @Setup
      public void setup() throws Exception {
        ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
        backendVersion = getBackendVersion(connectionConfiguration);
        restClient = connectionConfiguration.createClient();
      }
      @StartBundle
      public void startBundle(StartBundleContext context) {
        batch = new ArrayList<>();
        currentBatchSizeBytes = 0;
      }
      /**
       * Extracts the components that comprise the document address from the document using the
       * {@link FieldValueExtractFn} configured. This allows any or all of the index, type and
       * document id to be controlled on a per document basis. If none are provided then an empty
       * default of {@code {}} is returned. Sanitization of the index is performed, automatically
       * lower-casing the value as required by Elasticsearch.
       *
       * @param document the json from which the index, type and id may be extracted
       * @return the document address as JSON or the default
       * @throws IOException if the document cannot be parsed as JSON
       */
      private String getDocumentAddress(String document) throws IOException {
        if (spec.getIndexFn() != null || spec.getTypeFn() != null || spec.getIdFn() != null) {
          // parse once and reused for efficiency
          JsonNode parsedDocument = OBJECT_MAPPER.readTree(document);
          DocumentAddress address =
              new DocumentAddress(
                  spec.getIndexFn() != null
                      ? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument))
                      : null,
                  spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null,
                  spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null);
          return OBJECT_MAPPER.writeValueAsString(address);
        } else {
          return "{}"; // use configuration and auto-generated document IDs
        }
      }
      // Elasticsearch requires lower-case index names; null passes through untouched.
      private static String lowerCaseOrNull(String input) {
        return input == null ? null : input.toLowerCase();
      }
      @ProcessElement
      public void processElement(ProcessContext context) throws Exception {
        String document = context.element();
        String documentAddress = getDocumentAddress(document);
        // Render the bulk action line followed by the document source.
        batch.add(String.format("{ \"index\" : %s }%n%s%n", documentAddress, document));
        currentBatchSizeBytes += document.getBytes(StandardCharsets.UTF_8).length;
        if (batch.size() >= spec.getMaxBatchSize()
            || currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) {
          flushBatch();
        }
      }
      @FinishBundle
      public void finishBundle(FinishBundleContext context) throws Exception {
        flushBatch();
      }
      // Sends all buffered actions as a single bulk request and fails on any per-item error.
      private void flushBatch() throws IOException {
        if (batch.isEmpty()) {
          return;
        }
        StringBuilder bulkRequest = new StringBuilder();
        for (String json : batch) {
          bulkRequest.append(json);
        }
        batch.clear();
        currentBatchSizeBytes = 0;
        Response response;
        // Elasticsearch will default to the index/type provided here if none are set in the
        // document meta (i.e. using ElasticsearchIO$Write#withIndexFn and
        // ElasticsearchIO$Write#withTypeFn options)
        String endPoint =
            String.format(
                "/%s/%s/_bulk",
                spec.getConnectionConfiguration().getIndex(),
                spec.getConnectionConfiguration().getType());
        HttpEntity requestBody =
            new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON);
        response = restClient.performRequest("POST", endPoint, Collections.emptyMap(), requestBody);
        checkForErrors(response, backendVersion);
      }
      @Teardown
      public void closeClient() throws Exception {
        if (restClient != null) {
          restClient.close();
        }
      }
    }
}
/**
 * Queries the cluster root endpoint and extracts the major version of the
 * Elasticsearch backend.
 *
 * @param connectionConfiguration connection settings used to build a short-lived client
 * @return the backend major version (2 or 5)
 * @throws IllegalArgumentException if the version cannot be retrieved or is unsupported
 */
static int getBackendVersion(ConnectionConfiguration connectionConfiguration) {
    try (RestClient restClient = connectionConfiguration.createClient()) {
        Response response = restClient.performRequest("GET", "");
        JsonNode jsonNode = parseResponse(response);
        // "version.number" looks like "5.6.3"; the first character is the major version.
        int backendVersion = Integer
            .parseInt(jsonNode.path("version").path("number").asText().substring(0, 1));
        checkArgument((backendVersion == 2 || backendVersion == 5),
            "The Elasticsearch version to connect to is %s.x. "
                + "This version of the ElasticsearchIO is only compatible with "
                + "Elasticsearch v5.x and v2.x",
            backendVersion);
        return backendVersion;
    } catch (IOException e) {
        // Preserve the underlying cause instead of silently discarding it.
        throw new IllegalArgumentException("Cannot get Elasticsearch version", e);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.registry.client;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.function.ThrowableAction;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.Page;
import org.apache.dubbo.event.Event;
import org.apache.dubbo.event.EventDispatcher;
import org.apache.dubbo.registry.client.event.ServiceDiscoveryDestroyedEvent;
import org.apache.dubbo.registry.client.event.ServiceDiscoveryDestroyingEvent;
import org.apache.dubbo.registry.client.event.ServiceDiscoveryExceptionEvent;
import org.apache.dubbo.registry.client.event.ServiceDiscoveryInitializedEvent;
import org.apache.dubbo.registry.client.event.ServiceDiscoveryInitializingEvent;
import org.apache.dubbo.registry.client.event.ServiceInstancePreRegisteredEvent;
import org.apache.dubbo.registry.client.event.ServiceInstancePreUnregisteredEvent;
import org.apache.dubbo.registry.client.event.ServiceInstanceRegisteredEvent;
import org.apache.dubbo.registry.client.event.ServiceInstanceUnregisteredEvent;
import org.apache.dubbo.registry.client.event.listener.ServiceInstancesChangedListener;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.util.Optional.empty;
import static java.util.Optional.of;
/**
 * The decorating implementation of {@link ServiceDiscovery} that publishes the {@link Event Dubbo event} when some actions are
* executing, including:
* <ul>
* <li>Lifecycle actions:</li>
* <table cellpadding="0" cellspacing="0" border="1">
* <thead>
* <tr>
* <th>Action</th>
* <th>before</th>
* <th>After</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td>{@link #INITIALIZE_ACTION start}</td>
* <td>{@link ServiceDiscoveryInitializingEvent}</td>
* <td>{@link ServiceDiscoveryInitializedEvent}</td>
* </tr>
* <tr>
* <td>{@link #DESTROY_ACTION stop}</td>
* <td>{@link ServiceDiscoveryDestroyingEvent}</td>
* <td>{@link ServiceDiscoveryDestroyedEvent}</td>
* </tr>
* </tbody>
* </table>
* <li>Registration actions:</li>
* <table cellpadding="0" cellspacing="0" border="1">
* <thead>
* <tr>
* <th>Action</th>
* <th>before</th>
* <th>After</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td>{@link #REGISTER_ACTION register}</td>
* <td>{@link ServiceInstancePreRegisteredEvent}</td>
* <td>{@link ServiceInstanceRegisteredEvent}</td>
* </tr>
* <tr>
* <td>{@link #UPDATE_ACTION update}</td>
* <td>N/A</td>
* <td>N/A</td>
* </tr>
* <tr>
 * <td>{@link #UNREGISTER_ACTION unregister}</td>
 * <td>{@link ServiceInstancePreUnregisteredEvent}</td>
 * <td>{@link ServiceInstanceUnregisteredEvent}</td>
* </tr>
* </tbody>
* </table>
* </ul>
*
* @see ServiceDiscovery
* @see ServiceDiscoveryInitializingEvent
* @see ServiceDiscoveryInitializedEvent
* @see ServiceInstancePreRegisteredEvent
* @see ServiceInstanceRegisteredEvent
* @see ServiceDiscoveryDestroyingEvent
* @see ServiceDiscoveryDestroyedEvent
* @since 2.7.5
*/
final class EventPublishingServiceDiscovery implements ServiceDiscovery {
/**
 * @see ServiceInstancePreRegisteredEvent
 * @see ServiceInstanceRegisteredEvent
 */
protected static final String REGISTER_ACTION = "register";
protected static final String UPDATE_ACTION = "update";
protected static final String UNREGISTER_ACTION = "unregister";
/**
 * @see ServiceDiscoveryInitializingEvent
 * @see ServiceDiscoveryInitializedEvent
 */
protected static final String INITIALIZE_ACTION = "initialize";
/**
 * @see ServiceDiscoveryDestroyingEvent
 * @see ServiceDiscoveryDestroyedEvent
 */
protected static final String DESTROY_ACTION = "destroy";
// Dispatcher through which every before/after/exception event is published.
protected final EventDispatcher eventDispatcher = EventDispatcher.getDefaultExtension();
// One-way lifecycle latches; flipped exactly once by initialize()/destroy() below.
protected final AtomicBoolean initialized = new AtomicBoolean(false);
protected final AtomicBoolean destroyed = new AtomicBoolean(false);
protected final Logger logger = LoggerFactory.getLogger(getClass());
// Decorated delegate that performs the actual service-discovery work.
private final ServiceDiscovery serviceDiscovery;
// Wraps the given delegate; rejects null eagerly instead of failing on first use.
protected EventPublishingServiceDiscovery(ServiceDiscovery serviceDiscovery) {
if (serviceDiscovery == null) {
throw new NullPointerException("The ServiceDiscovery argument must not be null!");
}
this.serviceDiscovery = serviceDiscovery;
}
// Publishes pre/post registration events around the delegate call.
// NOTE(review): these events use the delegate as the event source, whereas the
// unregister/initialize/destroy events below use this decorator — confirm which
// source listeners are expected to observe.
@Override
public final void register(ServiceInstance serviceInstance) throws RuntimeException {
assertDestroyed(REGISTER_ACTION);
assertInitialized(REGISTER_ACTION);
executeWithEvents(
of(new ServiceInstancePreRegisteredEvent(serviceDiscovery, serviceInstance)),
() -> serviceDiscovery.register(serviceInstance),
of(new ServiceInstanceRegisteredEvent(serviceDiscovery, serviceInstance))
);
}
// Update publishes no dedicated before/after events (empty() on both sides).
@Override
public final void update(ServiceInstance serviceInstance) throws RuntimeException {
assertDestroyed(UPDATE_ACTION);
assertInitialized(UPDATE_ACTION);
executeWithEvents(
empty(),
() -> serviceDiscovery.update(serviceInstance),
empty()
);
}
// Publishes pre/post unregistration events around the delegate call.
@Override
public final void unregister(ServiceInstance serviceInstance) throws RuntimeException {
assertDestroyed(UNREGISTER_ACTION);
assertInitialized(UNREGISTER_ACTION);
executeWithEvents(
of(new ServiceInstancePreUnregisteredEvent(this, serviceInstance)),
() -> serviceDiscovery.unregister(serviceInstance),
of(new ServiceInstanceUnregisteredEvent(this, serviceInstance))
);
}
// ---- Read-only queries below are plain delegation; no events are published. ----
@Override
public Set<String> getServices() {
return serviceDiscovery.getServices();
}
@Override
public List<ServiceInstance> getInstances(String serviceName) throws NullPointerException {
return serviceDiscovery.getInstances(serviceName);
}
@Override
public Page<ServiceInstance> getInstances(String serviceName, int offset, int pageSize) throws NullPointerException, IllegalArgumentException {
return serviceDiscovery.getInstances(serviceName, offset, pageSize);
}
@Override
public Page<ServiceInstance> getInstances(String serviceName, int offset, int pageSize, boolean healthyOnly) throws NullPointerException, IllegalArgumentException {
return serviceDiscovery.getInstances(serviceName, offset, pageSize, healthyOnly);
}
@Override
public Map<String, Page<ServiceInstance>> getInstances(Iterable<String> serviceNames, int offset, int requestSize) throws NullPointerException, IllegalArgumentException {
return serviceDiscovery.getInstances(serviceNames, offset, requestSize);
}
@Override
public String toString() {
return serviceDiscovery.toString();
}
// Registers the listener both with the delegate and with the event dispatcher.
@Override
public void addServiceInstancesChangedListener(ServiceInstancesChangedListener listener) throws NullPointerException, IllegalArgumentException {
serviceDiscovery.addServiceInstancesChangedListener(listener);
eventDispatcher.addEventListener(listener);
}
@Override
public URL getUrl() {
return serviceDiscovery.getUrl();
}
@Override
public ServiceInstance getLocalInstance() {
return serviceDiscovery.getLocalInstance();
}
// Initializes the delegate exactly once, surrounded by initializing/initialized events.
// A repeated call is logged and ignored.
@Override
public void initialize(URL registryURL) {
assertDestroyed(INITIALIZE_ACTION);
if (isInitialized()) {
if (logger.isWarnEnabled()) {
logger.warn("It's ignored to start current ServiceDiscovery, because it has been started.");
}
return;
}
executeWithEvents(
of(new ServiceDiscoveryInitializingEvent(this, serviceDiscovery)),
() -> serviceDiscovery.initialize(registryURL),
of(new ServiceDiscoveryInitializedEvent(this, serviceDiscovery))
);
// doesn't start -> started
initialized.compareAndSet(false, true);
}
// Destroys the delegate, surrounded by destroying/destroyed events.
@Override
public void destroy() {
assertDestroyed(DESTROY_ACTION);
executeWithEvents(
of(new ServiceDiscoveryDestroyingEvent(this, serviceDiscovery)),
serviceDiscovery::destroy,
of(new ServiceDiscoveryDestroyedEvent(this, serviceDiscovery))
);
// doesn't stop -> stopped
destroyed.compareAndSet(false, true);
}
// Dispatches beforeEvent, runs the action, then dispatches afterEvent. A failing action
// is logged and converted into a ServiceDiscoveryExceptionEvent rather than rethrown;
// note that afterEvent is still dispatched even when the action threw.
protected final void executeWithEvents(Optional<? extends Event> beforeEvent,
ThrowableAction action,
Optional<? extends Event> afterEvent) {
beforeEvent.ifPresent(this::dispatchEvent);
try {
action.execute();
} catch (Throwable e) {
logger.error("Execute action throws and dispatch a ServiceDiscoveryExceptionEvent.", e);
dispatchEvent(new ServiceDiscoveryExceptionEvent(this, serviceDiscovery, e));
}
afterEvent.ifPresent(this::dispatchEvent);
}
private void dispatchEvent(Event event) {
eventDispatcher.dispatch(event);
}
public final boolean isInitialized() {
return initialized.get();
}
public final boolean isDestroyed() {
return destroyed.get();
}
// Guard used by every public operation: nothing is allowed after destroy().
protected void assertDestroyed(String action) throws IllegalStateException {
if (isDestroyed()) {
throw new IllegalStateException("The action[" + action + "] is rejected, because the ServiceDiscovery is destroyed already.");
}
}
// Guard used by registration operations: they require a prior initialize().
protected void assertInitialized(String action) throws IllegalStateException {
if (!isInitialized()) {
throw new IllegalStateException("The action[" + action + "] is rejected, because the ServiceDiscovery is not initialized yet.");
}
}
}
| |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.helper.AgentInstanceMother;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.security.Registration;
import com.thoughtworks.go.server.service.AgentBuildingInfo;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.util.SystemEnvironment;
import org.apache.commons.io.FileUtils;
import org.hamcrest.core.Is;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static com.thoughtworks.go.domain.AgentInstance.AgentType.LOCAL;
import static com.thoughtworks.go.domain.AgentInstance.AgentType.REMOTE;
import static com.thoughtworks.go.util.SystemUtil.currentWorkingDirectory;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.number.OrderingComparison.lessThan;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
public class AgentInstanceTest {
// Environment handle used when constructing agents; recreated for every test in setUp().
private SystemEnvironment systemEnvironment;
// Default agent configuration shared by most tests (uuid2 / CCeDev01 / DEFAULT_IP_ADDRESS).
public AgentConfig agentConfig;
// Building info used by the runtime-info helper methods below.
public AgentBuildingInfo defaultBuildingInfo;
private static final String DEFAULT_IP_ADDRESS = "10.18.5.1";
// Re-creates the shared fixtures before each test so no state leaks between tests.
@Before
public void setUp() {
systemEnvironment = new SystemEnvironment();
agentConfig = new AgentConfig("uuid2", "CCeDev01", DEFAULT_IP_ADDRESS);
defaultBuildingInfo = new AgentBuildingInfo("pipeline", "buildLocator");
}
// Cleans up global state mutated by the tests: the generated agent keystore on disk and
// the system properties for connection timeout and agent disk-space limit.
@After
public void tearDown() throws Exception {
FileUtils.deleteQuietly(new File("config/agentkeystore"));
new SystemEnvironment().setProperty("agent.connection.timeout", "300");
new SystemEnvironment().clearProperty(SystemEnvironment.AGENT_SIZE_LIMIT);
}
// Verifies that an agent building a job exposes the locator of that build.
@Test
public void shouldReturnBuildLocator() {
    assertThat(AgentInstanceMother.building("buildLocator").getBuildLocator(), is("buildLocator"));
}
// Verifies that a missing agent reports an empty operating-system string rather than null.
@Test
public void shouldReturnEmptyStringForNullOperatingSystem() {
    AgentInstance missingAgent = AgentInstanceMother.missing();
    assertThat(missingAgent.getOperatingSystem(), is(""));
}
// Verifies human-readable formatting of usable space, including the unknown (null) case.
@Test
public void shouldReturnHumanReadableUsableSpace() {
    AgentInstance withTwoGb = AgentInstanceMother.updateUsableSpace(AgentInstanceMother.pending(), 2 * 1024 * 1024 * 1024L);
    assertThat(withTwoGb.freeDiskSpace().toString(), is("2.0 GB"));
    AgentInstance withUnknownSpace = AgentInstanceMother.updateUsableSpace(AgentInstanceMother.pending(), null);
    assertThat(withUnknownSpace.freeDiskSpace().toString(), is(DiskSpace.UNKNOWN_DISK_SPACE));
}
// Verifies that agents that are missing or have lost contact report unknown disk space.
@Test
public void shouldReturnUnknownUsableSpaceForMissingOrLostContactAgent() {
    AgentInstance missingAgent = AgentInstanceMother.missing();
    assertThat(missingAgent.freeDiskSpace().toString(), is(DiskSpace.UNKNOWN_DISK_SPACE));
    AgentInstance lostContactAgent = AgentInstanceMother.lostContact();
    assertThat(lostContactAgent.freeDiskSpace().toString(), is(DiskSpace.UNKNOWN_DISK_SPACE));
}
// Verifies that a runtime update arriving while a job is cancelled does not clobber
// the Cancelled status.
@Test
public void shouldKeepStatusAsCancelled() throws Exception {
    AgentInstance agent = AgentInstanceMother.building("buildLocator");
    agent.cancel();
    agent.update(buildingRuntimeInfo(agent.agentConfig()));
    assertThat(agent.getStatus(), is(AgentStatus.Cancelled));
}
// Verifies that a cancelled agent returns to Idle once the cancelled task finishes.
@Test
public void shouldUpdateAgentBackToIdleAfterCancelledTaskFinishes() throws Exception {
    AgentInstance cancelledAgent = AgentInstanceMother.cancelled();
    AgentRuntimeInfo idleInfo = new AgentRuntimeInfo(cancelledAgent.agentConfig().getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    idleInfo.idle();
    cancelledAgent.update(idleInfo);
    assertThat(cancelledAgent.getStatus(), is(AgentStatus.Idle));
}
// Verifies that an install location carried by a runtime update is reflected on the instance.
// Renamed from shouldUpdateTheIntsallLocation to fix the "Intsall" typo (JUnit discovers
// tests by annotation, so the rename is safe).
@Test
public void shouldUpdateTheInstallLocation() throws Exception {
    AgentInstance agentInstance = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    String installPath = "/var/lib/GoServer";
    AgentRuntimeInfo newRuntimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    newRuntimeInfo.setLocation(installPath);
    agentInstance.update(newRuntimeInfo);
    assertThat(agentInstance.getLocation(), is(installPath));
}
// Verifies that usable space is copied from the runtime report onto the instance.
@Test
public void shouldUpdateTheUsableSpace() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    runtimeInfo.setUsableSpace(1000L);
    assertThat(agent.getUsableSpace(), is(not(runtimeInfo.getUsableSpace())));
    agent.update(runtimeInfo);
    assertThat(agent.getUsableSpace(), is(runtimeInfo.getUsableSpace()));
}
// Verifies that an approved (registered) agent is issued a non-empty certificate chain.
@Test
public void shouldAssignCertificateToApprovedAgent() {
    AgentInstance approvedAgent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    approvedAgent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    Registration registration = approvedAgent.assignCertification();
    assertThat(registration.getChain().length, is(not(0)));
}
// Verifies that a pending (not yet approved) agent receives an empty certificate chain.
@Test
public void shouldNotAssignCertificateToPendingAgent() {
    AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(agentConfig, false, "/var/lib", 0L, "linux", false);
    AgentInstance pendingAgent = AgentInstance.createFromLiveAgent(runtimeInfo, systemEnvironment);
    Registration registration = pendingAgent.assignCertification();
    assertThat(registration.getChain().length, is(0));
}
// Verifies that lastHeardTime is null until the first ping and non-null afterwards.
@Test
public void shouldInitializeTheLastHeardTimeWhenFirstPing() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    assertThat(agent.getLastHeardTime(), is(nullValue()));
    agent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    assertThat(agent.getLastHeardTime(), is(not(nullValue())));
}
// Verifies that each runtime update advances lastHeardTime.
@Test
public void shouldUpdateTheLastHeardTime() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    Date firstHeard = agent.getLastHeardTime();
    Thread.sleep(1000); // ensure a measurable gap between the two pings
    agent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    Date secondHeard = agent.getLastHeardTime();
    assertThat(secondHeard.after(firstHeard), is(true));
}
// Verifies that the build-command-protocol flag always tracks the latest runtime report.
@Test
public void shouldUpdateSupportBuildCommandProtocolFlag() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    assertThat(agent.getSupportsBuildCommandProtocol(), is(false));
    agent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    assertThat(agent.getSupportsBuildCommandProtocol(), is(false));
    agent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", true));
    assertThat(agent.getSupportsBuildCommandProtocol(), is(true));
}
// Verifies that a registered agent's IP address follows the one reported at runtime.
@Test
public void shouldUpdateIPForPhysicalMachineWhenUpChanged() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(new AgentRuntimeInfo(new AgentIdentifier("ccedev01", "10.18.7.52", "uuid"), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    assertThat(agent.agentConfig().getIpAddress(), is("10.18.7.52"));
}
// Verifies that going idle clears any previously reported building information.
@Test
public void shouldCleanBuildingInfoWhenAgentIsIdle() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(buildingRuntimeInfo());
    agent.update(idleRuntimeInfo());
    assertThat(agent.getBuildingInfo(), is(AgentBuildingInfo.NOT_BUILDING));
}
// Builds a runtime report representing an idle agent for the default agentConfig.
private AgentRuntimeInfo idleRuntimeInfo() {
    AgentRuntimeInfo info = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    info.idle();
    return info;
}
// Verifies that a busy runtime report transfers its building info to the instance.
@Test
public void shouldUpdateBuildingInfoWhenAgentIsBuilding() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    AgentBuildingInfo buildingInfo = new AgentBuildingInfo("running pipeline/stage/build", "buildLocator");
    AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    runtimeInfo.busy(buildingInfo);
    agent.update(runtimeInfo);
    assertThat(agent.getBuildingInfo(), is(buildingInfo));
}
// Verifies that cancelling mid-build keeps the building info and flips the status to Cancelled.
@Test
public void shouldUpdateBuildingInfoWhenAgentIsBuildingWhenCancelled() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(buildingRuntimeInfo());
    agent.update(cancelRuntimeInfo());
    assertThat(agent.getBuildingInfo(), is(defaultBuildingInfo));
    assertThat(agent.getStatus(), is(AgentStatus.Cancelled));
}
// Builds a runtime report for an agent whose current build has been cancelled.
private AgentRuntimeInfo cancelRuntimeInfo() {
    AgentRuntimeInfo info = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    info.busy(defaultBuildingInfo);
    info.cancel();
    return info;
}
// Verifies that a pending agent never requires an IP change, even when the reported IP differs.
@Test
public void shouldNotChangePendingAgentIpAddress() throws Exception {
    AgentRuntimeInfo pendingInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    AgentInstance pendingAgent = AgentInstance.createFromLiveAgent(pendingInfo, systemEnvironment);
    AgentRuntimeInfo reported = new AgentRuntimeInfo(new AgentIdentifier("ccedev01", "10.18.7.52", "uuid"), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    assertThat(pendingAgent.isIpChangeRequired(reported.getIpAdress()), is(false));
}
// Verifies that a registered agent requires an IP change when the reported IP differs.
@Test
public void shouldChangeIpWhenSameAgentIpChanged() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    AgentRuntimeInfo reported = new AgentRuntimeInfo(new AgentIdentifier("ccedev01", "10.18.7.52", "uuid"), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    assertThat(agent.isIpChangeRequired(reported.getIpAdress()), is(true));
}
// Verifies that no IP change is required when the reported IP matches the configured one.
@Test
public void shouldNotChangeIpWhenIpNotChanged() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    assertThat(agent.isIpChangeRequired(DEFAULT_IP_ADDRESS), is(false));
}
// Verifies that syncing config on an approved agent that never pinged leaves it Missing.
@Test
public void shouldDefaultToMissingStatusWhenSyncAnApprovedAgent() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.syncConfig(agentConfig);
    assertThat(agent.getStatus(), is(AgentStatus.Missing));
}
// Verifies that a pending agent is not considered registered.
// Renamed from pendingAgentshouldNotBeRegistered to follow camelCase test naming.
@Test
public void pendingAgentShouldNotBeRegistered() throws Exception {
    AgentRuntimeInfo agentRuntimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    AgentInstance instance = AgentInstance.createFromLiveAgent(agentRuntimeInfo, systemEnvironment);
    assertThat(instance.isRegistered(), is(false));
}
// Verifies that a disabled (denied) agent still counts as registered.
// Renamed from deniedAgentshouldBeRegistered to follow camelCase test naming.
@Test
public void deniedAgentShouldBeRegistered() throws Exception {
    agentConfig.disable();
    AgentInstance instance = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    assertThat(instance.isRegistered(), is(true));
}
// Verifies that an idle agent created from config counts as registered.
@Test
public void shouldBeRegisteredForIdleAgent() throws Exception {
    AgentInstance idleAgent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    idleAgent.update(idleRuntimeInfo());
    assertThat(idleAgent.isRegistered(), is(true));
}
// Verifies that enabling a pending agent and receiving a ping moves it to Idle.
@Test
public void shouldBecomeIdleAfterApprove() throws Exception {
    AgentInstance pendingAgent = AgentInstance.createFromLiveAgent(
        new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
        systemEnvironment);
    pendingAgent.enable();
    pendingAgent.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    assertThat(pendingAgent.getStatus(), is(AgentStatus.Idle));
}
// Verifies that an agent that never pinged stays Missing, even after a refresh.
@Test
public void shouldBeMissingWhenNeverHeardFromAnyAgent() {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    assertThat(agent.getStatus(), is(AgentStatus.Missing));
    agent.refresh(null);
    assertThat(agent.getStatus(), is(AgentStatus.Missing));
}
// Verifies that an agent flips to LostContact when its last ping is older than the
// connection timeout (forced here with a negative timeout).
// Renamed from shouldBeLostContactWhenLastHeardTimeExeedTimeOut to fix the "Exeed" typo,
// and added the missing @Override on the anonymous-class method.
@Test
public void shouldBeLostContactWhenLastHeardTimeExceedTimeOut() {
    AgentInstance instance = AgentInstance.createFromConfig(agentConfig, new SystemEnvironment() {
        @Override
        public int getAgentConnectionTimeout() {
            return -1;
        }
    });
    assertThat(instance.getStatus(), is(AgentStatus.Missing));
    instance.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    instance.refresh(null);
    assertThat(instance.getStatus(), is(AgentStatus.LostContact));
}
// Verifies that a disabled agent is never flipped to LostContact by a refresh.
// Added the missing @Override on the anonymous-class method (matches the style used
// elsewhere in this class).
@Test
public void shouldNotRefreshDeniedAgent() throws Exception {
    agentConfig.disable();
    AgentInstance instance = AgentInstance.createFromConfig(agentConfig, new SystemEnvironment() {
        @Override
        public int getAgentConnectionTimeout() {
            return -1;
        }
    });
    instance.update(new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    instance.refresh(null);
    assertThat(instance.getStatus().getRuntimeStatus(), is(not(AgentRuntimeStatus.LostContact)));
}
// Verifies that denying a pending agent disables it.
@Test
public void shouldDenyPendingAgent() throws Exception {
    AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    AgentInstance pendingAgent = AgentInstance.createFromLiveAgent(runtimeInfo, systemEnvironment);
    pendingAgent.deny();
    assertThat(pendingAgent.getStatus(), is(AgentStatus.Disabled));
}
// Verifies that a recently-pinged idle agent stays Idle across a refresh.
@Test
public void shouldBeLiveStatus() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(idleRuntimeInfo());
    agent.refresh(null);
    assertThat(agent.getStatus(), is(AgentStatus.Idle));
}
// Verifies that the stored config is synced with the IP reported at runtime.
@Test
public void shouldSyncIPWithConfig() {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(new AgentRuntimeInfo(new AgentIdentifier("CCeDev01", "10.18.5.2", "uuid2"), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false));
    assertThat(agent.agentConfig(), is(new AgentConfig("uuid2", "CCeDev01", "10.18.5.2")));
}
// Verifies that syncing an unchanged (non-disabled) config keeps the runtime status.
@Test
public void shouldKeepOriginalStatusWhenAgentIsNotDenied() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(buildingRuntimeInfo(agentConfig));
    agent.syncConfig(agentConfig);
    assertThat(agent.getStatus(), is(AgentStatus.Building));
}
// Verifies that syncing a disabled config disables the agent even while it is building.
@Test
public void shouldDenyAgentWhenAgentIsDeniedInConfigFile() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(buildingRuntimeInfo());
    AgentConfig disabledConfig = new AgentConfig(agentConfig.getUuid(), agentConfig.getHostname(), agentConfig.getIpAddress());
    disabledConfig.disable();
    agent.syncConfig(disabledConfig);
    assertThat(agent.getStatus(), is(AgentStatus.Disabled));
}
// Verifies that denying an idle agent disables both the config and the instance.
@Test
public void shouldDenyAgentWhenItIsNotBuilding() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    agent.update(idleRuntimeInfo());
    agent.deny();
    assertThat(agentConfig.isDisabled(), is(true));
    assertThat(agent.getStatus(), is(AgentStatus.Disabled));
}
// Verifies that an agent with more free space than the configured limit is not low on disk.
@Test
public void shouldReturnFalseWhenAgentHasEnoughSpace() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, new SystemEnvironment() {
        @Override public long getAgentSizeLimit() {
            return 100 * 1024 * 1024;
        }
    });
    AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    long oneHundredTenMb = 110 * 1024 * 1024;
    runtimeInfo.setUsableSpace(oneHundredTenMb);
    agent.update(runtimeInfo);
    assertThat(agent.isLowDiskSpace(), is(false));
}
// Verifies that an agent with less free space than the configured limit is low on disk.
@Test
public void shouldReturnTrueWhenFreeDiskOnAgentIsLow() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, new SystemEnvironment() {
        @Override public long getAgentSizeLimit() {
            return 100 * 1024 * 1024;
        }
    });
    AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    long ninetyMb = 90 * 1024 * 1024;
    runtimeInfo.setUsableSpace(ninetyMb);
    agent.update(runtimeInfo);
    assertThat(agent.isLowDiskSpace(), is(true));
}
// Verifies that a building agent can still be denied and keeps its building info.
@Test
public void shouldBeAbleToDenyAgentWhenItIsBuilding() throws Exception {
    AgentInstance agent = AgentInstance.createFromConfig(agentConfig, systemEnvironment);
    AgentRuntimeInfo buildingInfo = buildingRuntimeInfo();
    agent.update(buildingInfo);
    assertThat(agent.canDisable(), is(true));
    agent.deny();
    assertThat(agentConfig.isDisabled(), is(true));
    assertThat(agent.getStatus(), is(AgentStatus.Disabled));
    assertThat(agent.getBuildingInfo(), is(buildingInfo.getBuildingInfo()));
}
// Verifies that agents order by hostname and the comparison is consistent in both directions.
@Test
public void shouldOrderByHostname() throws Exception {
    AgentInstance hostA = new AgentInstance(new AgentConfig("UUID", "A", "127.0.0.1"), LOCAL, systemEnvironment);
    AgentInstance hostB = new AgentInstance(new AgentConfig("UUID", "B", "127.0.0.2"), LOCAL, systemEnvironment);
    assertThat(hostA.compareTo(hostA), is(0));
    assertThat(hostA.compareTo(hostB), lessThan(0));
    assertThat(hostB.compareTo(hostA), greaterThan(0));
}
// NOTE(review): despite the name, agentA and agentB share the uuid "UUID" and differ only
// in hostname/IP, so this actually exercises inequality on the full config rather than on
// uuid — confirm whether the test name or the fixture data should change.
@Test public void shouldNotBeEqualIfUuidIsNotEqual() throws Exception {
AgentInstance agentA = new AgentInstance(new AgentConfig("UUID", "A", "127.0.0.1"), LOCAL, systemEnvironment);
AgentInstance copyOfAgentA = new AgentInstance(new AgentConfig("UUID", "A", "127.0.0.1"),
LOCAL, systemEnvironment);
AgentInstance agentB = new AgentInstance(new AgentConfig("UUID", "B", "127.0.0.2"), LOCAL, systemEnvironment);
assertThat(agentA, is(not(agentB)));
assertThat(agentB, is(not(agentA)));
assertThat(agentA, is(copyOfAgentA));
}
// Verifies that an agent running a cancelled job can be denied and keeps the cancelled
// build's info.
@Test
public void shouldBeAbleToDenyAgentThatIsRunningCancelledJob() {
    AgentConfig config = new AgentConfig("UUID", "A", "127.0.0.1");
    AgentInstance agent = new AgentInstance(config, LOCAL, systemEnvironment);
    agent.cancel();
    AgentBuildingInfo cancelledBuildInfo = agent.getBuildingInfo();
    assertThat(agent.canDisable(), is(true));
    agent.deny();
    assertThat(config.isDisabled(), is(true));
    assertThat(agent.getStatus(), is(AgentStatus.Disabled));
    assertThat(agent.getBuildingInfo(), is(cancelledBuildInfo));
}
// Verifies that firstMatching returns null when no job plan matches the agent's resources.
@Test public void shouldReturnNullWhenNoMatchingJobs() throws Exception {
    AgentInstance agent = new AgentInstance(agentConfig("linux, mercurial"), LOCAL, systemEnvironment);
    assertThat(agent.firstMatching(new ArrayList<>()), is(nullValue()));
}
// Verifies that firstMatching returns the first plan whose resources the agent satisfies.
@Test public void shouldReturnFirstMatchingJobPlan() throws Exception {
    AgentInstance agent = new AgentInstance(agentConfig("linux, mercurial"), LOCAL, systemEnvironment);
    List<JobPlan> plans = jobPlans("linux, svn", "linux, mercurial");
    assertThat(agent.firstMatching(plans), is(plans.get(1)));
}
// Verifies that a plan explicitly assigned to this agent's UUID is matched.
// Replaced double-brace initialization (creates an anonymous subclass holding an
// enclosing-instance reference) with a plain ArrayList.
@Test public void shouldReturnAJobPlanWithMatchingUuidSet() throws Exception {
    AgentConfig config = agentConfig("linux, mercurial");
    AgentInstance agentInstance = new AgentInstance(config, LOCAL, systemEnvironment);
    JobPlan job = jobPlan("pipeline-name", "job-name", "resource", config.getUuid());
    List<JobPlan> plans = new ArrayList<>();
    plans.add(job);
    JobPlan matchingJob = agentInstance.firstMatching(plans);
    assertThat(matchingJob, is(job));
}
// Verifies that a plan pinned to a different agent UUID is never matched.
// Replaced double-brace initialization (creates an anonymous subclass holding an
// enclosing-instance reference) with a plain ArrayList.
@Test public void shouldNotReturnAJobWithMismatchedUuid() throws Exception {
    AgentConfig config = agentConfig("linux, mercurial");
    AgentInstance agentInstance = new AgentInstance(config, LOCAL, systemEnvironment);
    JobPlan job = jobPlan("pipeline-name", "job-name", "linux", config.getUuid() + "-ensure-doesn't-match");
    List<JobPlan> plans = new ArrayList<>();
    plans.add(job);
    JobPlan matchingJob = agentInstance.firstMatching(plans);
    assertThat(matchingJob, is(nullValue()));
}
// Verifies that syncing a config onto a pending agent approves it and makes it Idle.
@Test public void shouldSetAgentToIdleWhenItIsApproved() {
    AgentInstance pendingAgent = AgentInstanceMother.pending();
    AgentConfig approvedConfig = new AgentConfig(pendingAgent.getUuid(), pendingAgent.getHostname(), pendingAgent.getIpAddress());
    pendingAgent.syncConfig(approvedConfig);
    assertThat(pendingAgent.getStatus(), is(AgentStatus.Idle));
}
// Verifies that syncConfig propagates elastic-agent metadata onto the instance.
@Test public void syncConfigShouldUpdateElasticAgentRuntimeInfo() {
    AgentInstance agent = AgentInstanceMother.idle();
    AgentConfig elasticConfig = new AgentConfig(agent.getUuid(), agent.getHostname(), agent.getIpAddress());
    elasticConfig.setElasticAgentId("i-123456");
    elasticConfig.setElasticPluginId("com.example.aws");
    assertFalse(agent.isElastic());
    agent.syncConfig(elasticConfig);
    assertTrue(agent.isElastic());
    assertEquals("i-123456", agent.elasticAgentMetadata().elasticAgentId());
    assertEquals("com.example.aws", agent.elasticAgentMetadata().elasticPluginId());
}
// Free disk space is only meaningful while the agent is in contact: with runtime status
// Missing or LostContact the value reported is "unknown", an Idle agent reports the actual
// value, and a null usable-space update also yields "unknown".
@Test
public void shouldReturnFreeDiskSpace() throws Exception {
assertThat(AgentInstanceMother.updateRuntimeStatus(AgentInstanceMother.updateUsableSpace(AgentInstanceMother.idle(new Date(), "CCeDev01"), 1024L), AgentRuntimeStatus.Missing).freeDiskSpace(), is(DiskSpace.unknownDiskSpace()));
assertThat(AgentInstanceMother.updateRuntimeStatus(AgentInstanceMother.updateUsableSpace(AgentInstanceMother.idle(new Date(), "CCeDev01"), 1024L), AgentRuntimeStatus.LostContact).freeDiskSpace(), is(DiskSpace.unknownDiskSpace()));
assertThat(AgentInstanceMother.updateRuntimeStatus(AgentInstanceMother.updateUsableSpace(AgentInstanceMother.idle(new Date(), "CCeDev01"), 1024L), AgentRuntimeStatus.Idle).freeDiskSpace(), is(new DiskSpace(1024L)));
assertThat(AgentInstanceMother.updateRuntimeStatus(AgentInstanceMother.updateUsableSpace(AgentInstanceMother.idle(new Date(), "CCeDev01"), null), AgentRuntimeStatus.Idle).freeDiskSpace(), is(DiskSpace.unknownDiskSpace()));
}
@Test
public void shouldReturnAppropriateMissingStatus() {
    // Only an agent in the missing state reports itself as missing.
    assertTrue(AgentInstanceMother.missing().isMissing());
    assertFalse(AgentInstanceMother.building().isMissing());
}
@Test
public void shouldMatchJobPlanIfTheAgentWasLaunchedByTheSamePluginAsWasConfiguredForTheJob(){
    String elasticPluginId = "elastic-plugin-id-1";
    AgentConfig agentConfig = new AgentConfig("uuid");
    agentConfig.setElasticAgentId("elastic-agent-id-1");
    agentConfig.setElasticPluginId(elasticPluginId);
    AgentInstance agentInstance = new AgentInstance(agentConfig, REMOTE, mock(SystemEnvironment.class));
    // Plan whose elastic profile names the SAME plugin that launched this agent.
    DefaultJobPlan elasticJobPlan = new DefaultJobPlan();
    elasticJobPlan.setElasticProfile(new ElasticProfile("foo", elasticPluginId));
    // The plugin-matched plan wins over the plain (non-elastic) plan in the list.
    assertThat(agentInstance.firstMatching(asList(elasticJobPlan, new DefaultJobPlan())), Is.<JobPlan>is(elasticJobPlan));
}
@Test
public void shouldNotMatchJobPlanIfTheAgentWasLaunchedByADifferentPluginFromThatConfiguredForTheJob(){
    AgentConfig agentConfig = new AgentConfig("uuid");
    agentConfig.setElasticAgentId("elastic-agent-id-1");
    agentConfig.setElasticPluginId("elastic-plugin-id-1");
    AgentInstance agentInstance = new AgentInstance(agentConfig, REMOTE, mock(SystemEnvironment.class));
    // Plan whose elastic profile names a DIFFERENT plugin than the one that launched the agent.
    DefaultJobPlan foreignPluginPlan = new DefaultJobPlan();
    foreignPluginPlan.setElasticProfile(new ElasticProfile("foo", "elastic-plugin-id-2"));
    // Neither the foreign-plugin plan nor the plain plan may be assigned to this elastic agent.
    assertThat(agentInstance.firstMatching(asList(foreignPluginPlan, new DefaultJobPlan())), is(nullValue()));
}
@Test
public void shouldNotMatchJobPlanIfTheAgentIsElasticAndJobHasResourcesDefined(){
    // Elastic agent that (unusually) also advertises a resource.
    AgentConfig agentConfig = new AgentConfig("uuid", "hostname", "11.1.1.1", new Resources(new Resource("r1")));
    agentConfig.setElasticAgentId("elastic-agent-id-1");
    agentConfig.setElasticPluginId("elastic-plugin-id-1");
    AgentInstance agentInstance = new AgentInstance(agentConfig, REMOTE, mock(SystemEnvironment.class));
    // Plan that requests the matching resource — still must not run on an elastic agent.
    DefaultJobPlan resourcePlan = new DefaultJobPlan();
    resourcePlan.setResources(asList(new Resource("r1")));
    assertThat(agentInstance.firstMatching(asList(resourcePlan, new DefaultJobPlan())), is(nullValue()));
}
@Test
public void lostContact() {
    // A building agent transitions to LostContact when contact is lost.
    AgentInstance building = AgentInstanceMother.building();
    building.lostContact();
    assertThat(building.getStatus(), is(AgentStatus.LostContact));
    // Pending and disabled agents keep their status on lost contact.
    AgentInstance pending = AgentInstanceMother.pending();
    pending.lostContact();
    assertThat(pending.getStatus(), is(AgentStatus.Pending));
    AgentInstance disabled = AgentInstanceMother.disabled();
    disabled.lostContact();
    assertThat(disabled.getStatus(), is(AgentStatus.Disabled));
}
// Builds one job plan per resource string, numbering pipelines/jobs from 1
// (pipeline1/job1, pipeline2/job2, ...). No plan is pinned to an agent UUID.
private List<JobPlan> jobPlans(String... resources) {
    // Program to the interface: the local is a List, not an ArrayList.
    List<JobPlan> plans = new ArrayList<>(resources.length);
    for (int i = 0; i < resources.length; i++) {
        plans.add(jobPlan("pipeline" + (i + 1), "job" + (i + 1), resources[i], null));
    }
    return plans;
}
// Creates a job plan for the given pipeline/job requiring the given resource;
// uuid (nullable) pins the plan to a specific agent via setAgentUuid.
private DefaultJobPlan jobPlan(String pipelineName, String jobName, String resource, String uuid) {
// Counters/labels ("1", "stage1", 100, nulls) are arbitrary placeholders for these tests —
// their exact meaning depends on the JobIdentifier/DefaultJobPlan constructors; TODO confirm if reused.
JobIdentifier jobIdentifier = new JobIdentifier(pipelineName, 1, "1", "stage1", "1", jobName, 1L);
DefaultJobPlan plan = new DefaultJobPlan(new Resources(resource), new ArtifactPlans(), null, 100, jobIdentifier, null, new EnvironmentVariablesConfig(), new EnvironmentVariablesConfig(), null);
plan.setAgentUuid(uuid);
return plan;
}
// Creates an agent config with fixed identity (UUID/"A"/127.0.0.1) and the given resources.
private AgentConfig agentConfig(String resources) {
    Resources parsedResources = new Resources(resources);
    return new AgentConfig("UUID", "A", "127.0.0.1", parsedResources);
}
// Convenience overload: building runtime info for this test's default agentConfig field.
private AgentRuntimeInfo buildingRuntimeInfo() {
return buildingRuntimeInfo(agentConfig);
}
// Creates runtime info for the given agent (initially Idle, cookie "cookie"),
// then marks it busy with the default building info.
private AgentRuntimeInfo buildingRuntimeInfo(AgentConfig agentConfig) {
    AgentRuntimeInfo info = new AgentRuntimeInfo(agentConfig.getAgentIdentifier(), AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false);
    info.busy(defaultBuildingInfo);
    return info;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.manual;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.runtime.CopyableValueComparator;
import org.apache.flink.api.java.typeutils.runtime.CopyableValueSerializer;
import org.apache.flink.api.java.typeutils.runtime.RuntimeSerializerFactory;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.operators.sort.UnilateralSortMerger;
import org.apache.flink.runtime.operators.testutils.DummyInvokable;
import org.apache.flink.types.StringValue;
import org.apache.flink.util.MutableObjectIterator;
import org.junit.Assert;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;
/**
* Test {@link UnilateralSortMerger} on a large set of {@link StringValue}.
*/
public class MassiveStringValueSorting {
// Fixed RNG seed so the generated input data is reproducible across runs.
private static final long SEED = 347569784659278346L;
/**
 * Sorts 300,000 generated StringValues with the UnilateralSortMerger and verifies the
 * result line-by-line against the output of the external Unix 'sort' command
 * (run with LC_ALL=C so byte-wise collation matches the StringValue comparator).
 * Fails the test via Assert.fail on any exception.
 */
public void testStringValueSorting() {
File input = null;
File sorted = null;
try {
// the source file
input = generateFileWithStrings(300000, "http://some-uri.com/that/is/a/common/prefix/to/all");
// the sorted file (reference output produced by /usr/bin/sort)
sorted = File.createTempFile("sorted_strings", "txt");
String[] command = {"/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\""};
Process p = null;
try {
p = Runtime.getRuntime().exec(command);
int retCode = p.waitFor();
if (retCode != 0) {
throw new Exception("Command failed with return code " + retCode);
}
// null signals normal completion; the finally block only destroys a still-live process
p = null;
} finally {
if (p != null) {
p.destroy();
}
}
// sort the data with Flink's sorter
UnilateralSortMerger<StringValue> sorter = null;
BufferedReader reader = null;
BufferedReader verifyReader = null;
MemoryManager mm = null;
IOManager ioMan = null;
try {
mm = new MemoryManager(1024 * 1024, 1);
ioMan = new IOManagerAsync();
TypeSerializer<StringValue> serializer = new CopyableValueSerializer<StringValue>(StringValue.class);
TypeComparator<StringValue> comparator = new CopyableValueComparator<StringValue>(true, StringValue.class);
reader = new BufferedReader(new FileReader(input));
MutableObjectIterator<StringValue> inputIterator = new StringValueReaderMutableObjectIterator(reader);
sorter = new UnilateralSortMerger<StringValue>(mm, ioMan, inputIterator, new DummyInvokable(),
new RuntimeSerializerFactory<StringValue>(serializer, StringValue.class), comparator, 1.0, 4, 0.8f,
true /* use large record handler */, true);
// NOTE(review): the input reader is closed right after obtaining the iterator,
// which presumes the sorter has fully consumed the input by then — confirm against the sorter's contract.
MutableObjectIterator<StringValue> sortedData = sorter.getIterator();
reader.close();
// verify: every line of the reference output must match the sorter's output, in order
verifyReader = new BufferedReader(new FileReader(sorted));
String nextVerify;
StringValue nextFromFlinkSort = new StringValue();
while ((nextVerify = verifyReader.readLine()) != null) {
nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
Assert.assertNotNull(nextFromFlinkSort);
Assert.assertEquals(nextVerify, nextFromFlinkSort.getValue());
}
}
finally {
// release resources in reverse-dependency order: readers, sorter, memory, I/O manager
if (reader != null) {
reader.close();
}
if (verifyReader != null) {
verifyReader.close();
}
if (sorter != null) {
sorter.close();
}
if (mm != null) {
mm.shutdown();
}
if (ioMan != null) {
ioMan.shutdown();
}
}
}
catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
Assert.fail(e.getMessage());
}
finally {
// best-effort cleanup of the temp files
if (input != null) {
//noinspection ResultOfMethodCallIgnored
input.delete();
}
if (sorted != null) {
//noinspection ResultOfMethodCallIgnored
sorted.delete();
}
}
}
/**
 * Same structure as {@link #testStringValueSorting()} but for
 * Tuple2&lt;StringValue, StringValue[]&gt; records sorted on field 0; additionally
 * checks that exactly numStrings records come back and the iterator is then exhausted.
 */
@SuppressWarnings("unchecked")
public void testStringValueTuplesSorting() {
final int numStrings = 300000;
File input = null;
File sorted = null;
try {
// the source file
input = generateFileWithStringTuples(numStrings, "http://some-uri.com/that/is/a/common/prefix/to/all");
// the sorted file (reference output produced by /usr/bin/sort)
sorted = File.createTempFile("sorted_strings", "txt");
String[] command = {"/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\""};
Process p = null;
try {
p = Runtime.getRuntime().exec(command);
int retCode = p.waitFor();
if (retCode != 0) {
throw new Exception("Command failed with return code " + retCode);
}
p = null;
} finally {
if (p != null) {
p.destroy();
}
}
// sort the data with Flink's sorter
UnilateralSortMerger<Tuple2<StringValue, StringValue[]>> sorter = null;
BufferedReader reader = null;
BufferedReader verifyReader = null;
MemoryManager mm = null;
IOManager ioMan = null;
try {
mm = new MemoryManager(1024 * 1024, 1);
ioMan = new IOManagerAsync();
TupleTypeInfo<Tuple2<StringValue, StringValue[]>> typeInfo = (TupleTypeInfo<Tuple2<StringValue, StringValue[]>>)
new TypeHint<Tuple2<StringValue, StringValue[]>>(){}.getTypeInfo();
TypeSerializer<Tuple2<StringValue, StringValue[]>> serializer = typeInfo.createSerializer(new ExecutionConfig());
// compare on field 0 only (the leading StringValue), ascending
TypeComparator<Tuple2<StringValue, StringValue[]>> comparator = typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
reader = new BufferedReader(new FileReader(input));
MutableObjectIterator<Tuple2<StringValue, StringValue[]>> inputIterator = new StringValueTupleReaderMutableObjectIterator(reader);
sorter = new UnilateralSortMerger<Tuple2<StringValue, StringValue[]>>(mm, ioMan, inputIterator, new DummyInvokable(),
new RuntimeSerializerFactory<Tuple2<StringValue, StringValue[]>>(serializer, (Class<Tuple2<StringValue, StringValue[]>>) (Class<?>) Tuple2.class), comparator, 1.0, 4, 0.8f,
true /* use large record handler */, false);
// use this part to verify that all if good when sorting in memory
// List<MemorySegment> memory = mm.allocatePages(new DummyInvokable(), mm.computeNumberOfPages(1024*1024*1024));
// NormalizedKeySorter<Tuple2<String, String[]>> nks = new NormalizedKeySorter<Tuple2<String,String[]>>(serializer, comparator, memory);
//
// {
// Tuple2<String, String[]> wi = new Tuple2<String, String[]>("", new String[0]);
// while ((wi = inputIterator.next(wi)) != null) {
// Assert.assertTrue(nks.write(wi));
// }
//
// new QuickSort().sort(nks);
// }
//
// MutableObjectIterator<Tuple2<String, String[]>> sortedData = nks.getIterator();
MutableObjectIterator<Tuple2<StringValue, StringValue[]>> sortedData = sorter.getIterator();
reader.close();
// verify: re-parse the reference output with the same tuple reader and compare record by record
verifyReader = new BufferedReader(new FileReader(sorted));
MutableObjectIterator<Tuple2<StringValue, StringValue[]>> verifyIterator = new StringValueTupleReaderMutableObjectIterator(verifyReader);
Tuple2<StringValue, StringValue[]> nextVerify = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
Tuple2<StringValue, StringValue[]> nextFromFlinkSort = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
int num = 0;
while ((nextVerify = verifyIterator.next(nextVerify)) != null) {
num++;
nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
Assert.assertNotNull(nextFromFlinkSort);
Assert.assertEquals(nextVerify.f0, nextFromFlinkSort.f0);
Assert.assertArrayEquals(nextVerify.f1, nextFromFlinkSort.f1);
}
// the sorter must be exhausted and must have produced exactly the generated record count
Assert.assertNull(sortedData.next(nextFromFlinkSort));
Assert.assertEquals(numStrings, num);
}
finally {
if (reader != null) {
reader.close();
}
if (verifyReader != null) {
verifyReader.close();
}
if (sorter != null) {
sorter.close();
}
if (mm != null) {
mm.shutdown();
}
if (ioMan != null) {
ioMan.shutdown();
}
}
}
catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
Assert.fail(e.getMessage());
}
finally {
// best-effort cleanup of the temp files
if (input != null) {
//noinspection ResultOfMethodCallIgnored
input.delete();
}
if (sorted != null) {
//noinspection ResultOfMethodCallIgnored
sorted.delete();
}
}
}
// --------------------------------------------------------------------------------------------
// Adapts a BufferedReader to MutableObjectIterator: one line per StringValue record.
private static final class StringValueReaderMutableObjectIterator implements MutableObjectIterator<StringValue> {
private final BufferedReader reader;
public StringValueReaderMutableObjectIterator(BufferedReader reader) {
this.reader = reader;
}
// Returns the reused holder filled with the next line, or null at end of stream.
@Override
public StringValue next(StringValue reuse) throws IOException {
String line = reader.readLine();
if (line == null) {
return null;
}
reuse.setValue(line);
return reuse;
}
@Override
public StringValue next() throws IOException {
return next(new StringValue());
}
}
// Parses one space-separated line per record: first token into f0, ALL tokens
// (including the first) into a fresh f1 array.
private static final class StringValueTupleReaderMutableObjectIterator implements MutableObjectIterator<Tuple2<StringValue, StringValue[]>> {
private final BufferedReader reader;
public StringValueTupleReaderMutableObjectIterator(BufferedReader reader) {
this.reader = reader;
}
@Override
public Tuple2<StringValue, StringValue[]> next(Tuple2<StringValue, StringValue[]> reuse) throws IOException {
String line = reader.readLine();
if (line == null) {
return null;
}
String[] parts = line.split(" ");
reuse.f0.setValue(parts[0]);
// f1 is reallocated each record because the component count varies per line
reuse.f1 = new StringValue[parts.length];
for (int i = 0; i < parts.length; i++) {
reuse.f1[i] = new StringValue(parts[i]);
}
return reuse;
}
@Override
public Tuple2<StringValue, StringValue[]> next() throws IOException {
return next(new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]));
}
}
// --------------------------------------------------------------------------------------------
// Writes numStrings lines, each the fixed prefix followed by 300-319 random chars
// drawn from the char range [40, 120). Deterministic via SEED. Caller deletes the file.
private File generateFileWithStrings(int numStrings, String prefix) throws IOException {
final Random rnd = new Random(SEED);
final StringBuilder bld = new StringBuilder();
final int resetValue = prefix.length();
bld.append(prefix);
File f = File.createTempFile("strings", "txt");
BufferedWriter wrt = null;
try {
wrt = new BufferedWriter(new FileWriter(f));
for (int i = 0; i < numStrings; i++) {
// keep the shared prefix, discard the previous random suffix
bld.setLength(resetValue);
int len = rnd.nextInt(20) + 300;
for (int k = 0; k < len; k++) {
char c = (char) (rnd.nextInt(80) + 40);
bld.append(c);
}
String str = bld.toString();
wrt.write(str);
wrt.newLine();
}
} finally {
if (wrt != null) {
wrt.close();
}
}
return f;
}
// Writes numStrings lines of 1-5 space-separated components; each component is the
// prefix plus 10-29 random chars from the char range [40, 120). Deterministic via SEED.
private File generateFileWithStringTuples(int numStrings, String prefix) throws IOException {
final Random rnd = new Random(SEED);
final StringBuilder bld = new StringBuilder();
File f = File.createTempFile("strings", "txt");
BufferedWriter wrt = null;
try {
wrt = new BufferedWriter(new FileWriter(f));
for (int i = 0; i < numStrings; i++) {
bld.setLength(0);
int numComps = rnd.nextInt(5) + 1;
for (int z = 0; z < numComps; z++) {
if (z > 0) {
bld.append(' ');
}
bld.append(prefix);
int len = rnd.nextInt(20) + 10;
for (int k = 0; k < len; k++) {
char c = (char) (rnd.nextInt(80) + 40);
bld.append(c);
}
}
String str = bld.toString();
wrt.write(str);
wrt.newLine();
}
} finally {
if (wrt != null) {
wrt.close();
}
}
return f;
}
// --------------------------------------------------------------------------------------------
// Standalone entry point: runs both stress tests (this lives in the 'manual' test package).
public static void main(String[] args) {
new MassiveStringValueSorting().testStringValueSorting();
new MassiveStringValueSorting().testStringValueTuplesSorting();
}
}
| |
package org.apache.storm.benchmark.metrics;
import org.apache.storm.Config;
import org.apache.storm.generated.*;
import org.apache.storm.utils.NimbusClient;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Immutable-after-construction snapshot of cluster and topology metrics, taken from
 * Nimbus via {@link #factory(Nimbus.Client, String)}. Emitted/transferred/acked/failed
 * counts are read from the ":all-time" window, excluding system streams.
 */
public class MetricsSample {
    private static final Logger LOG = LoggerFactory.getLogger(MetricsSample.class);

    // Snapshot fields populated by factory(); exposed through the getters below.
    private long sampleTime = -1;        // System.currentTimeMillis() when the sample was taken
    private long totalTransferred = 0L;
    private long totalEmitted = 0L;
    private long totalAcked = 0L;
    private long totalFailed = 0L;
    private double totalLatency;         // average spout complete latency across spout executors
    private long spoutEmitted = 0L;
    private long spoutTransferred = 0L;
    private int spoutExecutors = 0;
    private int numSupervisors = 0;
    private int numWorkers = 0;
    private int numTasks = 0;
    private int numExecutors = 0;
    private int totalSlots = 0;
    private int usedSlots = 0;

    /**
     * Standalone smoke entry point: connects to a local Nimbus and samples the
     * "trident-wordcount" topology.
     */
    public static void main(String[] args) throws Exception {
        Config config = new Config();
        config.put("nimbus.thrift.port", 6627);
        config.put("storm.messaging.transport", "org.apache.storm.messaging.netty.Context");
        config.put("storm.thrift.transport", "org.apache.storm.security.auth.SimpleTransportPlugin");
        config.put("storm.nimbus.retry.times", 5);
        config.put("storm.nimbus.retry.interval.millis", 2000);
        config.put("storm.nimbus.retry.intervalceiling.millis", 60000);
        config.put("nimbus.thrift.max_buffer_size", 1048576);
        ArrayList<String> seeds = new ArrayList<>();
        seeds.add("localhost");
        config.put("nimbus.seeds", seeds);
        Nimbus.Client nimbus = NimbusClient.getConfiguredClient(config).getClient();
        factory(nimbus, "trident-wordcount");
    }

    /**
     * Takes a metrics snapshot of the named topology.
     *
     * @param client       connected Nimbus client
     * @param topologyName name of the running topology to sample
     * @return a populated {@link MetricsSample}
     * @throws IllegalArgumentException if no topology with that name is found
     * @throws Exception if the Nimbus thrift calls fail
     */
    public static MetricsSample factory(Nimbus.Client client, String topologyName) throws Exception {
        LOG.info("************ Sampling Metrics *****************");
        MetricsSample ret = new MetricsSample();
        ClusterSummary clusterSummary = client.getClusterInfo();
        LOG.info("*** Read Cluster Summary **");
        // Sum worker slots over all supervisors.
        int totalSlots = 0;
        int usedSlots = 0;
        for (SupervisorSummary supervisor : clusterSummary.get_supervisors()) {
            totalSlots += supervisor.get_num_workers();
            usedSlots += supervisor.get_num_used_workers();
        }
        TopologySummary topSummary = getTopologySummary(clusterSummary, topologyName);
        if (topSummary == null) {
            // Fail fast with a clear message instead of the NPE the calls below would raise.
            throw new IllegalArgumentException("Topology not found: " + topologyName);
        }
        int topologyExecutors = topSummary.get_num_executors();
        int topologyWorkers = topSummary.get_num_workers();
        int topologyTasks = topSummary.get_num_tasks();
        LOG.info("getting topology info for {}", topologyName);
        TopologyInfo topInfo = client.getTopologyInfo(topSummary.get_id());
        List<ExecutorSummary> executorSummaries = topInfo.get_executors();
        // totals across all executors
        long totalTransferred = 0L;
        long totalEmitted = 0L;
        long totalAcked = 0L;
        long totalFailed = 0L;
        // spout-only aggregates
        int spoutExecCount = 0;
        double spoutLatencySum = 0.0;
        long spoutEmitted = 0L;
        long spoutTransferred = 0L;
        for (ExecutorSummary executorSummary : executorSummaries) {
            ExecutorStats executorStats = executorSummary.get_stats();
            if (executorStats == null) {
                continue;
            }
            ExecutorSpecificStats executorSpecificStats = executorStats.get_specific();
            if (executorSpecificStats == null) {
                // cannot attribute counts to spout/bolt; skip this executor
                continue;
            }
            // transferred totals for the ":all-time" window, excluding system streams
            Map<String, Map<String, Long>> transferred = executorStats.get_transferred();
            Map<String, Long> txMap = transferred.get(":all-time");
            if (txMap == null) {
                continue;
            }
            for (String key : txMap.keySet()) {
                if (!Utils.isSystemId(key)) {
                    Long count = txMap.get(key);
                    totalTransferred += count;
                    if (executorSpecificStats.is_set_spout()) {
                        spoutTransferred += count;
                    }
                }
            }
            LOG.info("*** read transferred totals...");
            // emitted totals
            Map<String, Map<String, Long>> emitted = executorStats.get_emitted();
            if (emitted != null) {
                LOG.info("emitted: {}", emitted);
                Map<String, Long> emMap = emitted.get(":all-time");
                // guard added: the window may be absent, mirroring the txMap check above
                if (emMap != null) {
                    for (String key : emMap.keySet()) {
                        if (!Utils.isSystemId(key)) {
                            Long count = emMap.get(key);
                            totalEmitted += count;
                            if (executorSpecificStats.is_set_spout()) {
                                spoutEmitted += count;
                            }
                        }
                    }
                }
            }
            LOG.info("*** read emitted totals...");
            // named union accessor replaces the magic thrift field id isSet(2)
            if (executorSpecificStats.is_set_spout()) {
                LOG.info("Found a spout.");
                SpoutStats spoutStats = executorSpecificStats.get_spout();
                Map<String, Long> acked = spoutStats.get_acked().get(":all-time");
                if (acked != null) {
                    for (String key : acked.keySet()) {
                        totalAcked += acked.get(key);
                    }
                }
                Map<String, Long> failed = spoutStats.get_failed().get(":all-time");
                if (failed != null) {
                    for (String key : failed.keySet()) {
                        totalFailed += failed.get(key);
                    }
                }
                // average complete latency over this spout executor's streams
                double total = 0.0;
                Map<String, Double> vals = spoutStats.get_complete_ms_avg().get(":all-time");
                for (String key : vals.keySet()) {
                    total += vals.get(key);
                }
                spoutExecCount++;
                spoutLatencySum += total / vals.size();
            }
            // named union accessor replaces the magic thrift field id isSet(1)
            if (executorSpecificStats.is_set_bolt()) {
                LOG.info("ignoring bolt status.");
            }
        }
        // avoid NaN when the topology exposes no spout executors
        double avgLatency = spoutExecCount > 0 ? spoutLatencySum / spoutExecCount : 0.0;
        LOG.info("====== RESULTS ======");
        LOG.info("Total emitted: {}", totalEmitted);
        LOG.info("Total transferred: {}", totalTransferred);
        LOG.info("Total avg latency: {}", avgLatency);
        LOG.info("Spout emitted: {}", spoutEmitted);
        LOG.info("Spout transferred: {}", spoutTransferred);
        LOG.info("Total Acked: {}", totalAcked);
        LOG.info("Total Failed: {}", totalFailed);
        ret.totalEmitted = totalEmitted;
        ret.totalTransferred = totalTransferred;
        ret.totalAcked = totalAcked;
        ret.totalFailed = totalFailed;
        ret.totalLatency = avgLatency;
        ret.spoutEmitted = spoutEmitted;
        ret.spoutTransferred = spoutTransferred;
        ret.sampleTime = System.currentTimeMillis();
        ret.numSupervisors = clusterSummary.get_supervisors_size();
        ret.totalSlots = totalSlots;
        ret.usedSlots = usedSlots;
        ret.numWorkers = topologyWorkers;
        ret.numExecutors = topologyExecutors;
        ret.numTasks = topologyTasks;
        ret.spoutExecutors = spoutExecCount;
        return ret;
    }

    /**
     * Finds the summary for the named topology in the cluster summary.
     *
     * @return the matching summary, or null if no topology has that name
     */
    public static TopologySummary getTopologySummary(ClusterSummary cs, String name) {
        for (TopologySummary ts : cs.get_topologies()) {
            if (name.equals(ts.get_name())) {
                LOG.info("*** returning topology summary");
                return ts;
            }
        }
        return null;
    }

    // ---------------------------------------------------------------- getters

    public long getSampleTime() {
        return sampleTime;
    }

    public long getTotalTransferred() {
        return totalTransferred;
    }

    public long getTotalEmitted() {
        return totalEmitted;
    }

    public long getTotalAcked() {
        return totalAcked;
    }

    public long getTotalFailed() {
        return totalFailed;
    }

    public double getTotalLatency() {
        return totalLatency;
    }

    public long getSpoutEmitted() {
        return spoutEmitted;
    }

    public long getSpoutTransferred() {
        return spoutTransferred;
    }

    public int getNumSupervisors() {
        return numSupervisors;
    }

    public int getNumWorkers() {
        return numWorkers;
    }

    public int getNumTasks() {
        return numTasks;
    }

    public int getTotalSlots() {
        return totalSlots;
    }

    public int getSpoutExecutors(){
        return this.spoutExecutors;
    }

    public int getNumExecutors() {
        return this.numExecutors;
    }

    public int getUsedSlots() {
        return this.usedSlots;
    }
}
| |
/*
* AbstractSampleLAICPMSRawDataManagerDialog.java
*
*
* Copyright 2006-2018 James F. Bowring, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.UPb_Redux.dialogs.sampleManagers.rawDataManagers;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.io.FileNotFoundException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Vector;
import org.earthtime.UPb_Redux.ReduxConstants.ANALYSIS_PURPOSE;
import org.earthtime.UPb_Redux.aliquots.UPbReduxAliquot;
import org.earthtime.dialogs.DialogEditor;
import org.earthtime.UPb_Redux.exceptions.BadImportedCSVLegacyFileException;
import org.earthtime.UPb_Redux.exceptions.BadLabDataException;
import org.earthtime.UPb_Redux.fractions.FractionI;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbFractionI;
import org.earthtime.reduxLabData.ReduxLabData;
import org.earthtime.UPb_Redux.samples.UPbSampleInterface;
import org.earthtime.UPb_Redux.samples.sampleImporters.AbstractSampleImporterFromLegacyCSVFile;
import org.earthtime.aliquots.AliquotInterface;
import org.earthtime.dataDictionaries.MineralTypes;
import org.earthtime.dataDictionaries.SampleRegistries;
import org.earthtime.exceptions.ETException;
import org.earthtime.exceptions.ETWarningDialog;
import org.earthtime.fractions.ETFractionInterface;
import org.earthtime.ratioDataModels.AbstractRatiosDataModel;
import org.earthtime.samples.SampleInterface;
/**
*
* @author James F. Bowring
*/
public abstract class AbstractSampleLAICPMSRawDataManagerDialog extends DialogEditor {
// The sample being edited by this dialog; injected via the constructor.
private SampleInterface mySample = null;
// Most-recently-used folder for importing fraction files.
private File importFractionFolderMRU;
// Presumably set true once fractions were successfully added (see setInitialized call
// in saveSampleData) — TODO confirm against external readers of this flag.
private boolean initialized = false;
// NOTE(review): not referenced in the visible portion of this class.
private boolean newSample = false;
// Importer used for bulk import of fractions from a legacy CSV file.
private AbstractSampleImporterFromLegacyCSVFile converter;
/**
 * Creates new form AbstractSampleLegacyManagerDialog.
 *
 * @param parent parent frame for this dialog
 * @param modal whether the dialog is modal
 * @param dataTypeTitle prefix prepended to the sample-type label text
 * @param sample the sample to manage
 * @param converter importer used for bulk CSV import of fractions
 * @param importFractionFolderMRU most-recently-used import folder
 */
public AbstractSampleLAICPMSRawDataManagerDialog (
java.awt.Frame parent,
boolean modal,
String dataTypeTitle,
SampleInterface sample,
AbstractSampleImporterFromLegacyCSVFile converter,
File importFractionFolderMRU ) {
super( parent, modal );
this.importFractionFolderMRU = importFractionFolderMRU;
// build the generated UI before initSampleFields populates it
initComponents();
this.mySample = sample;
initSampleFields();
if ( sample.getFractions().size() > 0 ) {
// we are in edit mode: the import source/destination panels only apply to a new import
fractionDestinationPanel_panel.setVisible( false );
fractionSourcePanel_panel.setVisible( false );
}
sampleType_label.setText( dataTypeTitle + sampleType_label.getText() );
this.converter = converter;
}
/**
 * Sizes the dialog to its fixed design size (480 x 670 pixels).
 */
public void setSize () {
setSize( 480, 670 );
}
/**
 * Gets the most-recently-used folder for fraction imports.
 *
 * @return the MRU import folder
 */
public File getImportFractionFolderMRU () {
return importFractionFolderMRU;
}
/**
 * Sets the most-recently-used folder for fraction imports.
 *
 * @param importFractionFolderMRU the new MRU import folder
 */
public void setImportFractionFolderMRU ( File importFractionFolderMRU ) {
this.importFractionFolderMRU = importFractionFolderMRU;
}
// Validates the sample's IGSN against its registry, shows the result in the label,
// and records the outcome on the sample. Skipped entirely once the sample is
// archived in a registry (archived IDs are treated as fixed).
private void validateSampleID () {
if ( ! mySample.isArchivedInRegistry() ) {
boolean valid = SampleRegistries.isSampleIdentifierValidAtRegistry(//
mySample.getSampleIGSN() );
validSampleID_label.setText( (String) (valid ? "Sample ID is Valid at registry." : "Sample ID is NOT valid at registry.") );
mySample.setValidatedSampleIGSN( valid );
}
}
// Populates every UI field/chooser from the current sample and wires the listeners
// that write user edits back into the sample. Must run after initComponents().
private void initSampleFields () {
// init input fields
// name and IGSN are editable only while the sample is NOT archived in a registry
sampleName_text.setDocument(
new UnDoAbleDocument( sampleName_text, ! mySample.isArchivedInRegistry() ) );
sampleName_text.setText( getMySample().getSampleName() );
sampleIGSN_text.setDocument(
new UnDoAbleDocument( sampleIGSN_text, ! mySample.isArchivedInRegistry() ) );
sampleIGSN_text.setText( getMySample().getSampleIGSNnoRegistry() );
for (SampleRegistries sr : SampleRegistries.values()) {
sampleRegistryChooser.addItem( sr );
}
sampleRegistryChooser.setEnabled( ! mySample.isArchivedInRegistry() );
sampleRegistryChooser.setSelectedItem( mySample.getSampleRegistry() );
// picking a registry re-prefixes the IGSN with that registry's code and re-validates it
sampleRegistryChooser.addActionListener( new ActionListener() {
@Override
public void actionPerformed ( ActionEvent e ) {
mySample.setSampleIGSN( ((SampleRegistries) sampleRegistryChooser.getSelectedItem()).getCode() + "." + sampleIGSN_text.getText() );
validateSampleID();
}
} );
// april 2011
validateSampleID();
sampleNotes_textArea.setDocument( new UnDoAbleDocument( sampleNotes_textArea, true ) );
sampleNotes_textArea.setText( getMySample().getSampleAnnotations() );
// init display fields - html allows multi-line
sampleReduxFileName_label.setText(
"<html><p>" + getMySample().getReduxSampleFilePath() + "</p></html>" );
sampleReduxFileName_label.setToolTipText( getMySample().getReduxSampleFilePath() );
physicalConstantsModelChooser.removeAllItems();
ArrayList<AbstractRatiosDataModel> physicalConstantsModels = ReduxLabData.getInstance().getPhysicalConstantsModels();
// starts at index 1 when more than one model exists — presumably index 0 is a placeholder
// entry; TODO confirm against ReduxLabData's model list ordering
for (int i = (physicalConstantsModels.size() > 1 ? 1 : 0); i < physicalConstantsModels.size(); i ++) {
physicalConstantsModelChooser.addItem( physicalConstantsModels.get( i ).getNameAndVersion() );
}
physicalConstantsModelChooser.setSelectedIndex( 0 );
try {
physicalConstantsModelChooser.setSelectedItem( getMySample().getPhysicalConstantsModel().getNameAndVersion() );
} catch (BadLabDataException ex) {
new ETWarningDialog(ex).setVisible(true);
}
// set up StandardMineral chooser
standardMineralNameChooser.removeAllItems();
for (int i = 0; i < MineralTypes.values().length; i ++) {
standardMineralNameChooser.addItem( MineralTypes.values()[i].getName() );
}
standardMineralNameChooser.setSelectedItem( mySample.getMineralName() );
standardMineralNameChooser.addItemListener( new MineralNameItemListener() );
// set up analysisPurposeChooser
analysisPurposeChooser.removeAllItems();
for (ANALYSIS_PURPOSE ap : ANALYSIS_PURPOSE.values()) {
analysisPurposeChooser.addItem( ap.toString() );
}
analysisPurposeChooser.setSelectedItem( mySample.getAnalysisPurpose().toString() );
analysisPurposeChooser.addItemListener( new AnalysisPurposeItemListener() );
// choose how Tera-Wasserburg rho is handled for legacy data
if ( getMySample().isCalculateTWrhoForLegacyData() ) {
TWCalculateRho_radioBut.setSelected( true );
} else {
TWZeroRho_radioBut.setSelected( true );
}
}
// Pushes a newly selected mineral name into the sample; all other item events are ignored.
class MineralNameItemListener implements ItemListener {
    @Override
    public void itemStateChanged ( ItemEvent evt ) {
        if ( evt.getStateChange() != ItemEvent.SELECTED ) {
            // deselection carries no information we need
            return;
        }
        mySample.setMineralName( (String) evt.getItem() );
    }
}
// Pushes a newly selected analysis purpose into the sample; deselection events are ignored.
class AnalysisPurposeItemListener implements ItemListener {
    // @Override was missing here although the sibling MineralNameItemListener declares it;
    // added for consistency and compile-time signature checking.
    @Override
    public void itemStateChanged ( ItemEvent evt ) {
        if ( evt.getStateChange() == ItemEvent.SELECTED ) {
            // the chooser holds the enum's toString form, so map back via valueOf
            mySample.setAnalysisPurpose( ANALYSIS_PURPOSE.valueOf( (String) evt.getItem() ) );
        }
    }
}
/**
 * Persists the state of the dialog's controls into the sample: name, sample ID
 * (registry code + id), registry, annotations, physical constants model,
 * mineral name, analysis purpose, Terra-Wasserburg rho handling, and — on
 * first save — the single aliquot with its fractions (manual or bulk CSV).
 *
 * @throws ETException declared for callers; in this body failures are shown
 *         via ETWarningDialog rather than thrown — TODO confirm intent
 */
private void saveSampleData ()
throws ETException {
// validate sample name
// NOTE(review): a blank sample or aliquot name silently aborts the save with
// no user feedback — confirm this is intended rather than throwing ETException.
if ( (sampleName_text.getText().trim().length() == 0)
|| (aliquotName_text.getText().trim().length() == 0) ) {
return;
}
mySample.setSampleName( sampleName_text.getText().trim() );
// Sample ID is stored as "<registry code>.<entered id>".
mySample.setSampleIGSN( ((SampleRegistries) sampleRegistryChooser.getSelectedItem()).getCode() + "." + sampleIGSN_text.getText().trim() );
mySample.setSampleRegistry( (SampleRegistries) sampleRegistryChooser.getSelectedItem() );
mySample.setSampleAnnotations( sampleNotes_textArea.getText() );
// Fetch the current model name first so we only replace the physical
// constants model when the chooser actually differs (case-insensitive).
String currentPhysicalConstantsModelName = "";
try {
currentPhysicalConstantsModelName = getMySample().getPhysicalConstantsModel().getNameAndVersion();
} catch (BadLabDataException ex) {
new ETWarningDialog(ex).setVisible(true);
}
if ( ! ((String) physicalConstantsModelChooser.getSelectedItem()).equalsIgnoreCase( currentPhysicalConstantsModelName ) ) {
try {
getMySample().setPhysicalConstantsModel(
ReduxLabData.getInstance().
getAPhysicalConstantsModel( ((String) physicalConstantsModelChooser.getSelectedItem()) ) );
} catch (BadLabDataException ex) {
new ETWarningDialog(ex).setVisible(true);
}
}
// in legacy mode we only allow one aliquot
// An empty fraction list means this is the first save: create the aliquot
// and populate it either manually or from the CSV importer.
if ( getMySample().getFractions().isEmpty() ) {
AliquotInterface myAliquot = getMySample().addNewAliquot( aliquotName_text.getText().trim() );
// May 2010 allows publication of legacy results
((UPbReduxAliquot) myAliquot).setCompiled( false );
int myAliquotNumber = ((UPbReduxAliquot) myAliquot).getAliquotNumber();
// test for manual mode or bulk import from CSV file
if ( manualMode_radioBut.isSelected() ) {
try {
((UPbSampleInterface)mySample).addDefaultUPbLegacyFractionToAliquot( myAliquotNumber );
setInitialized( true );
getMySample().setChanged( true );
} catch (BadLabDataException ex) {
new ETWarningDialog(ex).setVisible(true);
}
} else {
// bulk mode
try {
converter.setMruFolder( importFractionFolderMRU );
getMySample().addFractionsVector( converter.readInFractions(), myAliquotNumber );
myAliquot.setAliquotName( converter.getAliquotName() );
setInitialized( true );
getMySample().setChanged( true );
setImportFractionFolderMRU( converter.getMruFolder() );
} catch (FileNotFoundException fileNotFoundException) {
// NOTE(review): a missing CSV file is silently ignored (no dialog, no
// initialization) — presumably the user cancelled the chooser; confirm.
} catch (BadImportedCSVLegacyFileException ex) {
new ETWarningDialog(ex).setVisible(true);
}
}
}
// Radio-button pair: zero rho vs. calculated rho for legacy T-W data.
if ( TWZeroRho_radioBut.isSelected() ) {
getMySample().setCalculateTWrhoForLegacyData( false );
} else {
getMySample().setCalculateTWrhoForLegacyData( true );
}
// moved outside conditional oct 2010 and added MineralName, etc ;;June 2010 add physical constants model
// Apply per-fraction settings to every fraction (also on re-save of an
// existing sample): constants model, mineral name, zircon flag, legacy
// marker, and the chosen T-W rho treatment.
for (ETFractionInterface f : getMySample().getFractions()) {
try {
f.setPhysicalConstantsModel( getMySample().getPhysicalConstantsModel() );
((FractionI)f).setMineralName( mySample.getMineralName() );
if ( mySample.getMineralName().equalsIgnoreCase( "zircon" ) ) {
((FractionI)f).setZircon( true );
} else {
((FractionI)f).setZircon( false );
}
f.setLegacy( true );
if ( TWZeroRho_radioBut.isSelected() ) {
// set all T-W to zero
f.getRadiogenicIsotopeRatioByName( "rhoR207_206r__r238_206r" )//
.setValue( BigDecimal.ZERO );
} else {
// calculate all T-W
((UPbFractionI) f).calculateTeraWasserburgRho();
}
} catch (BadLabDataException ex) {
new ETWarningDialog(ex).setVisible(true);
}
}
// there should be only one aliquot
// Propagate the sample-level analysis purpose to each active aliquot.
Vector<AliquotInterface> aliquots = mySample.getActiveAliquots();
for (AliquotInterface a : aliquots) {
a.setAnalysisPurpose( mySample.getAnalysisPurpose() );
}
}
/**
 * Gets the sample being created/edited by this dialog.
 *
 * @return the current sample
 */
public SampleInterface getMySample () {
return mySample;
}
/**
 * Sets the sample to be created/edited by this dialog.
 *
 * @param mySample the sample to edit
 */
public void setMySample ( SampleInterface mySample ) {
this.mySample = mySample;
}
/**
 * Reports whether the sample data was successfully saved by this dialog.
 *
 * @return true once saveSampleData has populated the sample
 */
public boolean isInitialized () {
return initialized;
}
/**
 * Records whether the sample data was successfully saved.
 *
 * @param isSaved true if the sample has been populated/saved
 */
public void setInitialized ( boolean isSaved ) {
this.initialized = isSaved;
}
/**
 * Reports whether this dialog is creating a brand-new sample.
 *
 * @return true if a new sample is being created
 */
public boolean isNewSample () {
return newSample;
}
/**
 * Marks whether this dialog is creating a brand-new sample.
 *
 * @param newSample true if a new sample is being created
 */
public void setNewSample ( boolean newSample ) {
this.newSample = newSample;
}
/** This method is called from within the constructor to
 * initialize the form: it instantiates every Swing component, wires the
 * button groups and listeners, and builds the GroupLayout for the dialog.
 * WARNING: Do NOT modify this code. The content of this method is
 * always regenerated by the NetBeans Form Editor from the .form file;
 * hand edits will be lost. Post-construction tweaks belong in the
 * constructor after the initComponents() call.
 */
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
sourceOfFractionsOptions_buttonGroup = new javax.swing.ButtonGroup();
destinationOfFractionsOptions_buttonGroup = new javax.swing.ButtonGroup();
updateMode_buttonGroup = new javax.swing.ButtonGroup();
modeChooser_buttonGroup = new javax.swing.ButtonGroup();
TWsource = new javax.swing.ButtonGroup();
jPanel1 = new javax.swing.JPanel();
sampleName_label = new javax.swing.JLabel();
sampleName_text = new javax.swing.JTextField();
sampleIGSN_label = new javax.swing.JLabel();
sampleIGSN_text = new javax.swing.JTextField();
sampleReduxFile_label = new javax.swing.JLabel();
sampleReduxFileName_label = new javax.swing.JLabel();
sampleNotes_label = new javax.swing.JLabel();
sampleNotes_scrollPane = new javax.swing.JScrollPane();
sampleNotes_textArea = new javax.swing.JTextArea();
fractionDestinationPanel_panel = new javax.swing.JPanel();
aliquotName_text = new javax.swing.JTextField();
aliquotName_label = new javax.swing.JLabel();
physicalConstantsModelChooser = new javax.swing.JComboBox<String>();
defaultHeader_label = new javax.swing.JLabel();
fractionSourcePanel_panel = new javax.swing.JPanel();
manualMode_radioBut = new javax.swing.JRadioButton();
bulkMode_radioBut = new javax.swing.JRadioButton();
standardMineralNameChooser = new javax.swing.JComboBox<String>();
chooseStandardMineral_label = new javax.swing.JLabel();
chooseAnalysisPurpose_label = new javax.swing.JLabel();
analysisPurposeChooser = new javax.swing.JComboBox<String>();
chooseTWrho_label = new javax.swing.JLabel();
TWZeroRho_radioBut = new javax.swing.JRadioButton();
TWCalculateRho_radioBut = new javax.swing.JRadioButton();
sampleIGSN_label1 = new javax.swing.JLabel();
sampleRegistryChooser = new javax.swing.JComboBox<SampleRegistries>();
validSampleID_label = new javax.swing.JLabel();
jPanel2 = new javax.swing.JPanel();
close = new javax.swing.JButton();
saveAndClose = new javax.swing.JButton();
sampleType_panel = new javax.swing.JPanel();
sampleType_label = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setResizable(false);
jPanel1.setBackground(new java.awt.Color(245, 236, 206));
jPanel1.setBorder(javax.swing.BorderFactory.createEtchedBorder(javax.swing.border.EtchedBorder.RAISED));
jPanel1.setMaximumSize(new java.awt.Dimension(480, 620));
sampleName_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
sampleName_label.setText("Lab's Local Sample Name:");
sampleName_text.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
sampleName_text.setHorizontalAlignment(javax.swing.JTextField.CENTER);
sampleName_text.setText("Sample Name");
sampleIGSN_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
sampleIGSN_label.setText("Sample ID:");
sampleIGSN_text.setEditable(false);
sampleIGSN_text.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
sampleIGSN_text.setHorizontalAlignment(javax.swing.JTextField.CENTER);
sampleIGSN_text.setText("<none>");
sampleIGSN_text.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusLost(java.awt.event.FocusEvent evt) {
sampleIGSN_textFocusLost(evt);
}
});
sampleReduxFile_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
sampleReduxFile_label.setText("File path for this Sample:");
sampleReduxFileName_label.setText("<Not Saved>");
sampleReduxFileName_label.setVerticalAlignment(javax.swing.SwingConstants.TOP);
sampleReduxFileName_label.setAutoscrolls(true);
sampleNotes_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
sampleNotes_label.setText("Notes about this Sample:");
sampleNotes_textArea.setColumns(20);
sampleNotes_textArea.setRows(5);
sampleNotes_textArea.setMaximumSize(new java.awt.Dimension(250, 80));
sampleNotes_textArea.setPreferredSize(new java.awt.Dimension(250, 80));
sampleNotes_scrollPane.setViewportView(sampleNotes_textArea);
fractionDestinationPanel_panel.setBackground(new java.awt.Color(255, 255, 224));
fractionDestinationPanel_panel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Destination of Fractions", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Lucida Grande", 0, 13), new java.awt.Color(204, 0, 0))); // NOI18N
aliquotName_text.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
aliquotName_text.setHorizontalAlignment(javax.swing.JTextField.CENTER);
aliquotName_text.setText("aliquot");
aliquotName_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
aliquotName_label.setText("Enter your aliquot name:");
org.jdesktop.layout.GroupLayout fractionDestinationPanel_panelLayout = new org.jdesktop.layout.GroupLayout(fractionDestinationPanel_panel);
fractionDestinationPanel_panel.setLayout(fractionDestinationPanel_panelLayout);
fractionDestinationPanel_panelLayout.setHorizontalGroup(
fractionDestinationPanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(fractionDestinationPanel_panelLayout.createSequentialGroup()
.addContainerGap()
.add(aliquotName_label)
.add(18, 18, 18)
.add(aliquotName_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 195, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addContainerGap(62, Short.MAX_VALUE))
);
fractionDestinationPanel_panelLayout.setVerticalGroup(
fractionDestinationPanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(org.jdesktop.layout.GroupLayout.TRAILING, fractionDestinationPanel_panelLayout.createSequentialGroup()
.add(fractionDestinationPanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(aliquotName_label)
.add(aliquotName_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
physicalConstantsModelChooser.setBackground(new java.awt.Color(245, 236, 206));
defaultHeader_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
defaultHeader_label.setForeground(new java.awt.Color(204, 51, 0));
defaultHeader_label.setText("Set Physical Constants Model for this Sample:");
fractionSourcePanel_panel.setBackground(new java.awt.Color(255, 255, 224));
fractionSourcePanel_panel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Source of Fractions", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Lucida Grande", 0, 13), new java.awt.Color(204, 0, 0))); // NOI18N
modeChooser_buttonGroup.add(manualMode_radioBut);
manualMode_radioBut.setText("Manual Mode");
modeChooser_buttonGroup.add(bulkMode_radioBut);
bulkMode_radioBut.setSelected(true);
bulkMode_radioBut.setText("Bulk Mode from CSV file");
org.jdesktop.layout.GroupLayout fractionSourcePanel_panelLayout = new org.jdesktop.layout.GroupLayout(fractionSourcePanel_panel);
fractionSourcePanel_panel.setLayout(fractionSourcePanel_panelLayout);
fractionSourcePanel_panelLayout.setHorizontalGroup(
fractionSourcePanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(fractionSourcePanel_panelLayout.createSequentialGroup()
.addContainerGap()
.add(bulkMode_radioBut)
.add(29, 29, 29)
.add(manualMode_radioBut)
.addContainerGap(80, Short.MAX_VALUE))
);
fractionSourcePanel_panelLayout.setVerticalGroup(
fractionSourcePanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(fractionSourcePanel_panelLayout.createSequentialGroup()
.add(fractionSourcePanel_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(bulkMode_radioBut)
.add(manualMode_radioBut))
.addContainerGap(12, Short.MAX_VALUE))
);
chooseStandardMineral_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
chooseStandardMineral_label.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
chooseStandardMineral_label.setText(" Specify standard mineral for all fractions:");
chooseAnalysisPurpose_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
chooseAnalysisPurpose_label.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
chooseAnalysisPurpose_label.setText(" Specify analysis purpose for this sample:");
chooseTWrho_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
chooseTWrho_label.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
chooseTWrho_label.setText("Set source of Terra-Wasserberg rho:");
TWsource.add(TWZeroRho_radioBut);
TWZeroRho_radioBut.setText("Zero");
TWsource.add(TWCalculateRho_radioBut);
TWCalculateRho_radioBut.setSelected(true);
TWCalculateRho_radioBut.setText("Calculated");
sampleIGSN_label1.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
sampleIGSN_label1.setText("Registry:");
sampleRegistryChooser.setBackground(new java.awt.Color(245, 236, 206));
validSampleID_label.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
validSampleID_label.setForeground(new java.awt.Color(204, 51, 0));
validSampleID_label.setText("Sample ID is Valid at registry.");
org.jdesktop.layout.GroupLayout jPanel1Layout = new org.jdesktop.layout.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel1Layout.createSequentialGroup()
.addContainerGap(39, Short.MAX_VALUE)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING, false)
.add(org.jdesktop.layout.GroupLayout.TRAILING, chooseTWrho_label)
.add(org.jdesktop.layout.GroupLayout.TRAILING, chooseStandardMineral_label)
.add(org.jdesktop.layout.GroupLayout.TRAILING, chooseAnalysisPurpose_label))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 9, Short.MAX_VALUE)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(org.jdesktop.layout.GroupLayout.TRAILING, analysisPurposeChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 174, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(org.jdesktop.layout.GroupLayout.TRAILING, standardMineralNameChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 174, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(org.jdesktop.layout.GroupLayout.TRAILING, jPanel1Layout.createSequentialGroup()
.add(TWZeroRho_radioBut)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(TWCalculateRho_radioBut)))
.addContainerGap())
.add(org.jdesktop.layout.GroupLayout.TRAILING, jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(fractionDestinationPanel_panel, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.add(20, 20, 20))
.add(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(defaultHeader_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(physicalConstantsModelChooser, 0, 182, Short.MAX_VALUE)
.add(20, 20, 20))
.add(org.jdesktop.layout.GroupLayout.TRAILING, jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(fractionSourcePanel_panel, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
.add(org.jdesktop.layout.GroupLayout.TRAILING, jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
.add(validSampleID_label)
.add(jPanel1Layout.createSequentialGroup()
.add(sampleIGSN_label1)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(sampleRegistryChooser, 0, 150, Short.MAX_VALUE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(sampleIGSN_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(sampleIGSN_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 158, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.add(jPanel1Layout.createSequentialGroup()
.add(sampleName_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(sampleName_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 199, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)))
.add(20, 20, 20))
.add(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(sampleNotes_scrollPane, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 436, Short.MAX_VALUE)
.addContainerGap())
.add(jPanel1Layout.createSequentialGroup()
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel1Layout.createSequentialGroup()
.add(20, 20, 20)
.add(sampleReduxFileName_label, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 407, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.add(sampleReduxFile_label)))
.add(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.add(sampleNotes_label)))
.addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel1Layout.createSequentialGroup()
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(sampleName_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(sampleName_label))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(sampleRegistryChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(sampleIGSN_text, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(sampleIGSN_label)
.add(sampleIGSN_label1))
.add(4, 4, 4)
.add(validSampleID_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(defaultHeader_label)
.add(physicalConstantsModelChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(fractionSourcePanel_panel, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
.add(fractionDestinationPanel_panel, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 61, Short.MAX_VALUE)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(chooseAnalysisPurpose_label)
.add(analysisPurposeChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(chooseStandardMineral_label)
.add(standardMineralNameChooser, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
.add(6, 6, 6)
.add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(TWCalculateRho_radioBut)
.add(chooseTWrho_label)
.add(TWZeroRho_radioBut))
.add(20, 20, 20)
.add(sampleNotes_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(sampleNotes_scrollPane, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(sampleReduxFile_label)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(sampleReduxFileName_label, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 64, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel2.setBorder(javax.swing.BorderFactory.createBevelBorder(javax.swing.border.BevelBorder.LOWERED));
close.setForeground(new java.awt.Color(255, 51, 0));
close.setText("Cancel");
close.setMargin(new java.awt.Insets(0, 0, 0, 0));
close.setPreferredSize(new java.awt.Dimension(110, 23));
close.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
closeActionPerformed(evt);
}
});
saveAndClose.setForeground(new java.awt.Color(255, 51, 0));
saveAndClose.setText("OK");
saveAndClose.setMargin(new java.awt.Insets(0, 0, 0, 0));
saveAndClose.setPreferredSize(new java.awt.Dimension(110, 23));
saveAndClose.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
saveAndCloseActionPerformed(evt);
}
});
org.jdesktop.layout.GroupLayout jPanel2Layout = new org.jdesktop.layout.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel2Layout.createSequentialGroup()
.addContainerGap()
.add(saveAndClose, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 168, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(105, 105, 105)
.add(close, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 168, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addContainerGap(18, Short.MAX_VALUE))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(saveAndClose, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 32, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(close, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 32, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
);
sampleType_panel.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0)));
sampleType_label.setBackground(new java.awt.Color(255, 204, 102));
sampleType_label.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
sampleType_label.setText(" R A W D A T A M O D E for a Sample");
sampleType_label.setOpaque(true);
org.jdesktop.layout.GroupLayout sampleType_panelLayout = new org.jdesktop.layout.GroupLayout(sampleType_panel);
sampleType_panel.setLayout(sampleType_panelLayout);
sampleType_panelLayout.setHorizontalGroup(
sampleType_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(sampleType_label, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 477, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
);
sampleType_panelLayout.setVerticalGroup(
sampleType_panelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(sampleType_label)
);
org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(jPanel2, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.add(sampleType_panel, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(jPanel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.add(sampleType_panel, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(jPanel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
.add(jPanel2, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
// Cancel button handler: dismiss the dialog without saving any changes.
private void closeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_closeActionPerformed
close();
}//GEN-LAST:event_closeActionPerformed
// OK button handler: persist the dialog contents into the sample, then close.
private void saveAndCloseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_saveAndCloseActionPerformed
try {
saveSampleData();
close();
} catch (ETException ex) {
// Surface save failures to the user; the dialog stays open for correction.
new ETWarningDialog(ex).setVisible(true);
}
}//GEN-LAST:event_saveAndCloseActionPerformed
// Re-validate the sample ID against the selected registry whenever the field loses focus.
private void sampleIGSN_textFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_sampleIGSN_textFocusLost
validateSampleID();
}//GEN-LAST:event_sampleIGSN_textFocusLost
// Swing components generated by the NetBeans Form Editor; edit via the GUI
// builder (.form file), not by hand.
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JRadioButton TWCalculateRho_radioBut;
private javax.swing.JRadioButton TWZeroRho_radioBut;
private javax.swing.ButtonGroup TWsource;
private javax.swing.JLabel aliquotName_label;
private javax.swing.JTextField aliquotName_text;
private javax.swing.JComboBox<String> analysisPurposeChooser;
private javax.swing.JRadioButton bulkMode_radioBut;
private javax.swing.JLabel chooseAnalysisPurpose_label;
private javax.swing.JLabel chooseStandardMineral_label;
private javax.swing.JLabel chooseTWrho_label;
private javax.swing.JButton close;
private javax.swing.JLabel defaultHeader_label;
private javax.swing.ButtonGroup destinationOfFractionsOptions_buttonGroup;
private javax.swing.JPanel fractionDestinationPanel_panel;
private javax.swing.JPanel fractionSourcePanel_panel;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JRadioButton manualMode_radioBut;
private javax.swing.ButtonGroup modeChooser_buttonGroup;
private javax.swing.JComboBox<String> physicalConstantsModelChooser;
private javax.swing.JLabel sampleIGSN_label;
private javax.swing.JLabel sampleIGSN_label1;
private javax.swing.JTextField sampleIGSN_text;
private javax.swing.JLabel sampleName_label;
private javax.swing.JTextField sampleName_text;
private javax.swing.JLabel sampleNotes_label;
private javax.swing.JScrollPane sampleNotes_scrollPane;
private javax.swing.JTextArea sampleNotes_textArea;
private javax.swing.JLabel sampleReduxFileName_label;
private javax.swing.JLabel sampleReduxFile_label;
private javax.swing.JComboBox<SampleRegistries> sampleRegistryChooser;
private javax.swing.JLabel sampleType_label;
private javax.swing.JPanel sampleType_panel;
private javax.swing.JButton saveAndClose;
private javax.swing.ButtonGroup sourceOfFractionsOptions_buttonGroup;
private javax.swing.JComboBox<String> standardMineralNameChooser;
private javax.swing.ButtonGroup updateMode_buttonGroup;
private javax.swing.JLabel validSampleID_label;
// End of variables declaration//GEN-END:variables
/**
 * Gets the CSV importer used in bulk mode to read legacy fractions.
 *
 * @return the converter
 */
public AbstractSampleImporterFromLegacyCSVFile getConverter () {
return converter;
}
/**
 * Sets the CSV importer used in bulk mode to read legacy fractions.
 *
 * @param converter the converter to set
 */
public void setConverter ( AbstractSampleImporterFromLegacyCSVFile converter ) {
this.converter = converter;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.hadoop.serialization.dto.mapping;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.elasticsearch.hadoop.EsHadoopIllegalArgumentException;
import org.elasticsearch.hadoop.serialization.FieldType;
@SuppressWarnings("serial")
public class Field implements Serializable {
private static final Field[] NO_FIELDS = new Field[0];
private final String name;
private final FieldType type;
private final Field[] properties;
public Field(String name, FieldType type) {
this(name, type, (Field[]) NO_FIELDS);
}
public Field(String name, FieldType type, Collection<Field> properties) {
this(name, type, (properties != null ? properties.toArray(new Field[properties.size()]) : NO_FIELDS));
}
Field(String name, FieldType type, Field[] properties) {
this.name = name;
this.type = type;
this.properties = properties;
}
public Field[] properties() { return properties; }
public FieldType type() {
return type;
}
public String name() { return name; }
public static Field parseField(Map<String, Object> content) {
Iterator<Entry<String, Object>> iterator = content.entrySet().iterator();
return (iterator.hasNext() ? parseField(iterator.next(), null) : null);
}
public Field skipHeaders() {
return skipHeaders(this);
}
public Map<String, FieldType> toLookupMap() {
return toLookupMap(this);
}
private static Field skipHeaders(Field field) {
Field[] props = field.properties();
// handle the common case of mapping by removing the first field (mapping.)
if (props[0] != null && "mappings".equals(props[0].name()) && FieldType.OBJECT.equals(props[0].type())) {
// followed by <type> (index/type) removal
return props[0].properties()[0];
}
return field;
}
/**
* Returns the associated fields with the given mapping. Handles removal of mappings/<type>
*
* @param field
* @return
*/
public static Map<String, FieldType> toLookupMap(Field field) {
if (field == null) {
return Collections.<String, FieldType> emptyMap();
}
Map<String, FieldType> map = new LinkedHashMap<String, FieldType>();
for (Field nestedField : skipHeaders(field).properties()) {
add(map, nestedField, null);
}
return map;
}
static void add(Map<String, FieldType> fields, Field field, String parentName) {
String fieldName = (parentName != null ? parentName + "." + field.name() : field.name());
fields.put(fieldName, field.type());
if (FieldType.OBJECT == field.type()) {
for (Field nestedField : field.properties()) {
add(fields, nestedField, fieldName);
}
}
}
/**
 * Parses one mapping entry (field name or container -> definition map) into a
 * {@link Field}, recursing into nested objects.
 *
 * <p>Resolution rules, in order:
 * <ul>
 * <li>entry with a {@code type} of {@code multi_field}: pick the sub-field named
 *     like the entry as the default type; if absent, all sub-fields must share
 *     one type (corner case possible on ES 0.90), otherwise fail.</li>
 * <li>entry with any other {@code type}: keep it only if the type is relevant.</li>
 * <li>entry with no {@code type}: treat as an OBJECT and parse children; a lone
 *     OBJECT child named {@code properties} is flattened into this entry.</li>
 * </ul>
 *
 * @param entry       field name (or "properties"/container key) mapped to its definition
 * @param previousKey NOTE(review): currently unused by the body — looks like a
 *                    leftover from an earlier dotted-path scheme; confirm before removing
 * @return the parsed field, or {@code null} when the type is not relevant
 * @throws EsHadoopIllegalArgumentException on a non-map value or an ambiguous multi_field
 */
@SuppressWarnings("unchecked")
private static Field parseField(Entry<String, Object> entry, String previousKey) {
    // can be "type" or field name
    String key = entry.getKey();
    Object value = entry.getValue();

    // nested object
    if (value instanceof Map) {
        Map<String, Object> content = (Map<String, Object>) value;
        // check type first
        Object type = content.get("type");
        if (type instanceof String) {
            String typeString = type.toString();
            FieldType fieldType = FieldType.parse(typeString);

            // handle multi_field separately
            if (FieldType.MULTI_FIELD == fieldType) {
                // get fields
                Map<String, Object> fields = (Map<String, Object>) content.get("fields");
                // return default field - by convention the sub-field named like the entry itself
                Map<String, Object> defaultField = (Map<String, Object>) fields.get(key);
                FieldType defaultType = null;
                // check if there's no default field - corner case but is possible on 0.90
                // if so, check the field types and if all are the same, use that
                if (defaultField == null) {
                    String defaultFieldName = null;
                    for (Entry<String, Object> subfield : fields.entrySet()) {
                        Map<String, Object> subFieldDef = (Map<String, Object>) subfield.getValue();
                        FieldType subFieldType = FieldType.parse(subFieldDef.get("type").toString());
                        if (defaultType != null) {
                            // a second, different type makes the mapping ambiguous - bail out
                            if (defaultType != subFieldType) {
                                throw new EsHadoopIllegalArgumentException(
                                        String.format("Ambiguous mapping, multi_field [%s] provides no default field and subfields have different mapping types [%s=%s], [%s=%s]",
                                                key, defaultFieldName, defaultType, subfield.getKey(), subFieldType));
                            }
                        }
                        else {
                            // remember the first sub-field seen as the candidate default
                            defaultFieldName = subfield.getKey();
                            defaultType = subFieldType;
                        }
                    }
                }
                else {
                    defaultType = FieldType.parse(defaultField.get("type").toString());
                }
                return new Field(key, defaultType);
            }
            if (FieldType.isRelevant(fieldType)) {
                return new Field(key, fieldType);
            }
            else {
                // irrelevant type (e.g. unsupported) - drop the field entirely
                return null;
            }
        }

        // no type - iterate through types
        List<Field> fields = new ArrayList<Field>(content.size());
        for (Entry<String, Object> e : content.entrySet()) {
            if (e.getValue() instanceof Map) {
                Field fl = parseField(e, key);
                // flatten the "properties" wrapper object into the current entry
                if (fl != null && fl.type == FieldType.OBJECT && "properties".equals(fl.name)) {
                    return new Field(key, fl.type, fl.properties);
                }
                if (fl != null) {
                    fields.add(fl);
                }
            }
        }
        return new Field(key, FieldType.OBJECT, fields);
    }
    throw new EsHadoopIllegalArgumentException("invalid map received " + entry);
}
/**
 * Returns a {@code name=value} description of this field; OBJECT fields list
 * their nested properties instead of the type.
 */
@Override
public String toString() {
    return String.format("%s=%s", name, (type == FieldType.OBJECT ? Arrays.toString(properties) : type));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.dynamic_type;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TField;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolUtil;
import org.apache.thrift.protocol.TType;
/**
* DynamicSerDeFieldList.
*
*/
/**
 * DynamicSerDeFieldList.
 *
 * Grammar node for the production {@code Field()*}: the ordered list of fields
 * of a dynamically-described Thrift struct. Maintains lookup tables by field
 * id, by column name, and by declaration order, and knows how to
 * serialize/deserialize a row (represented as an {@code ArrayList<Object>})
 * against a {@link TProtocol}.
 */
public class DynamicSerDeFieldList extends DynamicSerDeSimpleNode implements
    Serializable {

  // private void writeObject(ObjectOutputStream out) throws IOException {
  // out.writeObject(types_by_column_name);
  // out.writeObject(ordered_types);
  // }

  // production: Field()*

  // mapping of the fieldid to the field
  private Map<Integer, DynamicSerDeTypeBase> types_by_id = null;
  // mapping of the column name to the field
  private Map<String, DynamicSerDeTypeBase> types_by_column_name = null;
  // fields in declaration order; positions here define the row layout
  private DynamicSerDeTypeBase[] ordered_types = null;

  // column name -> position in 'ordered_types'
  private Map<String, Integer> ordered_column_id_by_name = null;

  public DynamicSerDeFieldList(int i) {
    super(i);
  }

  public DynamicSerDeFieldList(thrift_grammar p, int i) {
    super(p, i);
  }

  /** Returns the i-th declared field (parse-tree child). */
  private DynamicSerDeField getField(int i) {
    return (DynamicSerDeField) jjtGetChild(i);
  }

  /** Returns all declared fields in declaration order. */
  final public DynamicSerDeField[] getChildren() {
    int size = jjtGetNumChildren();
    DynamicSerDeField[] result = new DynamicSerDeField[size];
    for (int i = 0; i < size; i++) {
      result[i] = (DynamicSerDeField) jjtGetChild(i);
    }
    return result;
  }

  private int getNumFields() {
    return jjtGetNumChildren();
  }

  /**
   * Builds the lookup tables (by id, by name, ordered) from the parse tree.
   * Idempotent: only runs on the first call ({@code types_by_id == null}).
   */
  public void initialize() {
    if (types_by_id == null) {
      // multiple means of lookup
      types_by_id = new HashMap<Integer, DynamicSerDeTypeBase>();
      types_by_column_name = new HashMap<String, DynamicSerDeTypeBase>();
      ordered_types = new DynamicSerDeTypeBase[jjtGetNumChildren()];
      ordered_column_id_by_name = new HashMap<String, Integer>();

      // put them in and also roll them up while we're at it
      // a Field contains a FieldType which in turn contains a type
      for (int i = 0; i < jjtGetNumChildren(); i++) {
        DynamicSerDeField mt = getField(i);

        DynamicSerDeTypeBase type = mt.getFieldType().getMyType();
        // types get initialized in case they need to setup any
        // internal data structures - e.g., DynamicSerDeStructBase
        type.initialize();
        type.fieldid = mt.fieldid;
        type.name = mt.name;

        types_by_id.put(Integer.valueOf(mt.fieldid), type);
        types_by_column_name.put(mt.name, type);
        ordered_types[i] = type;
        ordered_column_id_by_name.put(mt.name, i);
      }
    }
  }

  private DynamicSerDeTypeBase getFieldByFieldId(int i) {
    return types_by_id.get(i);
  }

  protected DynamicSerDeTypeBase getFieldByName(String fieldname) {
    return types_by_column_name.get(fieldname);
  }

  /**
   * Indicates whether fields can be out of order or missing. i.e., is it really
   * real thrift serialization. This is used by dynamicserde to do some
   * optimizations if it knows all the fields exist and are required and are
   * serialized in order. For now, those optimizations are only done for
   * DynamicSerDe serialized data so always set to false for now.
   */
  protected boolean isRealThrift = false;

  // scratch array reused across deserialize() calls; tracks which row slots
  // were populated so the rest can be nulled out (NOTE(review): shared state
  // makes this instance non-thread-safe - confirm callers never share it)
  protected boolean[] fieldsPresent;

  /**
   * Deserializes one struct from {@code iprot} into an {@code ArrayList<Object>}.
   *
   * <p>Fast path: when not real Thrift and a field is skippable, it is skipped
   * positionally without reading a field header. Otherwise (thrift_mode) field
   * headers are read and fields are matched by field id, tolerating unknown
   * ids, out-of-order fields, and an early STOP marker.
   *
   * @param reuse previously returned row to overwrite, or null to allocate
   * @param iprot protocol to read from
   * @return the populated row; missing fields are set to null
   */
  public Object deserialize(Object reuse, TProtocol iprot)
      throws SerDeException, TException, IllegalAccessException {
    ArrayList<Object> struct = null;

    if (reuse == null) {
      struct = new ArrayList<Object>(getNumFields());
      // pre-fill with nulls: one slot per declared field
      for (DynamicSerDeTypeBase orderedType : ordered_types) {
        struct.add(null);
      }
    } else {
      struct = (ArrayList<Object>) reuse;
      assert (struct.size() == ordered_types.length);
    }

    boolean fastSkips = iprot instanceof org.apache.hadoop.hive.serde2.thrift.SkippableTProtocol;

    // may need to strip away the STOP marker when in thrift mode
    boolean stopSeen = false;

    if (fieldsPresent == null) {
      fieldsPresent = new boolean[ordered_types.length];
    }
    Arrays.fill(fieldsPresent, false);

    // Read the fields.
    for (int i = 0; i < getNumFields(); i++) {
      DynamicSerDeTypeBase mt = null;
      TField field = null;

      if (!isRealThrift && getField(i).isSkippable()) {
        // PRE - all the fields are required and serialized in order - is
        // !isRealThrift
        mt = ordered_types[i];
        if (fastSkips) {
          ((org.apache.hadoop.hive.serde2.thrift.SkippableTProtocol) iprot)
              .skip(mt.getType());
        } else {
          TProtocolUtil.skip(iprot, mt.getType());
        }
        struct.set(i, null);
        continue;
      }
      if (thrift_mode) {
        field = iprot.readFieldBegin();

        if (field.type >= 0) {
          if (field.type == TType.STOP) {
            stopSeen = true;
            break;
          }
          mt = getFieldByFieldId(field.id);
          if (mt == null) {
            // unknown field id - log and skip it rather than fail
            System.err.println("ERROR for fieldid: " + field.id
                + " system has no knowledge of this field which is of type : "
                + field.type);
            TProtocolUtil.skip(iprot, field.type);
            continue;
          }
        }
      }

      // field.type < 0 means that this is a faked Thrift field, e.g.,
      // TControlSeparatedProtocol, which does not
      // serialize the field id in the stream. As a result, the only way to get
      // the field id is to fall back to
      // the position "i".
      // The intention of this hack (field.type < 0) is to make
      // TControlSeparatedProtocol a real Thrift prototype,
      // but there are a lot additional work to do to fulfill that, and that
      // protocol inherently does not support
      // versioning (adding/deleting fields).
      int orderedId = -1;
      if (!thrift_mode || field.type < 0) {
        mt = ordered_types[i];

        // We don't need to lookup order_column_id_by_name because we know it
        // must be "i".
        orderedId = i;
      } else {
        // Set the correct position
        orderedId = ordered_column_id_by_name.get(mt.name);
      }
      struct.set(orderedId, mt.deserialize(struct.get(orderedId), iprot));

      if (thrift_mode) {
        iprot.readFieldEnd();
      }
      fieldsPresent[orderedId] = true;
    }

    // null out every slot that was not read in this pass
    for (int i = 0; i < ordered_types.length; i++) {
      if (!fieldsPresent[i]) {
        struct.set(i, null);
      }
    }

    if (thrift_mode && !stopSeen) {
      // strip off the STOP marker, which may be left if all the fields were in
      // the serialization
      iprot.readFieldBegin();
    }

    return struct;
  }

  // scratch TField reused by serialize(); reassigned per field in thrift_mode
  TField field = new TField();

  /**
   * Serializes the row {@code o} (described by {@code oi}) to {@code oprot}.
   * Null fields are either skipped or, when the protocol supports it,
   * written as explicit nulls.
   */
  public void serialize(Object o, ObjectInspector oi, TProtocol oprot)
      throws TException, SerDeException, NoSuchFieldException, IllegalAccessException {
    // Assuming the ObjectInspector represents exactly the same type as this
    // struct.
    // This assumption should be checked during query compile time.
    assert (oi instanceof StructObjectInspector);
    StructObjectInspector soi = (StructObjectInspector) oi;

    boolean writeNulls = oprot instanceof org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol;

    // For every field
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    if (fields.size() != ordered_types.length) {
      throw new SerDeException("Trying to serialize " + fields.size()
          + " fields into a struct with " + ordered_types.length + " object="
          + o + " objectinspector=" + oi.getTypeName());
    }
    for (int i = 0; i < fields.size(); i++) {
      Object f = soi.getStructFieldData(o, fields.get(i));
      DynamicSerDeTypeBase mt = ordered_types[i];

      if (f == null && !writeNulls) {
        // null with no null-capable protocol: silently drop the field
        continue;
      }

      if (thrift_mode) {
        field = new TField(mt.name, mt.getType(), (short) mt.fieldid);
        oprot.writeFieldBegin(field);
      }

      if (f == null) {
        ((org.apache.hadoop.hive.serde2.thrift.WriteNullsProtocol) oprot)
            .writeNull();
      } else {
        mt.serialize(f, fields.get(i).getFieldObjectInspector(), oprot);
      }
      if (thrift_mode) {
        oprot.writeFieldEnd();
      }
    }
    if (thrift_mode) {
      oprot.writeFieldStop();
    }
  }

  /** Returns a comma-separated {@code fieldid:type name} listing of all fields. */
  @Override
  public String toString() {
    StringBuilder result = new StringBuilder();
    String prefix = "";
    for (DynamicSerDeField t : getChildren()) {
      result.append(prefix + t.fieldid + ":"
          + t.getFieldType().getMyType().toString() + " " + t.name);
      prefix = ",";
    }
    return result.toString();
  }
}
| |
package com.sequenceiq.mock.swagger.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.sequenceiq.mock.swagger.model.ApiAuthRoleAuthority;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.*;
/**
 * Model describing the metadata of an auth role: display name, uuid, role
 * identifier, granted authorities and the scopes the role is allowed to use.
 */
@ApiModel(description = "This is the model for auth role metadata")
@Validated
@javax.annotation.Generated(value = "io.swagger.codegen.languages.SpringCodegen", date = "2021-12-10T21:24:30.629+01:00")
public class ApiAuthRoleMetadata {

    @JsonProperty("displayName")
    private String displayName = null;

    @JsonProperty("uuid")
    private String uuid = null;

    @JsonProperty("role")
    private String role = null;

    @JsonProperty("authorities")
    @Valid
    private List<ApiAuthRoleAuthority> authorities = null;

    @JsonProperty("allowedScopes")
    @Valid
    private List<String> allowedScopes = null;

    /** Fluent setter for {@code displayName}. */
    public ApiAuthRoleMetadata displayName(String value) {
        displayName = value;
        return this;
    }

    /**
     * @return displayName
     **/
    @ApiModelProperty(value = "")
    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    /** Fluent setter for {@code uuid}. */
    public ApiAuthRoleMetadata uuid(String value) {
        uuid = value;
        return this;
    }

    /**
     * @return uuid
     **/
    @ApiModelProperty(value = "")
    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    /** Fluent setter for {@code role}. */
    public ApiAuthRoleMetadata role(String value) {
        role = value;
        return this;
    }

    /**
     * @return role
     **/
    @ApiModelProperty(value = "")
    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    /** Fluent setter for {@code authorities}. */
    public ApiAuthRoleMetadata authorities(List<ApiAuthRoleAuthority> value) {
        authorities = value;
        return this;
    }

    /** Appends one authority, lazily creating the backing list. */
    public ApiAuthRoleMetadata addAuthoritiesItem(ApiAuthRoleAuthority authoritiesItem) {
        if (authorities == null) {
            authorities = new ArrayList<>();
        }
        authorities.add(authoritiesItem);
        return this;
    }

    /**
     * @return authorities
     **/
    @ApiModelProperty(value = "")
    @Valid
    public List<ApiAuthRoleAuthority> getAuthorities() {
        return authorities;
    }

    public void setAuthorities(List<ApiAuthRoleAuthority> authorities) {
        this.authorities = authorities;
    }

    /** Fluent setter for {@code allowedScopes}. */
    public ApiAuthRoleMetadata allowedScopes(List<String> value) {
        allowedScopes = value;
        return this;
    }

    /** Appends one allowed scope, lazily creating the backing list. */
    public ApiAuthRoleMetadata addAllowedScopesItem(String allowedScopesItem) {
        if (allowedScopes == null) {
            allowedScopes = new ArrayList<>();
        }
        allowedScopes.add(allowedScopesItem);
        return this;
    }

    /**
     * @return allowedScopes
     **/
    @ApiModelProperty(value = "")
    public List<String> getAllowedScopes() {
        return allowedScopes;
    }

    public void setAllowedScopes(List<String> allowedScopes) {
        this.allowedScopes = allowedScopes;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ApiAuthRoleMetadata that = (ApiAuthRoleMetadata) o;
        return Objects.equals(displayName, that.displayName)
                && Objects.equals(uuid, that.uuid)
                && Objects.equals(role, that.role)
                && Objects.equals(authorities, that.authorities)
                && Objects.equals(allowedScopes, that.allowedScopes);
    }

    @Override
    public int hashCode() {
        return Objects.hash(displayName, uuid, role, authorities, allowedScopes);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class ApiAuthRoleMetadata {\n");
        sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n");
        sb.append(" uuid: ").append(toIndentedString(uuid)).append("\n");
        sb.append(" role: ").append(toIndentedString(role)).append("\n");
        sb.append(" authorities: ").append(toIndentedString(authorities)).append("\n");
        sb.append(" allowedScopes: ").append(toIndentedString(allowedScopes)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        return o == null ? "null" : o.toString().replace("\n", "\n ");
    }
}
| |
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.kv.raft.net;
import com.google.common.base.Preconditions;
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.ArrayDeque;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * One TCP connection.
 *
 * <p>
 * Wire format: every message is a 4-byte length header (big-endian, the
 * {@link ByteBuffer} default) followed by the payload; {@code inbuf}
 * alternates between the two phases via {@code readingLength}.
 *
 * <b>Locking</b>
 *
 * <p>
 * All access to this class must be with the associated {@link TCPNetwork} instance locked.
 */
class Connection implements SelectorSupport.IOHandler {

    /**
     * Minimum buffer size to use a direct buffer.
     */
    private static final int MIN_DIRECT_BUFFER_SIZE = 128;

    private final Logger log = LoggerFactory.getLogger(this.getClass());
    private final TCPNetwork network;
    private final String peer;
    private final SocketChannel socketChannel;
    private final SelectionKey selectionKey;
    // queued outgoing buffers: alternating length-header and payload buffers
    private final ArrayDeque<ByteBuffer> output = new ArrayDeque<>();

    private ByteBuffer inbuf;
    private long queueSize;                     // invariant: always equals the total number of bytes in 'output'
    private long lastActiveTime;                // System.nanoTime() of the most recent I/O activity
    private boolean readingLength;              // indicates 'inbuf' is reading the message length (4 bytes)
    private boolean closed;

// Constructors

    /**
     * Constructor. Registers the channel with the network's selector, asking
     * for OP_CONNECT while the connect is still pending, otherwise OP_READ.
     *
     * @param network owning network; also serves as the lock object
     * @param peer remote peer identity
     * @param socketChannel open (possibly still connecting) channel
     * @throws IOException if selector registration fails
     */
    public Connection(TCPNetwork network, String peer, SocketChannel socketChannel) throws IOException {

        // Sanity check
        Preconditions.checkArgument(network != null, "null network");
        Preconditions.checkArgument(peer != null, "null peer");
        Preconditions.checkArgument(socketChannel != null, "null socketChannel");

        // Initialize
        this.network = network;
        this.peer = peer;
        this.socketChannel = socketChannel;
        this.lastActiveTime = System.nanoTime();

        // Set up initial selection
        this.selectionKey = this.network.createSelectionKey(this.socketChannel, this);
        if (this.socketChannel.isConnectionPending())
            this.network.selectFor(this.selectionKey, SelectionKey.OP_CONNECT, true);
        else
            this.network.selectFor(this.selectionKey, SelectionKey.OP_READ, true);

        // Initialize input state: start by reading a 4-byte length header
        this.inbuf = ByteBuffer.allocate(4);
        this.readingLength = true;
    }

    /**
     * Get remote peer's identity.
     */
    public String getPeer() {
        return this.peer;
    }

    /**
     * Get the associated {@link SocketChannel}.
     */
    public SocketChannel getSocketChannel() {
        return this.socketChannel;
    }

    /**
     * Get time in milliseconds since last activity.
     */
    public long getIdleTime() {
        return (System.nanoTime() - this.lastActiveTime) / 1000000L;
    }

    /**
     * Enqueue an outgoing message on this connection.
     *
     * @param buf outgoing data
     * @return true if message was enqueued, false if output buffer was full
     */
    public boolean output(ByteBuffer buf) {

        // Sanity check
        assert Thread.holdsLock(this.network);
        Preconditions.checkArgument(buf != null, "null buf");

        // Avoid anyone else mucking with my buffer position, etc.
        buf = buf.asReadOnlyBuffer();

        // Check output queue capacity (+4 accounts for the length header)
        final int length = buf.remaining();
        final int increment = length + 4;
        if (this.queueSize + increment > this.network.getMaxOutputQueueSize())
            return false;

        // Add to queue: length header first, then payload
        // (the cast keeps this source compatible with pre-9 ByteBuffer.flip())
        this.output.add((ByteBuffer)ByteBuffer.allocate(4).putInt(length).flip());
        this.output.add(buf);
        this.queueSize += increment;

        // Notify us when socket is writable (unless still waiting on connection)
        if (this.socketChannel.isConnected())
            this.network.selectFor(this.selectionKey, SelectionKey.OP_WRITE, true);

        // Done
        this.lastActiveTime = System.nanoTime();
        return true;
    }

// Object

    @Override
    public String toString() {
        return this.getClass().getSimpleName() + "[peer=" + this.peer + ",closed=" + this.closed + "]";
    }

// IOHandler

    @Override
    public void serviceIO(SelectionKey key) throws IOException {
        assert this.network.isServiceThread();
        assert Thread.holdsLock(this.network);
        if (key.isConnectable())
            this.handleConnectable();
        if (key.isReadable())
            this.handleReadable();
        if (key.isWritable())
            this.handleWritable();
    }

    @Override
    public void close(Throwable cause) {
        assert Thread.holdsLock(this.network);
        if (this.closed)
            return;
        this.closed = true;
        if (this.log.isDebugEnabled())
            this.log.debug("closing " + this + (cause != null ? " due to " + cause : ""));
        try {
            this.socketChannel.close();
        } catch (IOException e) {
            // ignore - already closing; nothing useful to do with the error
        }
        this.network.handleConnectionClosed(this);
    }

// I/O Ready Conditions

    // Handle connection succeeded
    private void handleConnectable() throws IOException {

        // Leave connecting state
        this.network.selectFor(this.selectionKey, SelectionKey.OP_CONNECT, false);
        if (!this.socketChannel.finishConnect())                    // this should never occur
            throw new IOException("connection failed");
        if (this.log.isDebugEnabled())
            this.log.debug(this + ": connection succeeded");

        // Notify us when readable/writeable (writeable only if output is already queued)
        this.network.selectFor(this.selectionKey, SelectionKey.OP_READ, true);
        this.network.selectFor(this.selectionKey, SelectionKey.OP_WRITE, !this.output.isEmpty());

        // Update timestamp
        this.lastActiveTime = System.nanoTime();

        // Notify client we are open for business
        this.network.handleOutputQueueEmpty(this);
    }

    // Drain readable bytes, alternating between length headers and payloads
    private void handleReadable() throws IOException {
        while (true) {

            // Update timestamp
            this.lastActiveTime = System.nanoTime();

            // Read bytes
            final long len = this.socketChannel.read(this.inbuf);
            if (len == -1)
                throw new EOFException("connection closed");

            // Is the message (or length header) still incomplete?
            if (this.inbuf.hasRemaining())
                break;

            // Set up for reading
            this.inbuf.flip();

            // Completed length header?
            if (this.readingLength) {

                // Get and validate length
                assert this.inbuf.remaining() == 4;
                final int length = this.inbuf.getInt();
                if (length < 0 || length > this.network.getMaxMessageSize())
                    throw new IOException("rec'd message with bogus length " + length);

                // Set up for reading the actual message; use a direct buffer for larger payloads
                this.inbuf = length >= MIN_DIRECT_BUFFER_SIZE ? ByteBuffer.allocateDirect(length) : ByteBuffer.allocate(length);
                this.readingLength = false;
                continue;
            }

            // Deliver the completed message
            this.network.handleMessage(this, this.inbuf);

            // Set up for reading next length header
            this.inbuf = ByteBuffer.allocate(4);
            this.readingLength = true;
        }

        // Done
        this.lastActiveTime = System.nanoTime();
    }

    // Flush queued output; gathering write over all queued buffers
    private void handleWritable() throws IOException {

        // Write more data, if present
        boolean queueBecameEmpty = false;
        if (!this.output.isEmpty()) {

            // Write data
            final long written = this.socketChannel.write(this.output.toArray(new ByteBuffer[this.output.size()]));
            this.queueSize -= written;

            // Clear away empty buffers
            while (!this.output.isEmpty() && !this.output.peekFirst().hasRemaining())
                this.output.removeFirst();

            // Set flag if queue became empty
            queueBecameEmpty = this.output.isEmpty();
        }

        // Notify when writeable - only if queue still not empty
        this.network.selectFor(this.selectionKey, SelectionKey.OP_WRITE, !this.output.isEmpty());

        // Update timestamp
        this.lastActiveTime = System.nanoTime();

        // Notify client if queue became empty
        if (queueBecameEmpty)
            this.network.handleOutputQueueEmpty(this);
    }

// Housekeeping

    // Check timeouts: connect timeout while pending, idle timeout once connected
    void performHousekeeping() throws IOException {
        assert Thread.holdsLock(this.network);
        assert this.network.isServiceThread();
        if (this.socketChannel.isConnectionPending()) {
            if (this.getIdleTime() >= this.network.getConnectTimeout())
                throw new IOException("connection unsuccessful after " + this.getIdleTime() + "ms");
        } else {
            if (this.getIdleTime() >= this.network.getMaxIdleTime())
                throw new IOException("connection idle timeout after " + this.getIdleTime() + "ms");
        }
    }
}
| |
/*
* Jicofo, the Jitsi Conference Focus.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.jicofo.xmpp;
import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*;
import net.java.sip.communicator.util.*;
import net.java.sip.communicator.util.Logger;
import org.jitsi.impl.protocol.xmpp.extensions.*;
import org.jitsi.jicofo.*;
import org.jitsi.jicofo.auth.*;
import org.jitsi.jicofo.reservation.*;
import org.jitsi.meet.*;
import org.jitsi.service.configuration.*;
import org.jitsi.util.*;
import org.jitsi.xmpp.component.*;
import org.jitsi.xmpp.util.*;
import org.jivesoftware.smack.packet.*;
import org.osgi.framework.*;
import org.xmpp.component.*;
import org.xmpp.packet.IQ;
/**
* XMPP component that listens for {@link ConferenceIq}
* and allocates {@link org.jitsi.jicofo.JitsiMeetConference}s appropriately.
*
* @author Pawel Domas
*/
public class FocusComponent
extends ComponentBase
implements BundleActivator
{
/**
 * The logger.
 */
private final static Logger logger
    = Logger.getLogger(FocusComponent.class);

/**
 * Name of system and configuration property which specifies the JID from
 * which shutdown requests will be accepted.
 */
public static final String SHUTDOWN_ALLOWED_JID_PNAME
    = "org.jitsi.jicofo.SHUTDOWN_ALLOWED_JID";

/**
 * The JID from which shutdown requests are accepted; loaded from
 * configuration in {@link #start(BundleContext)}.
 */
private String shutdownAllowedJid;

/**
 * Indicates if the focus is anonymous user or authenticated system admin.
 */
private final boolean isFocusAnonymous;

/**
 * The JID of focus user that will enter the MUC room. Can be used to
 * recognize the real focus of the conference.
 */
private final String focusAuthJid;

/**
 * The manager object that creates and expires
 * {@link org.jitsi.jicofo.JitsiMeetConference}s.
 */
private FocusManager focusManager;

/**
 * (Optional) Authentication authority used to verify user requests.
 * May be {@code null} when no authentication service is registered.
 */
private AuthenticationAuthority authAuthority;

/**
 * (Optional) Reservation system that manages new rooms allocation.
 * Requires authentication system in order to verify user's identity.
 * May be {@code null} when no reservation service is registered.
 */
private ReservationSystem reservationSystem;
/**
 * Creates a new <tt>FocusComponent</tt> instance.
 *
 * @param host the hostname or IP address to which this component will be
 *        connected.
 * @param port the port of XMPP server to which this component will connect.
 * @param domain the name of main XMPP domain on which this component will
 *        be served.
 * @param subDomain the name of subdomain on which this component will be
 *        available.
 * @param secret the password used by the component to authenticate with
 *        XMPP server.
 * @param anonymousFocus indicates if the focus user is anonymous.
 * @param focusAuthJid the JID of authenticated focus user which will be
 *        advertised to conference participants.
 */
public FocusComponent(String host, int port,
                      String domain, String subDomain,
                      String secret,
                      boolean anonymousFocus, String focusAuthJid)
{
    super(host, port, domain, subDomain, secret);

    this.focusAuthJid = focusAuthJid;
    this.isFocusAnonymous = anonymousFocus;
}
/**
 * Initializes this component by starting the OSGi framework with this
 * instance as its activator; {@link #start(BundleContext)} is invoked
 * afterwards by OSGi.
 */
public void init()
{
    OSGi.start(this);
}
/**
 * Invoked by OSGi after {@link #init()}: loads configuration, starts the
 * ping task and resolves collaborating services from the OSGi registry.
 *
 * @param bc the OSGi bundle context
 * @throws Exception on any startup failure
 */
@Override
public void start(BundleContext bc)
    throws Exception
{
    // NOTE(review): assumes a ConfigurationService is always registered;
    // a missing service would surface here as an NPE - confirm
    ConfigurationService configService
        = ServiceUtils.getService(bc, ConfigurationService.class);

    loadConfig(configService, "org.jitsi.jicofo");

    if (!isPingTaskStarted())
    {
        startPingTask();
    }

    this.shutdownAllowedJid
        = configService.getString(SHUTDOWN_ALLOWED_JID_PNAME);

    // Resolve collaborating services (any of which may be absent)
    focusManager = ServiceUtils.getService(bc, FocusManager.class);
    authAuthority
        = ServiceUtils.getService(bc, AuthenticationAuthority.class);
    reservationSystem
        = ServiceUtils.getService(bc, ReservationSystem.class);
}
/**
 * Releases resources used by this instance by stopping the OSGi framework;
 * {@link #stop(BundleContext)} is invoked afterwards by OSGi.
 */
public void dispose()
{
    OSGi.stop(this);
}
/**
 * Invoked by OSGi after {@link #dispose()} is called; drops all service
 * references acquired in {@link #start(BundleContext)}.
 *
 * @param bundleContext the OSGi bundle context
 * @throws Exception never thrown by this implementation
 */
@Override
public void stop(BundleContext bundleContext)
    throws Exception
{
    reservationSystem = null;
    focusManager = null;
    authAuthority = null;
}
/**
 * {@inheritDoc}
 */
@Override
public String getDescription()
{
    return "Manages Jitsi Meet conferences";
}
/**
 * {@inheritDoc}
 */
@Override
public String getName()
{
    return "Jitsi Meet Focus";
}
/**
 * {@inheritDoc}
 *
 * Advertises the conference IQ namespace via service discovery.
 */
@Override
protected String[] discoInfoFeatureNamespaces()
{
    return new String[] { ConferenceIq.NAMESPACE };
}
/**
 * Handles an <tt>org.xmpp.packet.IQ</tt> stanza of type <tt>get</tt>.
 * Answers Colibri stats requests (conference count and graceful-shutdown
 * state) and login-URL requests; everything else is delegated to the
 * superclass.
 *
 * @param iq the IQ stanza of type <tt>get</tt> to handle
 * @return the response IQ, or whatever the superclass returns for
 *         unrecognized requests
 * @throws Exception to reply with <tt>internal-server-error</tt>
 */
@Override
protected IQ handleIQGet(IQ iq)
    throws Exception
{
    try
    {
        // Bridge from the component (org.xmpp) IQ type to the Smack IQ type
        org.jivesoftware.smack.packet.IQ smackIq = IQUtils.convert(iq);
        if (smackIq instanceof ColibriStatsIQ)
        {
            // Reply with stats
            ColibriStatsIQ statsReply = new ColibriStatsIQ();

            statsReply.setType(
                org.jivesoftware.smack.packet.IQ.Type.RESULT);
            statsReply.setPacketID(iq.getID());
            statsReply.setTo(iq.getFrom().toString());

            int conferenceCount = focusManager.getConferenceCount();

            // Return conference count
            statsReply.addStat(
                new ColibriStatsExtension.Stat(
                    "conferences",
                    Integer.toString(conferenceCount)));
            statsReply.addStat(
                new ColibriStatsExtension.Stat(
                    "graceful_shutdown",
                    focusManager.isShutdownInProgress()
                        ? "true" : "false"));

            return IQUtils.convert(statsReply);
        }
        else if (smackIq instanceof LoginUrlIQ)
        {
            org.jivesoftware.smack.packet.IQ result
                = handleAuthUrlIq((LoginUrlIQ) smackIq);
            return IQUtils.convert(result);
        }
        else
        {
            return super.handleIQGet(iq);
        }
    }
    catch (Exception e)
    {
        // Log and rethrow so the component layer replies with an error
        logger.error(e, e);
        throw e;
    }
}
/**
 * Handles an <tt>org.xmpp.packet.IQ</tt> stanza of type <tt>set</tt> which
 * represents a request. Dispatches conference requests, graceful-shutdown
 * requests (restricted to {@link #shutdownAllowedJid}) and logout requests;
 * everything else is delegated to the superclass.
 *
 * @param iq the <tt>org.xmpp.packet.IQ</tt> stanza of type <tt>set</tt>
 *        which represents the request to handle
 * @return an <tt>org.xmpp.packet.IQ</tt> stanza which represents the
 *         response to the specified request or <tt>null</tt> to reply with
 *         <tt>feature-not-implemented</tt>
 * @throws Exception to reply with <tt>internal-server-error</tt> to the
 *         specified request
 * @see AbstractComponent#handleIQSet(IQ)
 */
@Override
public IQ handleIQSet(IQ iq)
    throws Exception
{
    try
    {
        // Bridge from the component (org.xmpp) IQ type to the Smack IQ type
        org.jivesoftware.smack.packet.IQ smackIq = IQUtils.convert(iq);

        if (smackIq instanceof ConferenceIq)
        {
            org.jivesoftware.smack.packet.IQ response
                = handleConferenceIq((ConferenceIq) smackIq);

            return response != null ? IQUtils.convert(response) : null;
        }
        else if (smackIq instanceof ShutdownIQ)
        {
            ShutdownIQ gracefulShutdownIQ
                = (ShutdownIQ) smackIq;

            // Only the graceful variant is supported
            if (!gracefulShutdownIQ.isGracefulShutdown())
            {
                return IQUtils.convert(
                    org.jivesoftware.smack.packet.IQ.createErrorResponse(
                        smackIq,
                        new XMPPError(XMPPError.Condition.bad_request)));
            }

            String from = gracefulShutdownIQ.getFrom();
            String bareFrom
                = org.jivesoftware.smack.util.StringUtils
                    .parseBareAddress(from);

            // Shutdown is authorized by bare JID against configuration
            if (StringUtils.isNullOrEmpty(shutdownAllowedJid)
                || !shutdownAllowedJid.equals(bareFrom))
            {
                // Forbidden
                XMPPError forbiddenError
                    = new XMPPError(XMPPError.Condition.forbidden);

                logger.warn("Rejected shutdown request from: " + from);

                return IQUtils.convert(
                    org.jivesoftware.smack.packet.IQ.createErrorResponse(
                        smackIq, forbiddenError));
            }

            logger.info("Accepted shutdown request from: " + from);

            focusManager.enableGracefulShutdownMode();

            return IQUtils.convert(
                org.jivesoftware.smack.packet.IQ.createResultIQ(smackIq));
        }
        else if (smackIq instanceof LogoutIq)
        {
            logger.info("Logout IQ received: " + iq.toXML());

            if (authAuthority == null)
            {
                // not-implemented
                return null;
            }

            org.jivesoftware.smack.packet.IQ smackResult
                = authAuthority.processLogoutIq((LogoutIq) smackIq);

            return smackResult != null
                ? IQUtils.convert(smackResult) : null;
        }
        else
        {
            return super.handleIQSet(iq);
        }
    }
    catch (Exception e)
    {
        // Log and rethrow so the component layer replies with an error
        logger.error(e, e);
        throw e;
    }
}
/**
 * Additional logic added for conference IQ processing like
 * authentication and room reservation.
 *
 * @param query <tt>ConferenceIq</tt> query
 * @param response <tt>ConferenceIq</tt> response which can be modified
 *        during this processing.
 * @param roomExists <tt>true</tt> if room mentioned in the <tt>query</tt>
 *        already exists.
 *
 * @return <tt>null</tt> if everything went ok or an error/response IQ
 *         which should be returned to the user
 */
public org.jivesoftware.smack.packet.IQ processExtensions(
        ConferenceIq query, ConferenceIq response, boolean roomExists)
{
    String peerJid = query.getFrom();
    String identity = null;

    // Authentication (skipped entirely when no auth authority is configured)
    if (authAuthority != null)
    {
        org.jivesoftware.smack.packet.IQ authErrorOrResponse
            = authAuthority.processAuthentication(query, response);

        // Checks if authentication module wants to cancel further
        // processing and eventually returns it's response
        if (authErrorOrResponse != null)
        {
            return authErrorOrResponse;
        }
        // Only authenticated users are allowed to create new rooms
        if (!roomExists)
        {
            identity = authAuthority.getUserIdentity(peerJid);
            if (identity == null)
            {
                // Error not authorized
                return ErrorFactory.createNotAuthorizedError(query, null);
            }
        }
    }

    // Check room reservation? Only consulted when creating a new room.
    // NOTE(review): 'identity' stays null when authAuthority is null -
    // presumably the reservation system accepts that; confirm
    if (!roomExists && reservationSystem != null)
    {
        String room = query.getRoom();

        ReservationSystem.Result result
            = reservationSystem.createConference(identity, room);

        logger.info(
            "Create room result: " + result + " for " + room);

        if (result.getCode() != ReservationSystem.RESULT_OK)
        {
            return ErrorFactory
                .createReservationError(query, result);
        }
    }
    return null;
}
/**
 * Handles a conference allocation request: refuses new rooms during
 * graceful shutdown, runs authentication/reservation extensions, asks the
 * {@link FocusManager} to (possibly) allocate the conference and builds the
 * result IQ advertising the focus JID and enabled features.
 *
 * @param query the incoming <tt>ConferenceIq</tt>
 * @return the response IQ (result or error); never <tt>null</tt>
 * @throws Exception propagated from conference allocation
 */
private org.jivesoftware.smack.packet.IQ handleConferenceIq(
        ConferenceIq query)
    throws Exception
{
    ConferenceIq response = new ConferenceIq();
    String room = query.getRoom();

    logger.info("Focus request for room: " + room);

    boolean roomExists = focusManager.getConference(room) != null;

    if (focusManager.isShutdownInProgress() && !roomExists)
    {
        // Service unavailable - no new rooms during graceful shutdown
        return ColibriConferenceIQ
            .createGracefulShutdownErrorResponse(query);
    }

    // Authentication and reservations system logic
    org.jivesoftware.smack.packet.IQ error
        = processExtensions(query, response, roomExists);
    if (error != null)
    {
        return error;
    }

    boolean ready
        = focusManager.conferenceRequest(
            room, query.getPropertiesMap());

    if (!isFocusAnonymous && authAuthority == null)
    {
        // Focus is authenticated system admin, so we let them in
        // immediately. Focus will get OWNER anyway.
        ready = true;
    }

    // Mirror the query addressing/identifiers in the result
    response.setType(org.jivesoftware.smack.packet.IQ.Type.RESULT);
    response.setPacketID(query.getPacketID());
    response.setFrom(query.getTo());
    response.setTo(query.getFrom());
    response.setRoom(query.getRoom());
    response.setReady(ready);

    // Config
    response.setFocusJid(focusAuthJid);

    // Authentication module enabled?
    response.addProperty(
        new ConferenceIq.Property(
                "authentication",
                String.valueOf(authAuthority != null)));

    if (authAuthority != null)
    {
        response.addProperty(
            new ConferenceIq.Property(
                    "externalAuth",
                    String.valueOf(authAuthority.isExternal())));
    }

    if (focusManager.getJitsiMeetServices().getSipGateway() != null)
    {
        response.addProperty(
            new ConferenceIq.Property("sipGatewayEnabled", "true"));
    }

    return response;
}
private org.jivesoftware.smack.packet.IQ handleAuthUrlIq(
LoginUrlIQ authUrlIq)
{
if (authAuthority == null)
{
XMPPError error
= new XMPPError(XMPPError.Condition.service_unavailable);
return org.jivesoftware.smack.packet.IQ
.createErrorResponse(authUrlIq, error);
}
String peerFullJid = authUrlIq.getFrom();
String roomName = authUrlIq.getRoom();
if (StringUtils.isNullOrEmpty(roomName))
{
XMPPError error = new XMPPError(XMPPError.Condition.no_acceptable);
return org.jivesoftware.smack.packet.IQ
.createErrorResponse(authUrlIq, error);
}
LoginUrlIQ result = new LoginUrlIQ();
result.setType(org.jivesoftware.smack.packet.IQ.Type.RESULT);
result.setPacketID(authUrlIq.getPacketID());
result.setTo(authUrlIq.getFrom());
boolean popup =
authUrlIq.getPopup() != null && authUrlIq.getPopup();
String machineUID = authUrlIq.getMachineUID();
if (StringUtils.isNullOrEmpty(machineUID))
{
XMPPError error
= new XMPPError(
XMPPError.Condition.bad_request,
"missing mandatory attribute 'machineUID'");
return org.jivesoftware.smack.packet.IQ
.createErrorResponse(authUrlIq, error);
}
String authUrl
= authAuthority.createLoginUrl(
machineUID, peerFullJid, roomName, popup);
result.setUrl(authUrl);
logger.info("Sending url: " + result.toXML());
return result;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android.relinker;
import com.facebook.buck.android.NdkCxxPlatform;
import com.facebook.buck.android.NdkCxxPlatforms.TargetCpuType;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxLink;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.graph.DirectedAcyclicGraph;
import com.facebook.buck.graph.TopologicalSort;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.model.Pair;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleDependencyVisitors;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.args.Arg;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
/**
 * When linking shared libraries, by default, all symbols are exported from the library. In a
 * particular application, though, many of those symbols may never be used. Ideally, in each apk,
 * each shared library would only export the minimal set of symbols that are used by other libraries
 * in the apk. This would allow the linker to remove any dead code within the library (the linker
 * can strip all code that is unreachable from the set of exported symbols).
 * <p/>
 * The native relinker tries to remedy the situation. When enabled for an apk, the native relinker
 * will take the set of libraries in the apk and relink them in reverse order telling the linker to
 * only export those symbols that are referenced by a higher library.
 */
public class NativeRelinker {
  private final BuildRuleParams buildRuleParams;
  private final SourcePathResolver resolver;
  private final CxxBuckConfig cxxBuckConfig;
  private final ImmutableMap<Pair<TargetCpuType, String>, SourcePath> relinkedLibs;
  private final ImmutableMap<Pair<TargetCpuType, String>, SourcePath> relinkedLibsAssets;
  // These two are assigned exactly once (in the constructor), so they are final.
  private final ImmutableMap<TargetCpuType, NdkCxxPlatform> nativePlatforms;
  private final ImmutableList<RelinkerRule> rules;

  /**
   * Creates one {@link RelinkerRule} per input library and computes the mapping from the original
   * library {@link SourcePath}s to their relinked counterparts.
   *
   * @param buildRuleParams base params from which the relinker rules' params are derived
   * @param resolver used to map {@link SourcePath}s back to their producing {@link BuildRule}s
   * @param cxxBuckConfig cxx configuration forwarded to each {@link RelinkerRule}
   * @param nativePlatforms per-cpu-type NDK platform, used to locate objdump
   * @param linkableLibs libraries to relink, keyed by (cpu type, name)
   * @param linkableLibsAssets asset libraries to relink, keyed by (cpu type, name)
   */
  public NativeRelinker(
      BuildRuleParams buildRuleParams,
      SourcePathResolver resolver,
      CxxBuckConfig cxxBuckConfig,
      ImmutableMap<TargetCpuType, NdkCxxPlatform> nativePlatforms,
      ImmutableMap<Pair<TargetCpuType, String>, SourcePath> linkableLibs,
      ImmutableMap<Pair<TargetCpuType, String>, SourcePath> linkableLibsAssets) {
    Preconditions.checkArgument(
        !linkableLibs.isEmpty() ||
            !linkableLibsAssets.isEmpty(),
        "There should be at least one native library to relink.");

    this.buildRuleParams = buildRuleParams;
    this.resolver = resolver;
    this.cxxBuckConfig = cxxBuckConfig;
    this.nativePlatforms = nativePlatforms;

    /*
    When relinking a library, any symbols needed by a (transitive) dependent must continue to be
    exported. As relinking one of those dependents may change the set of symbols that it needs,
    we only need to keep the symbols that are still used after a library is relinked. So, this
    relinking process basically works in the reverse order of the original link process. As each
    library is relinked, we now know the set of symbols that are needed in that library's
    dependencies.

    For linkables that can't be resolved to a BuildRule, we can't tell what libraries that one
    depends on. So, we essentially assume that everything depends on it.
    */
    ImmutableMap.Builder<BuildRule, Pair<TargetCpuType, SourcePath>> ruleMapBuilder =
        ImmutableMap.builder();
    ImmutableSet.Builder<Pair<TargetCpuType, SourcePath>> copiedLibraries = ImmutableSet.builder();

    // Partition the inputs into those backed by a BuildRule and those that are opaque paths.
    for (Map.Entry<Pair<TargetCpuType, String>, SourcePath> entry :
        Iterables.concat(linkableLibs.entrySet(), linkableLibsAssets.entrySet())) {
      SourcePath source = entry.getValue();
      Optional<BuildRule> rule = resolver.getRule(source);
      if (rule.isPresent()) {
        ruleMapBuilder.put(rule.get(), new Pair<>(entry.getKey().getFirst(), source));
      } else {
        copiedLibraries.add(new Pair<>(entry.getKey().getFirst(), source));
      }
    }

    ImmutableMap<BuildRule, Pair<TargetCpuType, SourcePath>> ruleMap = ruleMapBuilder.build();
    ImmutableSet<BuildRule> linkableRules = ruleMap.keySet();

    // Now, for every linkable build rule, we need to figure out all the other linkable build rules
    // that could depend on it (or rather, could use symbols from it).

    // This is the sub-graph that includes the linkableRules and all the dependents (including
    // non-linkable rules).
    final DirectedAcyclicGraph<BuildRule> graph = getBuildGraph(linkableRules);

    ImmutableList<BuildRule> sortedRules =
        TopologicalSort.sort(graph, x -> true);

    // This maps a build rule to every rule in linkableRules that depends on it. This (added to the
    // copied libraries) is the set of linkables that could use a symbol from this build rule.
    ImmutableMap<BuildRule, ImmutableSet<BuildRule>> allDependentsMap =
        getAllDependentsMap(linkableRules, graph, sortedRules);

    ImmutableMap.Builder<SourcePath, SourcePath> pathMap = ImmutableMap.builder();

    // Create the relinker rules for the libraries that couldn't be resolved back to a base rule.
    ImmutableList.Builder<RelinkerRule> relinkRules = ImmutableList.builder();
    for (Pair<TargetCpuType, SourcePath> p : copiedLibraries.build()) {
      // TODO(cjhopman): We shouldn't really need a full RelinkerRule at this point. We know that we
      // are just going to copy it, we could just leave these libraries in place and only calculate
      // the list of needed symbols.
      TargetCpuType cpuType = p.getFirst();
      SourcePath source = p.getSecond();
      RelinkerRule relink = makeRelinkerRule(cpuType, source, ImmutableList.of());
      relinkRules.add(relink);
      pathMap.put(source, relink.getLibFileSourcePath());
    }
    // Guava builders allow multiple build() calls; this snapshot holds only the copied libs.
    ImmutableList<RelinkerRule> copiedLibrariesRules = relinkRules.build();

    // Process the remaining linkable rules in the reverse sorted order. This makes it easy to refer
    // to the RelinkerRules of dependents.
    Iterable<Pair<TargetCpuType, SourcePath>> sortedPaths =
        FluentIterable.from(sortedRules)
            .filter(linkableRules::contains)
            .transform(Functions.forMap(ruleMap))
            .toList()
            .reverse();
    Map<BuildRule, RelinkerRule> relinkerMap = new HashMap<>();
    for (Pair<TargetCpuType, SourcePath> p : sortedPaths) {
      TargetCpuType cpuType = p.getFirst();
      SourcePath source = p.getSecond();
      BuildRule baseRule = resolver.getRule(source).get();
      // Relinking this library must keep any of the symbols needed by the libraries from the rules
      // in relinkerDeps.
      ImmutableList<RelinkerRule> relinkerDeps =
          ImmutableList.<RelinkerRule>builder()
              .addAll(copiedLibrariesRules)
              .addAll(
                  Lists.transform(
                      ImmutableList.copyOf(allDependentsMap.get(baseRule)),
                      Functions.forMap(relinkerMap)))
              .build();

      RelinkerRule relink = makeRelinkerRule(cpuType, source, relinkerDeps);
      relinkRules.add(relink);
      pathMap.put(source, relink.getLibFileSourcePath());
      relinkerMap.put(baseRule, relink);
    }

    Function<SourcePath, SourcePath> pathMapper = Functions.forMap(pathMap.build());
    rules = relinkRules.build();
    relinkedLibs = ImmutableMap.copyOf(
        Maps.transformValues(linkableLibs, pathMapper));
    relinkedLibsAssets = ImmutableMap.copyOf(
        Maps.transformValues(linkableLibsAssets, pathMapper));
  }

  /** Builds the dependency sub-graph containing {@code rules} and everything that reaches them. */
  private static DirectedAcyclicGraph<BuildRule> getBuildGraph(Set<BuildRule> rules) {
    // TODO(cjhopman): can this use .in(rules) instead of alwaysTrue()?
    return BuildRuleDependencyVisitors.getBuildRuleDirectedGraphFilteredBy(
        rules, x -> true, x -> true);
  }

  /**
   * Creates a map from every BuildRule to the set of transitive dependents of that BuildRule that
   * are in the linkableRules set.
   */
  private ImmutableMap<BuildRule, ImmutableSet<BuildRule>> getAllDependentsMap(
      Set<BuildRule> linkableRules,
      DirectedAcyclicGraph<BuildRule> graph,
      ImmutableList<BuildRule> sortedRules) {
    final Map<BuildRule, ImmutableSet<BuildRule>> allDependentsMap = new HashMap<>();
    // Using the sorted list of rules makes this calculation much simpler. We can just assume that
    // we already know all the dependents of a rules incoming nodes when we are processing that
    // rule.
    for (BuildRule rule : sortedRules.reverse()) {
      ImmutableSet.Builder<BuildRule> transitiveDependents = ImmutableSet.builder();
      for (BuildRule dependent : graph.getIncomingNodesFor(rule)) {
        transitiveDependents.addAll(allDependentsMap.get(dependent));
        if (linkableRules.contains(dependent)) {
          transitiveDependents.add(dependent);
        }
      }
      allDependentsMap.put(rule, transitiveDependents.build());
    }
    return ImmutableMap.copyOf(allDependentsMap);
  }

  /**
   * Creates the rule that relinks {@code source}, keeping only the symbols required by the
   * libraries produced by {@code relinkerDeps}.
   */
  private RelinkerRule makeRelinkerRule(
      TargetCpuType cpuType,
      SourcePath source,
      ImmutableList<RelinkerRule> relinkerDeps) {
    Function<RelinkerRule, SourcePath> getSymbolsNeeded = RelinkerRule::getSymbolsNeededPath;
    String libname = resolver.getAbsolutePath(source).getFileName().toString();
    BuildRuleParams relinkerParams = buildRuleParams
        .withFlavor(ImmutableFlavor.of("xdso-dce"))
        .withFlavor(ImmutableFlavor.of(Flavor.replaceInvalidCharacters(cpuType.toString())))
        .withFlavor(ImmutableFlavor.of(Flavor.replaceInvalidCharacters(libname)))
        .appendExtraDeps(relinkerDeps);
    BuildRule baseRule = resolver.getRule(source).orElse(null);
    ImmutableList<Arg> linkerArgs = ImmutableList.of();
    Linker linker = null;
    // instanceof is null-safe, so the previous explicit "baseRule != null" check was redundant.
    if (baseRule instanceof CxxLink) {
      CxxLink link = (CxxLink) baseRule;
      linkerArgs = link.getArgs();
      linker = link.getLinker();
    }

    return new RelinkerRule(
        relinkerParams,
        resolver,
        ImmutableSortedSet.copyOf(Lists.transform(relinkerDeps, getSymbolsNeeded)),
        cpuType,
        nativePlatforms.get(cpuType).getObjdump(),
        cxxBuckConfig,
        source,
        linker != null,
        linker,
        linkerArgs);
  }

  /** @return the relinked (non-asset) libraries, keyed like the constructor's input map. */
  public ImmutableMap<Pair<TargetCpuType, String>, SourcePath> getRelinkedLibs() {
    return relinkedLibs;
  }

  /** @return the relinked asset libraries, keyed like the constructor's input map. */
  public ImmutableMap<Pair<TargetCpuType, String>, SourcePath>
      getRelinkedLibsAssets() {
    return relinkedLibsAssets;
  }

  /** @return all relinker rules created by this relinker. */
  public ImmutableList<RelinkerRule> getRules() {
    return rules;
  }
}
| |
package org.jabref.gui;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TimerTask;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.geometry.Orientation;
import javafx.scene.Node;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonBar;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.ProgressBar;
import javafx.scene.control.Separator;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.SplitPane;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TextInputControl;
import javafx.scene.control.ToolBar;
import javafx.scene.control.Tooltip;
import javafx.scene.control.skin.TabPaneSkin;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.TransferMode;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Priority;
import javafx.stage.Stage;
import org.jabref.Globals;
import org.jabref.JabRefExecutorService;
import org.jabref.gui.actions.ActionFactory;
import org.jabref.gui.actions.Actions;
import org.jabref.gui.actions.OldDatabaseCommandWrapper;
import org.jabref.gui.actions.SimpleCommand;
import org.jabref.gui.actions.StandardActions;
import org.jabref.gui.auximport.NewSubLibraryAction;
import org.jabref.gui.bibtexextractor.ExtractBibtexAction;
import org.jabref.gui.bibtexkeypattern.BibtexKeyPatternAction;
import org.jabref.gui.contentselector.ManageContentSelectorAction;
import org.jabref.gui.copyfiles.CopyFilesAction;
import org.jabref.gui.customizefields.SetupGeneralFieldsAction;
import org.jabref.gui.dialogs.AutosaveUIManager;
import org.jabref.gui.documentviewer.ShowDocumentViewerAction;
import org.jabref.gui.duplicationFinder.DuplicateSearch;
import org.jabref.gui.edit.ManageKeywordsAction;
import org.jabref.gui.edit.MassSetFieldsAction;
import org.jabref.gui.edit.OpenBrowserAction;
import org.jabref.gui.exporter.ExportCommand;
import org.jabref.gui.exporter.ExportToClipboardAction;
import org.jabref.gui.exporter.ManageCustomExportsAction;
import org.jabref.gui.exporter.SaveAllAction;
import org.jabref.gui.exporter.SaveDatabaseAction;
import org.jabref.gui.externalfiles.AutoLinkFilesAction;
import org.jabref.gui.externalfiles.FindUnlinkedFilesAction;
import org.jabref.gui.externalfiletype.EditExternalFileTypesAction;
import org.jabref.gui.externalfiletype.ExternalFileTypes;
import org.jabref.gui.help.AboutAction;
import org.jabref.gui.help.ErrorConsoleAction;
import org.jabref.gui.help.HelpAction;
import org.jabref.gui.help.SearchForUpdateAction;
import org.jabref.gui.importer.ImportCommand;
import org.jabref.gui.importer.ImportEntriesDialog;
import org.jabref.gui.importer.ManageCustomImportsAction;
import org.jabref.gui.importer.NewDatabaseAction;
import org.jabref.gui.importer.NewEntryAction;
import org.jabref.gui.importer.actions.OpenDatabaseAction;
import org.jabref.gui.importer.fetcher.LookupIdentifierAction;
import org.jabref.gui.integrity.IntegrityCheckAction;
import org.jabref.gui.journals.ManageJournalsAction;
import org.jabref.gui.keyboard.CustomizeKeyBindingAction;
import org.jabref.gui.keyboard.KeyBinding;
import org.jabref.gui.libraryproperties.LibraryPropertiesAction;
import org.jabref.gui.menus.FileHistoryMenu;
import org.jabref.gui.mergeentries.MergeEntriesAction;
import org.jabref.gui.metadata.BibtexStringEditorAction;
import org.jabref.gui.metadata.PreambleEditor;
import org.jabref.gui.preferences.ShowPreferencesAction;
import org.jabref.gui.protectedterms.ManageProtectedTermsAction;
import org.jabref.gui.push.PushToApplicationAction;
import org.jabref.gui.push.PushToApplicationsManager;
import org.jabref.gui.search.GlobalSearchBar;
import org.jabref.gui.shared.ConnectToSharedDatabaseCommand;
import org.jabref.gui.specialfields.SpecialFieldMenuItemFactory;
import org.jabref.gui.texparser.ParseTexAction;
import org.jabref.gui.undo.CountingUndoManager;
import org.jabref.gui.util.BackgroundTask;
import org.jabref.gui.util.DefaultTaskExecutor;
import org.jabref.logic.autosaveandbackup.AutosaveManager;
import org.jabref.logic.autosaveandbackup.BackupManager;
import org.jabref.logic.importer.IdFetcher;
import org.jabref.logic.importer.OpenDatabase;
import org.jabref.logic.importer.ParserResult;
import org.jabref.logic.importer.WebFetchers;
import org.jabref.logic.l10n.Localization;
import org.jabref.logic.undo.AddUndoableActionEvent;
import org.jabref.logic.undo.UndoChangeEvent;
import org.jabref.logic.undo.UndoRedoEvent;
import org.jabref.logic.util.io.FileUtil;
import org.jabref.model.database.BibDatabaseContext;
import org.jabref.model.database.BibDatabaseMode;
import org.jabref.model.database.shared.DatabaseLocation;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.SpecialField;
import org.jabref.model.entry.field.StandardField;
import org.jabref.model.entry.types.StandardEntryType;
import org.jabref.preferences.JabRefPreferences;
import org.jabref.preferences.LastFocusedTabPreferences;
import com.google.common.eventbus.Subscribe;
import org.fxmisc.easybind.EasyBind;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The main window of the application.
*/
public class JabRefFrame extends BorderPane {
// Frame titles.
public static final String FRAME_TITLE = "JabRef";
private static final Logger LOGGER = LoggerFactory.getLogger(JabRefFrame.class);
private final SplitPane splitPane = new SplitPane();
private final JabRefPreferences prefs = Globals.prefs;
private final GlobalSearchBar globalSearchBar = new GlobalSearchBar(this, Globals.stateManager);
private final ProgressBar progressBar = new ProgressBar();
private final FileHistoryMenu fileHistory;
private final Stage mainStage;
private final StateManager stateManager;
private final CountingUndoManager undoManager;
private SidePaneManager sidePaneManager;
private TabPane tabbedPane;
private final PushToApplicationsManager pushToApplicationsManager;
private final DialogService dialogService;
private SidePane sidePane;
    /**
     * Creates the main application frame. Most of the UI is built later in
     * {@code init()}; this constructor only wires up the collaborators that
     * depend on the primary stage.
     *
     * @param mainStage the primary JavaFX stage this frame is shown on
     */
    public JabRefFrame(Stage mainStage) {
        this.mainStage = mainStage;
        this.dialogService = new JabRefDialogService(mainStage, this);
        // Shared application-wide state (active database, selected entries, ...).
        this.stateManager = Globals.stateManager;
        this.pushToApplicationsManager = new PushToApplicationsManager(dialogService, stateManager);
        this.undoManager = Globals.undoManager;
        this.fileHistory = new FileHistoryMenu(prefs, dialogService, getOpenDatabaseAction());
    }
private static BasePanel getBasePanel(Tab tab) {
return (BasePanel) tab.getContent();
}
    /**
     * Sets up drag-and-drop of .bib files onto the tab header: while a file
     * hovers over the header a temporary "Open files..." indicator tab is
     * shown; dropping loads each file into its own tab.
     */
    public void initDragAndDrop() {
        // Placeholder tab shown while a .bib file hovers over the tab header.
        Tab dndIndicator = new Tab(Localization.lang("Open files..."), null);
        dndIndicator.getStyleClass().add("drop");
        // The skin is created lazily; wait for it before touching the header region.
        EasyBind.subscribe(tabbedPane.skinProperty(), skin -> {
            if (!(skin instanceof TabPaneSkin)) {
                return;
            }
            // We need to get the tab header; the following is an ugly workaround
            // (there is no public API for the header region).
            Node tabHeaderArea = ((TabPaneSkin) this.tabbedPane.getSkin())
                    .getChildren()
                    .stream()
                    .filter(node -> node.getStyleClass().contains("tab-header-area"))
                    .findFirst()
                    .orElseThrow();
            tabHeaderArea.setOnDragOver(event -> {
                if (DragAndDropHelper.hasBibFiles(event.getDragboard())) {
                    event.acceptTransferModes(TransferMode.ANY);
                    // Add the indicator tab at most once.
                    if (!tabbedPane.getTabs().contains(dndIndicator)) {
                        tabbedPane.getTabs().add(dndIndicator);
                    }
                    event.consume();
                } else {
                    tabbedPane.getTabs().remove(dndIndicator);
                }
            });
            tabHeaderArea.setOnDragExited(event -> tabbedPane.getTabs().remove(dndIndicator));
            tabHeaderArea.setOnDragDropped(event -> {
                tabbedPane.getTabs().remove(dndIndicator);
                boolean success = false;
                List<Path> bibFiles = DragAndDropHelper.getBibFiles(event.getDragboard());
                if (!bibFiles.isEmpty()) {
                    for (Path file : bibFiles) {
                        // Parse and open each dropped library in a new tab.
                        ParserResult pr = OpenDatabase.loadDatabase(file.toString(), Globals.prefs.getImportFormatPreferences(), Globals.getFileUpdateMonitor());
                        addParserResult(pr, true);
                    }
                    success = true;
                }
                event.setDropCompleted(success);
                event.consume();
            });
        });
    }
    /**
     * Installs the frame-global keyboard shortcuts (tab switching, table font
     * size, search focus, ...). Registered as an event filter so the bindings
     * fire regardless of which control currently has focus.
     */
    private void initKeyBindings() {
        addEventFilter(KeyEvent.KEY_PRESSED, event -> {
            Optional<KeyBinding> keyBinding = Globals.getKeyPrefs().mapToKeyBinding(event);
            if (keyBinding.isPresent()) {
                switch (keyBinding.get()) {
                    case FOCUS_ENTRY_TABLE:
                        getCurrentBasePanel().getMainTable().requestFocus();
                        event.consume();
                        break;
                    case NEXT_LIBRARY:
                        tabbedPane.getSelectionModel().selectNext();
                        event.consume();
                        break;
                    case PREVIOUS_LIBRARY:
                        tabbedPane.getSelectionModel().selectPrevious();
                        event.consume();
                        break;
                    case INCREASE_TABLE_FONT_SIZE:
                        increaseTableFontSize();
                        event.consume();
                        break;
                    case DECREASE_TABLE_FONT_SIZE:
                        decreaseTableFontSize();
                        event.consume();
                        break;
                    case DEFAULT_TABLE_FONT_SIZE:
                        setDefaultTableFontSize();
                        event.consume();
                        break;
                    case SEARCH:
                        // NOTE(review): unlike the other cases, SEARCH does not
                        // consume the event - confirm whether that is intentional.
                        getGlobalSearchBar().focus();
                        break;
                    default:
                        // All other bindings are handled by the focused component.
                }
            }
        });
    }
private void initShowTrackingNotification() {
if (!Globals.prefs.shouldAskToCollectTelemetry()) {
JabRefExecutorService.INSTANCE.submit(new TimerTask() {
@Override
public void run() {
DefaultTaskExecutor.runInJavaFXThread(JabRefFrame.this::showTrackingNotification);
}
}, 60000); // run in one minute
}
}
    /**
     * Shows the telemetry opt-in dialog (unless collection is already enabled)
     * and persists the user's choice. Always marks the question as asked so it
     * is not shown again.
     *
     * @return always {@code null} ({@link Void} return type, presumably so the
     *         method fits a value-returning functional interface at the call
     *         site - NOTE(review): confirm against callers)
     */
    private Void showTrackingNotification() {
        if (!Globals.prefs.shouldCollectTelemetry()) {
            boolean shouldCollect = dialogService.showConfirmationDialogAndWait(
                    Localization.lang("Telemetry: Help make JabRef better"),
                    Localization.lang("To improve the user experience, we would like to collect anonymous statistics on the features you use. We will only record what features you access and how often you do it. We will neither collect any personal data nor the content of bibliographic items. If you choose to allow data collection, you can later disable it via Options -> Preferences -> General."),
                    Localization.lang("Share anonymous statistics"),
                    Localization.lang("Don't share"));
            Globals.prefs.setShouldCollectTelemetry(shouldCollect);
        }
        // Record that the question was asked, regardless of the answer.
        Globals.prefs.askedToCollectTelemetry();
        return null;
    }
    /**
     * Refreshes the window title and all tab titles on the JavaFX application
     * thread; safe to call from background threads.
     */
    public void refreshTitleAndTabs() {
        DefaultTaskExecutor.runInJavaFXThread(() -> {
            setWindowTitle();
            updateAllTabTitles();
        });
    }
    /**
     * Sets the title of the main window.
     * <p>
     * NOTE(review): every setTitle(...) call below is commented out -
     * presumably pending the Swing-to-JavaFX migration - so this method
     * currently only computes the would-be title parts. Confirm and either
     * restore or remove the commented code.
     */
    public void setWindowTitle() {
        BasePanel panel = getCurrentBasePanel();
        // no database open
        if (panel == null) {
            //setTitle(FRAME_TITLE);
            return;
        }
        String mode = panel.getBibDatabaseContext().getMode().getFormattedName();
        String modeInfo = String.format(" (%s)", Localization.lang("%0 mode", mode));
        boolean isAutosaveEnabled = Globals.prefs.getBoolean(JabRefPreferences.LOCAL_AUTO_SAVE);
        if (panel.getBibDatabaseContext().getLocation() == DatabaseLocation.LOCAL) {
            // Flag unsaved local databases with "*" unless autosave takes care of saving.
            String changeFlag = panel.isModified() && !isAutosaveEnabled ? "*" : "";
            String databaseFile = panel.getBibDatabaseContext()
                    .getDatabaseFile()
                    .map(File::getPath)
                    .orElse(GUIGlobals.UNTITLED_TITLE);
            //setTitle(FRAME_TITLE + " - " + databaseFile + changeFlag + modeInfo);
        } else if (panel.getBibDatabaseContext().getLocation() == DatabaseLocation.SHARED) {
            //setTitle(FRAME_TITLE + " - " + panel.getBibDatabaseContext().getDBMSSynchronizer().getDBName() + " ["
            //        + Localization.lang("shared") + "]" + modeInfo);
        }
    }
/**
* The MacAdapter calls this method when a "BIB" file has been double-clicked from the Finder.
*/
public void openAction(String filePath) {
Path file = Paths.get(filePath);
// all the logic is done in openIt. Even raising an existing panel
getOpenDatabaseAction().openFile(file, true);
}
    /**
     * The MacAdapter calls this method when "About" is selected from the application menu.
     */
    public void about() {
        // Delegates to the help system's main page command.
        HelpAction.getMainHelpPageCommand().execute();
    }
    /**
     * Returns the application preferences instance used by this frame.
     */
    public JabRefPreferences prefs() {
        return prefs;
    }
    /**
     * Tears down all things started by JabRef.
     * <p>
     * FIXME: Currently some threads remain and therefore hinder JabRef to be closed properly
     *
     * @param filenames the filenames of all currently opened files - used for storing them if prefs openLastEdited is
     *                  set to true
     */
    private void tearDownJabRef(List<String> filenames) {
        //prefs.putBoolean(JabRefPreferences.WINDOW_MAXIMISED, getExtendedState() == Frame.MAXIMIZED_BOTH);
        if (prefs.getBoolean(JabRefPreferences.OPEN_LAST_EDITED)) {
            // Here we store the names of all current files. If
            // there is no current file, we remove any
            // previously stored filename.
            if (filenames.isEmpty()) {
                prefs.remove(JabRefPreferences.LAST_EDITED);
            } else {
                prefs.putStringList(JabRefPreferences.LAST_EDITED, filenames);
                // NOTE(review): assumes a current panel exists whenever any file is
                // open; getCurrentBasePanel() returning null would NPE here - confirm.
                File focusedDatabase = getCurrentBasePanel().getBibDatabaseContext().getDatabaseFile().orElse(null);
                new LastFocusedTabPreferences(prefs).setLastFocusedTab(focusedDatabase);
            }
        }
        fileHistory.storeHistory();
        // Persist all preference changes to disk before shutdown.
        prefs.flush();
    }
    /**
     * General info dialog. The MacAdapter calls this method when "Quit" is selected from the application menu, Cmd-Q
     * is pressed, or "Quit" is selected from the Dock. The function returns a boolean indicating if quitting is ok or
     * not.
     * <p>
     * Non-OSX JabRef calls this when choosing "Quit" from the menu
     * <p>
     * SIDE EFFECT: tears down JabRef
     *
     * @return true if the user chose to quit; false otherwise
     */
    public boolean quit() {
        // First ask if the user really wants to close, if the library has not been saved since last save.
        List<String> filenames = new ArrayList<>();
        for (int i = 0; i < tabbedPane.getTabs().size(); i++) {
            BasePanel panel = getBasePanelAt(i);
            BibDatabaseContext context = panel.getBibDatabaseContext();
            if (panel.isModified() && (context.getLocation() == DatabaseLocation.LOCAL)) {
                // Bring the unsaved tab to the front so the user sees what is being asked about.
                tabbedPane.getSelectionModel().select(i);
                if (!confirmClose(panel)) {
                    // User aborted the quit.
                    return false;
                }
            } else if (context.getLocation() == DatabaseLocation.SHARED) {
                // Disconnect cleanly from the shared database.
                context.convertToLocalDatabase();
                context.getDBMSSynchronizer().closeSharedDatabase();
                context.clearDBMSSynchronizer();
            }
            // Stop per-database background services and remember the file for restoring.
            AutosaveManager.shutdown(context);
            BackupManager.shutdown(context);
            context.getDatabaseFile().map(File::getAbsolutePath).ifPresent(filenames::add);
        }
        // Block until pending save operations have finished.
        WaitForSaveFinishedDialog waitForSaveFinishedDialog = new WaitForSaveFinishedDialog(dialogService);
        waitForSaveFinishedDialog.showAndWait(getBasePanelList());
        // Good bye!
        tearDownJabRef(filenames);
        Platform.exit();
        return true;
    }
private void initLayout() {
setProgressBarVisible(false);
BorderPane head = new BorderPane();
head.setTop(createMenu());
head.setCenter(createToolbar());
setTop(head);
SplitPane.setResizableWithParent(sidePane, Boolean.FALSE);
splitPane.getItems().addAll(sidePane, tabbedPane);
// We need to wait with setting the divider since it gets reset a few times during the initial set-up
mainStage.showingProperty().addListener(new ChangeListener<Boolean>() {
@Override
public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean showing) {
if (showing) {
setDividerPosition();
EasyBind.subscribe(sidePane.visibleProperty(), visible -> {
if (visible) {
if (!splitPane.getItems().contains(sidePane)) {
splitPane.getItems().add(0, sidePane);
setDividerPosition();
}
} else {
splitPane.getItems().remove(sidePane);
}
});
mainStage.showingProperty().removeListener(this);
observable.removeListener(this);
}
}
});
setCenter(splitPane);
}
private void setDividerPosition() {
splitPane.setDividerPositions(prefs.getDouble(JabRefPreferences.SIDE_PANE_WIDTH));
if (!splitPane.getDividers().isEmpty()) {
EasyBind.subscribe(splitPane.getDividers().get(0).positionProperty(),
position -> prefs.putDouble(JabRefPreferences.SIDE_PANE_WIDTH, position.doubleValue()));
}
}
    /**
     * Builds the main toolbar: library actions on the left (sized to match the
     * side pane), the global search bar in the middle, and entry/editing/
     * external-link actions on the right.
     *
     * @return the fully assembled toolbar node
     */
    private Node createToolbar() {
        // Spacers let the search bar float between the left and right button groups.
        Pane leftSpacer = new Pane();
        HBox.setHgrow(leftSpacer, Priority.SOMETIMES);
        Pane rightSpacer = new Pane();
        HBox.setHgrow(rightSpacer, Priority.SOMETIMES);
        ActionFactory factory = new ActionFactory(Globals.getKeyPrefs());
        // "New library" defaults to the user's preferred database mode.
        Button newLibrary;
        if (Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)) {
            newLibrary = factory.createIconButton(StandardActions.NEW_LIBRARY_BIBLATEX, new NewDatabaseAction(this, BibDatabaseMode.BIBLATEX));
        } else {
            newLibrary = factory.createIconButton(StandardActions.NEW_LIBRARY_BIBTEX, new NewDatabaseAction(this, BibDatabaseMode.BIBTEX));
        }
        HBox leftSide = new HBox(
                newLibrary,
                factory.createIconButton(StandardActions.OPEN_LIBRARY, new OpenDatabaseAction(this)),
                factory.createIconButton(StandardActions.SAVE_LIBRARY, new OldDatabaseCommandWrapper(Actions.SAVE, this, stateManager)),
                leftSpacer
        );
        // Keep the left group aligned with the side pane's width.
        leftSide.setMinWidth(100);
        leftSide.prefWidthProperty().bind(sidePane.widthProperty());
        leftSide.maxWidthProperty().bind(sidePane.widthProperty());
        final PushToApplicationAction pushToApplicationAction = getPushToApplicationsManager().getPushToApplicationAction();
        final Button pushToApplicationButton = factory.createIconButton(pushToApplicationAction.getActionInformation(), pushToApplicationAction);
        // Registered so the manager can swap the icon when the target application changes.
        pushToApplicationsManager.setToolBarButton(pushToApplicationButton);
        HBox rightSide = new HBox(
                factory.createIconButton(StandardActions.NEW_ARTICLE, new NewEntryAction(this, StandardEntryType.Article, dialogService, Globals.prefs, stateManager)),
                factory.createIconButton(StandardActions.NEW_ENTRY, new NewEntryAction(this, dialogService, Globals.prefs, stateManager)),
                factory.createIconButton(StandardActions.DELETE_ENTRY, new OldDatabaseCommandWrapper(Actions.DELETE, this, stateManager)),
                new Separator(Orientation.VERTICAL),
                factory.createIconButton(StandardActions.UNDO, new OldDatabaseCommandWrapper(Actions.UNDO, this, stateManager)),
                factory.createIconButton(StandardActions.REDO, new OldDatabaseCommandWrapper(Actions.REDO, this, stateManager)),
                factory.createIconButton(StandardActions.CUT, new OldDatabaseCommandWrapper(Actions.CUT, this, stateManager)),
                factory.createIconButton(StandardActions.COPY, new OldDatabaseCommandWrapper(Actions.COPY, this, stateManager)),
                factory.createIconButton(StandardActions.PASTE, new OldDatabaseCommandWrapper(Actions.PASTE, this, stateManager)),
                new Separator(Orientation.VERTICAL),
                pushToApplicationButton,
                factory.createIconButton(StandardActions.GENERATE_CITE_KEYS, new OldDatabaseCommandWrapper(Actions.MAKE_KEY, this, stateManager)),
                factory.createIconButton(StandardActions.CLEANUP_ENTRIES, new OldDatabaseCommandWrapper(Actions.CLEANUP, this, stateManager)),
                new Separator(Orientation.VERTICAL),
                factory.createIconButton(StandardActions.FORK_ME, new OpenBrowserAction("https://github.com/JabRef/jabref")),
                factory.createIconButton(StandardActions.OPEN_FACEBOOK, new OpenBrowserAction("https://www.facebook.com/JabRef/")),
                factory.createIconButton(StandardActions.OPEN_TWITTER, new OpenBrowserAction("https://twitter.com/jabref_org"))
        );
        // The search bar claims all remaining horizontal space.
        HBox.setHgrow(globalSearchBar, Priority.ALWAYS);
        ToolBar toolBar = new ToolBar(
                leftSide,
                globalSearchBar,
                rightSpacer,
                rightSide);
        toolBar.getStyleClass().add("mainToolbar");
        return toolBar;
    }
/**
 * Returns the {@link BasePanel} displayed in the tab at the given position.
 *
 * @param i index of the tab whose panel is requested
 * @return the panel that is the content of that tab
 */
public BasePanel getBasePanelAt(int i) {
    Tab tab = tabbedPane.getTabs().get(i);
    return (BasePanel) tab.getContent();
}
/**
 * Collects the {@link BasePanel} of every open tab, in tab order.
 *
 * @return a fresh, mutable list containing all open panels
 */
public List<BasePanel> getBasePanelList() {
    List<BasePanel> panels = new ArrayList<>();
    for (int index = 0; index < getBasePanelCount(); index++) {
        panels.add(getBasePanelAt(index));
    }
    return panels;
}
/**
 * Brings the tab at the given index to the front.
 *
 * @param i index of the tab to select
 */
public void showBasePanelAt(int i) {
tabbedPane.getSelectionModel().select(i);
}
/**
 * Brings the tab containing the given panel to the front.
 * If the panel is not shown in any tab, {@code getTab} returns null and the
 * selection is cleared.
 *
 * @param bp the panel whose tab should be selected
 */
public void showBasePanel(BasePanel bp) {
tabbedPane.getSelectionModel().select(getTab(bp));
}
/**
 * Builds the frame's core UI (side pane, tab pane, layout, key bindings,
 * drag-and-drop) and wires the global application state to the tab selection.
 * Must run before the frame is shown.
 */
public void init() {
sidePaneManager = new SidePaneManager(Globals.prefs, this);
sidePane = sidePaneManager.getPane();
tabbedPane = new TabPane();
tabbedPane.setTabDragPolicy(TabPane.TabDragPolicy.REORDER);
initLayout();
initKeyBindings();
initDragAndDrop();
//setBounds(GraphicsEnvironment.getLocalGraphicsEnvironment().getMaximumWindowBounds());
//WindowLocation pw = new WindowLocation(this, JabRefPreferences.POS_X, JabRefPreferences.POS_Y, JabRefPreferences.SIZE_X,
// JabRefPreferences.SIZE_Y);
//pw.displayWindowAtStoredLocation();
// Bind global state: the active database always mirrors the selected tab.
stateManager.activeDatabaseProperty().bind(
EasyBind.map(tabbedPane.getSelectionModel().selectedItemProperty(),
tab -> Optional.ofNullable(tab).map(JabRefFrame::getBasePanel).map(BasePanel::getBibDatabaseContext)));
// Subscribe to the search: forward the global query to the current panel.
EasyBind.subscribe(stateManager.activeSearchQueryProperty(),
query -> {
if (getCurrentBasePanel() != null) {
getCurrentBasePanel().setCurrentSearchQuery(query);
}
});
/*
* The following state listener makes sure focus is registered with the
* correct database when the user switches tabs. Without this,
* cut/paste/copy operations would sometimes occur in the wrong tab.
*/
EasyBind.subscribe(tabbedPane.getSelectionModel().selectedItemProperty(), tab -> {
if (tab == null) {
// No tab selected (e.g. the last tab was just closed) -> nothing to sync.
return;
}
BasePanel newBasePanel = getBasePanel(tab);
// Poor-mans binding to global state
stateManager.setSelectedEntries(newBasePanel.getSelectedEntries());
// Update active search query when switching between databases
stateManager.activeSearchQueryProperty().set(newBasePanel.getCurrentSearchQuery());
// groupSidePane.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(GroupSidePane.class));
//previewToggle.setSelected(Globals.prefs.getPreviewPreferences().isPreviewPanelEnabled());
//generalFetcher.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(WebSearchPane.class));
//openOfficePanel.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(OpenOfficeSidePanel.class));
setWindowTitle();
// Update search autocompleter with information for the correct database:
newBasePanel.updateSearchManager();
newBasePanel.getUndoManager().postUndoRedoEvent();
newBasePanel.getMainTable().requestFocus();
});
initShowTrackingNotification();
}
/**
 * Returns the panel of the currently selected tab, or {@code null} when the
 * tab pane does not exist yet or no tab is selected.
 */
public BasePanel getCurrentBasePanel() {
    if (tabbedPane == null) {
        return null;
    }
    Tab selected = tabbedPane.getSelectionModel().getSelectedItem();
    return (selected == null) ? null : getBasePanel(selected);
}
/**
 * @return the BasePanel count, i.e. the number of open tabs.
 */
public int getBasePanelCount() {
return tabbedPane.getTabs().size();
}
/**
 * Finds the tab whose content is the given panel (compared by identity).
 *
 * @param comp the panel to look for
 * @return the matching tab, or {@code null} if the panel is not shown in any tab
 */
private Tab getTab(BasePanel comp) {
    return tabbedPane.getTabs()
                     .stream()
                     .filter(tab -> tab.getContent() == comp)
                     .findFirst()
                     .orElse(null);
}
/**
 * @return the tab pane hosting all open libraries
 * @deprecated do not operate on tabs but on BibDatabaseContexts
 */
@Deprecated
public TabPane getTabbedPane() {
return tabbedPane;
}
/**
 * Sets the title and tooltip of the tab showing the given panel.
 * Runs on the JavaFX application thread because it mutates scene-graph nodes.
 *
 * @param comp    the panel whose tab is updated
 * @param title   new tab text
 * @param toolTip new tooltip text
 */
public void setTabTitle(BasePanel comp, String title, String toolTip) {
DefaultTaskExecutor.runInJavaFXThread(() -> {
Tab tab = getTab(comp);
tab.setText(title);
tab.setTooltip(new Tooltip(toolTip));
});
}
/**
 * Assembles the application's main menu bar: File, Edit, Library, Quality,
 * Tools, View, Options and Help menus, wiring each item to its action.
 *
 * @return the fully populated menu bar (styled with class "mainMenu")
 */
private MenuBar createMenu() {
ActionFactory factory = new ActionFactory(Globals.getKeyPrefs());
Menu file = new Menu(Localization.lang("File"));
Menu edit = new Menu(Localization.lang("Edit"));
Menu library = new Menu(Localization.lang("Library"));
Menu quality = new Menu(Localization.lang("Quality"));
Menu view = new Menu(Localization.lang("View"));
Menu tools = new Menu(Localization.lang("Tools"));
Menu options = new Menu(Localization.lang("Options"));
Menu help = new Menu(Localization.lang("Help"));
// "File" menu: creating, opening, saving, importing/exporting and closing libraries.
file.getItems().addAll(
factory.createSubMenu(StandardActions.NEW_LIBRARY,
factory.createMenuItem(StandardActions.NEW_LIBRARY_BIBTEX, new NewDatabaseAction(this, BibDatabaseMode.BIBTEX)),
factory.createMenuItem(StandardActions.NEW_LIBRARY_BIBLATEX, new NewDatabaseAction(this, BibDatabaseMode.BIBLATEX))),
factory.createMenuItem(StandardActions.OPEN_LIBRARY, getOpenDatabaseAction()),
fileHistory,
factory.createMenuItem(StandardActions.SAVE_LIBRARY, new OldDatabaseCommandWrapper(Actions.SAVE, this, stateManager)),
factory.createMenuItem(StandardActions.SAVE_LIBRARY_AS, new OldDatabaseCommandWrapper(Actions.SAVE_AS, this, stateManager)),
factory.createMenuItem(StandardActions.SAVE_ALL, new SaveAllAction(this)),
new SeparatorMenuItem(),
factory.createSubMenu(StandardActions.IMPORT,
factory.createMenuItem(StandardActions.MERGE_DATABASE, new OldDatabaseCommandWrapper(Actions.MERGE_DATABASE, this, stateManager)), // TODO: merge with import
factory.createMenuItem(StandardActions.IMPORT_INTO_CURRENT_LIBRARY, new ImportCommand(this, false)),
factory.createMenuItem(StandardActions.IMPORT_INTO_NEW_LIBRARY, new ImportCommand(this, true))),
factory.createSubMenu(StandardActions.EXPORT,
factory.createMenuItem(StandardActions.EXPORT_ALL, new ExportCommand(this, false, Globals.prefs)),
factory.createMenuItem(StandardActions.EXPORT_SELECTED, new ExportCommand(this, true, Globals.prefs)),
factory.createMenuItem(StandardActions.SAVE_SELECTED_AS_PLAIN_BIBTEX, new OldDatabaseCommandWrapper(Actions.SAVE_SELECTED_AS_PLAIN, this, stateManager))),
factory.createMenuItem(StandardActions.CONNECT_TO_SHARED_DB, new ConnectToSharedDatabaseCommand(this)),
factory.createMenuItem(StandardActions.PULL_CHANGES_FROM_SHARED_DB, new OldDatabaseCommandWrapper(Actions.PULL_CHANGES_FROM_SHARED_DATABASE, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.CLOSE_LIBRARY, new CloseDatabaseAction()),
factory.createMenuItem(StandardActions.QUIT, new CloseAction())
);
// "Edit" menu: undo/redo, clipboard operations and keyword management.
edit.getItems().addAll(
factory.createMenuItem(StandardActions.UNDO, new OldDatabaseCommandWrapper(Actions.UNDO, this, stateManager)),
factory.createMenuItem(StandardActions.REDO, new OldDatabaseCommandWrapper(Actions.REDO, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.CUT, new EditAction(Actions.CUT)),
factory.createMenuItem(StandardActions.COPY, new EditAction(Actions.COPY)),
factory.createSubMenu(StandardActions.COPY_MORE,
factory.createMenuItem(StandardActions.COPY_TITLE, new OldDatabaseCommandWrapper(Actions.COPY_TITLE, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_KEY, new OldDatabaseCommandWrapper(Actions.COPY_KEY, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_CITE_KEY, new OldDatabaseCommandWrapper(Actions.COPY_CITE_KEY, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_KEY_AND_TITLE, new OldDatabaseCommandWrapper(Actions.COPY_KEY_AND_TITLE, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_KEY_AND_LINK, new OldDatabaseCommandWrapper(Actions.COPY_KEY_AND_LINK, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_CITATION_PREVIEW, new OldDatabaseCommandWrapper(Actions.COPY_CITATION_HTML, this, stateManager)),
factory.createMenuItem(StandardActions.EXPORT_SELECTED_TO_CLIPBOARD, new ExportToClipboardAction(this, dialogService))),
factory.createMenuItem(StandardActions.PASTE, new EditAction(Actions.PASTE)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.MANAGE_KEYWORDS, new ManageKeywordsAction(stateManager))
);
// Special-field items are only shown when the feature is enabled in the preferences.
if (Globals.prefs.getBoolean(JabRefPreferences.SPECIALFIELDSENABLED)) {
edit.getItems().addAll(
SpecialFieldMenuItemFactory.createSpecialFieldMenuForActiveDatabase(SpecialField.RANKING, factory, undoManager),
SpecialFieldMenuItemFactory.getSpecialFieldSingleItemForActiveDatabase(SpecialField.RELEVANCE, factory),
SpecialFieldMenuItemFactory.getSpecialFieldSingleItemForActiveDatabase(SpecialField.QUALITY, factory),
SpecialFieldMenuItemFactory.getSpecialFieldSingleItemForActiveDatabase(SpecialField.PRINTED, factory),
SpecialFieldMenuItemFactory.createSpecialFieldMenuForActiveDatabase(SpecialField.PRIORITY, factory, undoManager),
SpecialFieldMenuItemFactory.createSpecialFieldMenuForActiveDatabase(SpecialField.READ_STATUS, factory, undoManager),
new SeparatorMenuItem()
);
}
//@formatter:off
// "Library" menu: entry creation/deletion and library-level settings.
library.getItems().addAll(
factory.createMenuItem(StandardActions.NEW_ENTRY, new NewEntryAction(this, dialogService, Globals.prefs, stateManager)),
factory.createMenuItem(StandardActions.DELETE_ENTRY, new OldDatabaseCommandWrapper(Actions.DELETE, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.LIBRARY_PROPERTIES, new LibraryPropertiesAction(this, dialogService, stateManager)),
factory.createMenuItem(StandardActions.EDIT_PREAMBLE, new PreambleEditor(stateManager, undoManager, this.getDialogService())),
factory.createMenuItem(StandardActions.EDIT_STRINGS, new BibtexStringEditorAction(stateManager)),
factory.createMenuItem(StandardActions.MANAGE_CITE_KEY_PATTERNS, new BibtexKeyPatternAction(this, stateManager)),
factory.createMenuItem(StandardActions.MASS_SET_FIELDS, new MassSetFieldsAction(stateManager, dialogService, undoManager))
);
// Build the identifier-lookup submenu dynamically from the registered ID fetchers.
Menu lookupIdentifiers = factory.createSubMenu(StandardActions.LOOKUP_DOC_IDENTIFIER);
for (IdFetcher<?> fetcher : WebFetchers.getIdFetchers(Globals.prefs.getImportFormatPreferences())) {
LookupIdentifierAction<?> identifierAction = new LookupIdentifierAction<>(this, fetcher, stateManager, undoManager);
lookupIdentifiers.getItems().add(factory.createMenuItem(identifierAction.getAction(), identifierAction));
}
// "Quality" menu: duplicate detection, integrity checks and cleanup.
quality.getItems().addAll(
factory.createMenuItem(StandardActions.FIND_DUPLICATES, new DuplicateSearch(this, dialogService, stateManager)),
factory.createMenuItem(StandardActions.MERGE_ENTRIES, new MergeEntriesAction(this, stateManager)),
factory.createMenuItem(StandardActions.CHECK_INTEGRITY, new IntegrityCheckAction(this, stateManager, Globals.TASK_EXECUTOR)),
factory.createMenuItem(StandardActions.CLEANUP_ENTRIES, new OldDatabaseCommandWrapper(Actions.CLEANUP, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.SET_FILE_LINKS, new AutoLinkFilesAction(this, prefs, stateManager, undoManager, Globals.TASK_EXECUTOR))
);
// PushToApplication
final PushToApplicationAction pushToApplicationAction = pushToApplicationsManager.getPushToApplicationAction();
final MenuItem pushToApplicationMenuItem = factory.createMenuItem(pushToApplicationAction.getActionInformation(), pushToApplicationAction);
pushToApplicationsManager.setMenuItem(pushToApplicationMenuItem);
// "Tools" menu: auxiliary workflows (TeX parsing, XMP, key generation, ...).
tools.getItems().addAll(
factory.createMenuItem(StandardActions.PARSE_TEX, new ParseTexAction(stateManager)),
factory.createMenuItem(StandardActions.NEW_SUB_LIBRARY_FROM_AUX, new NewSubLibraryAction(this, stateManager)),
factory.createMenuItem(StandardActions.FIND_UNLINKED_FILES, new FindUnlinkedFilesAction(this, stateManager)),
factory.createMenuItem(StandardActions.WRITE_XMP, new OldDatabaseCommandWrapper(Actions.WRITE_XMP, this, stateManager)),
factory.createMenuItem(StandardActions.COPY_LINKED_FILES, new CopyFilesAction(stateManager, this.getDialogService())),
factory.createMenuItem(StandardActions.EXTRACT_BIBTEX, new ExtractBibtexAction(stateManager)),
new SeparatorMenuItem(),
lookupIdentifiers,
factory.createMenuItem(StandardActions.DOWNLOAD_FULL_TEXT, new OldDatabaseCommandWrapper(Actions.DOWNLOAD_FULL_TEXT, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.GENERATE_CITE_KEYS, new OldDatabaseCommandWrapper(Actions.MAKE_KEY, this, stateManager)),
factory.createMenuItem(StandardActions.REPLACE_ALL, new OldDatabaseCommandWrapper(Actions.REPLACE_ALL, this, stateManager)),
factory.createMenuItem(StandardActions.SEND_AS_EMAIL, new OldDatabaseCommandWrapper(Actions.SEND_AS_EMAIL, this, stateManager)),
pushToApplicationMenuItem,
factory.createSubMenu(StandardActions.ABBREVIATE,
factory.createMenuItem(StandardActions.ABBREVIATE_ISO, new OldDatabaseCommandWrapper(Actions.ABBREVIATE_ISO, this, stateManager)),
factory.createMenuItem(StandardActions.ABBREVIATE_MEDLINE, new OldDatabaseCommandWrapper(Actions.ABBREVIATE_MEDLINE, this, stateManager))),
factory.createMenuItem(StandardActions.UNABBREVIATE, new OldDatabaseCommandWrapper(Actions.UNABBREVIATE, this, stateManager))
);
// "View" menu: rebuilt on every opening so the check marks reflect the
// current visibility of the side-pane components.
SidePaneComponent webSearch = sidePaneManager.getComponent(SidePaneType.WEB_SEARCH);
SidePaneComponent groups = sidePaneManager.getComponent(SidePaneType.GROUPS);
SidePaneComponent openOffice = sidePaneManager.getComponent(SidePaneType.OPEN_OFFICE);
view.getItems().add(new SeparatorMenuItem());
view.setOnShowing(event -> {
view.getItems().clear();
view.getItems().addAll(
factory.createCheckMenuItem(webSearch.getToggleAction(), webSearch.getToggleCommand(), sidePaneManager.isComponentVisible(SidePaneType.WEB_SEARCH)),
factory.createCheckMenuItem(groups.getToggleAction(), groups.getToggleCommand(), sidePaneManager.isComponentVisible(SidePaneType.GROUPS)),
factory.createCheckMenuItem(openOffice.getToggleAction(), openOffice.getToggleCommand(), sidePaneManager.isComponentVisible(SidePaneType.OPEN_OFFICE)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.NEXT_PREVIEW_STYLE, new OldDatabaseCommandWrapper(Actions.NEXT_PREVIEW_STYLE, this, stateManager)),
factory.createMenuItem(StandardActions.PREVIOUS_PREVIEW_STYLE, new OldDatabaseCommandWrapper(Actions.PREVIOUS_PREVIEW_STYLE, this, stateManager)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.SHOW_PDF_VIEWER, new ShowDocumentViewerAction()),
factory.createMenuItem(StandardActions.EDIT_ENTRY, new OldDatabaseCommandWrapper(Actions.EDIT, this, stateManager)),
factory.createMenuItem(StandardActions.OPEN_CONSOLE, new OldDatabaseCommandWrapper(Actions.OPEN_CONSOLE, this, stateManager))
);
});
// "Options" menu: preferences and customization dialogs.
options.getItems().addAll(
factory.createMenuItem(StandardActions.SHOW_PREFS, new ShowPreferencesAction(this, Globals.TASK_EXECUTOR)),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.SETUP_GENERAL_FIELDS, new SetupGeneralFieldsAction()),
factory.createMenuItem(StandardActions.MANAGE_CUSTOM_IMPORTS, new ManageCustomImportsAction()),
factory.createMenuItem(StandardActions.MANAGE_CUSTOM_EXPORTS, new ManageCustomExportsAction()),
factory.createMenuItem(StandardActions.MANAGE_EXTERNAL_FILETYPES, new EditExternalFileTypesAction()),
factory.createMenuItem(StandardActions.MANAGE_JOURNALS, new ManageJournalsAction()),
factory.createMenuItem(StandardActions.CUSTOMIZE_KEYBINDING, new CustomizeKeyBindingAction()),
factory.createMenuItem(StandardActions.MANAGE_PROTECTED_TERMS, new ManageProtectedTermsAction()),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.MANAGE_CONTENT_SELECTORS, new ManageContentSelectorAction(this, stateManager))
// TODO: Reenable customize entry types feature (https://github.com/JabRef/jabref/issues/4719)
//factory.createMenuItem(StandardActions.CUSTOMIZE_ENTRY_TYPES, new CustomizeEntryAction(this)),
);
// "Help" menu: documentation, update check and web links.
help.getItems().addAll(
factory.createMenuItem(StandardActions.HELP, HelpAction.getMainHelpPageCommand()),
factory.createMenuItem(StandardActions.OPEN_FORUM, new OpenBrowserAction("http://discourse.jabref.org/")),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.ERROR_CONSOLE, new ErrorConsoleAction()),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.SEARCH_FOR_UPDATES, new SearchForUpdateAction(Globals.BUILD_INFO, prefs.getVersionPreferences(), dialogService, Globals.TASK_EXECUTOR)),
factory.createSubMenu(StandardActions.WEB_MENU,
factory.createMenuItem(StandardActions.OPEN_WEBPAGE, new OpenBrowserAction("https://jabref.org/")),
factory.createMenuItem(StandardActions.OPEN_BLOG, new OpenBrowserAction("https://blog.jabref.org/")),
factory.createMenuItem(StandardActions.OPEN_FACEBOOK, new OpenBrowserAction("https://www.facebook.com/JabRef/")),
factory.createMenuItem(StandardActions.OPEN_TWITTER, new OpenBrowserAction("https://twitter.com/jabref_org")),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.FORK_ME, new OpenBrowserAction("https://github.com/JabRef/jabref")),
factory.createMenuItem(StandardActions.OPEN_DEV_VERSION_LINK, new OpenBrowserAction("https://builds.jabref.org/master/")),
factory.createMenuItem(StandardActions.OPEN_CHANGELOG, new OpenBrowserAction("https://github.com/JabRef/jabref/blob/master/CHANGELOG.md")),
new SeparatorMenuItem(),
factory.createMenuItem(StandardActions.DONATE, new OpenBrowserAction("https://donations.jabref.org"))
),
factory.createMenuItem(StandardActions.ABOUT, new AboutAction())
);
//@formatter:on
MenuBar menu = new MenuBar();
menu.getStyleClass().add("mainMenu");
menu.getMenus().addAll(
file,
edit,
library,
quality,
tools,
view,
options,
help);
menu.setUseSystemMenuBar(true);
return menu;
}
/**
 * Integrates a parser result into the UI: either merges its entries into the
 * currently open tab, or opens (or focuses) a tab for its database.
 *
 * @param pr         the parsed library to add
 * @param focusPanel whether a newly created tab should be brought to the front
 */
public void addParserResult(ParserResult pr, boolean focusPanel) {
if (pr.toOpenTab()) {
// Add the entries to the open tab.
BasePanel panel = getCurrentBasePanel();
if (panel == null) {
// There is no open tab to add to, so we create a new tab:
addTab(pr.getDatabaseContext(), focusPanel);
} else {
List<BibEntry> entries = new ArrayList<>(pr.getDatabase().getEntries());
addImportedEntries(panel, entries);
}
} else {
// only add tab if DB is not already open
Optional<BasePanel> panel = getBasePanelList().stream()
.filter(p -> p.getBibDatabaseContext().getDatabasePath().equals(pr.getFile()))
.findFirst();
if (panel.isPresent()) {
// Library already open -> just switch to its tab.
tabbedPane.getSelectionModel().select(getTab(panel.get()));
} else {
addTab(pr.getDatabaseContext(), focusPanel);
}
}
}
/**
 * This method causes all open BasePanels to set up their tables anew. When called from PrefsDialog3, this updates
 * to the new settings.
 */
public void setupAllTables() {
    // This action can be invoked without an open database, so we have to check
    // each tab before applying changes; notifying every tab avoids problems
    // when the column set changes.
    for (BasePanel panel : getBasePanelList()) {
        if (panel.getDatabase() != null) {
            DefaultTaskExecutor.runInJavaFXThread(panel::setupMainPanel);
        }
    }
}
/**
 * Collects one path string per open panel: the canonical database file path,
 * or an empty string for libraries that have no file yet. Panels whose path
 * cannot be canonicalized are logged and skipped.
 */
private List<String> collectDatabaseFilePaths() {
    List<String> paths = new ArrayList<>(getBasePanelCount());
    for (BasePanel panel : getBasePanelList()) {
        try {
            Optional<File> databaseFile = panel.getBibDatabaseContext().getDatabaseFile();
            if (databaseFile.isPresent()) {
                // db file exists
                paths.add(databaseFile.get().getCanonicalPath());
            } else {
                paths.add("");
            }
        } catch (IOException ex) {
            LOGGER.error("Invalid database file path: " + ex.getMessage());
        }
    }
    return paths;
}
/**
 * Computes, for every open library, the shortest path suffix that still
 * distinguishes it from all other open libraries.
 */
private List<String> getUniquePathParts() {
    return FileUtil.uniquePathSubstrings(collectDatabaseFilePaths());
}
/**
 * Refreshes every tab's title and tooltip. When two open libraries share a
 * file name, a distinguishing directory suffix is appended to the title.
 */
public void updateAllTabTitles() {
List<String> paths = getUniquePathParts();
for (int i = 0; i < getBasePanelCount(); i++) {
String uniqPath = paths.get(i);
Optional<File> file = getBasePanelAt(i).getBibDatabaseContext().getDatabaseFile();
if (file.isPresent()) {
if (!uniqPath.equals(file.get().getName()) && uniqPath.contains(File.separator)) {
// remove filename
uniqPath = uniqPath.substring(0, uniqPath.lastIndexOf(File.separator));
tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle() + " \u2014 " + uniqPath);
} else {
// set original filename (again)
tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle());
}
} else {
// Library without a file: plain title only.
tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle());
}
// Tooltip shows the full absolute path (or nothing for unsaved libraries).
tabbedPane.getTabs().get(i).setTooltip(new Tooltip(file.map(File::getAbsolutePath).orElse(null)));
}
}
/**
 * Adds a new tab for the given panel (on the JavaFX thread), wires its close
 * handler and undo listener, and starts autosave/backup for its database.
 *
 * @param basePanel  the panel to show in the new tab
 * @param raisePanel whether to select the new tab immediately
 */
public void addTab(BasePanel basePanel, boolean raisePanel) {
DefaultTaskExecutor.runInJavaFXThread(() -> {
// add tab
Tab newTab = new Tab(basePanel.getTabTitle(), basePanel);
tabbedPane.getTabs().add(newTab);
// Route the tab's close button through our confirm-and-cleanup logic.
newTab.setOnCloseRequest(event -> {
closeTab((BasePanel) newTab.getContent());
event.consume();
});
// update all tab titles
updateAllTabTitles();
if (raisePanel) {
tabbedPane.getSelectionModel().select(newTab);
}
// Register undo/redo listener
basePanel.getUndoManager().registerListener(new UndoRedoEventManager());
BibDatabaseContext context = basePanel.getBibDatabaseContext();
if (readyForAutosave(context)) {
AutosaveManager autosaver = AutosaveManager.start(context);
autosaver.registerListener(new AutosaveUIManager(basePanel));
}
BackupManager.start(context, Globals.entryTypesManager, prefs);
// Track opening
trackOpenNewDatabase(basePanel);
});
}
/**
 * Reports an "OpenNewDatabase" telemetry event carrying the entry count of the
 * freshly opened library. A no-op when telemetry is disabled.
 */
private void trackOpenNewDatabase(BasePanel basePanel) {
    Map<String, String> properties = new HashMap<>();
    Map<String, Double> measurements = new HashMap<>();
    double entryCount = basePanel.getBibDatabaseContext().getDatabase().getEntryCount();
    measurements.put("NumberOfEntries", entryCount);
    Globals.getTelemetryClient()
           .ifPresent(client -> client.trackEvent("OpenNewDatabase", properties, measurements));
}
/**
 * Creates a new {@link BasePanel} for the given database context and shows it
 * in a new tab.
 *
 * @param databaseContext the (non-null) database to open
 * @param raisePanel      whether to select the new tab immediately
 * @return the newly created panel
 */
public BasePanel addTab(BibDatabaseContext databaseContext, boolean raisePanel) {
Objects.requireNonNull(databaseContext);
BasePanel bp = new BasePanel(this, BasePanelPreferences.from(Globals.prefs), databaseContext, ExternalFileTypes.getInstance());
addTab(bp, raisePanel);
return bp;
}
/**
 * Decides whether autosave may be started for the given database: it must be
 * backed by a file, and be either a shared database or a local one with the
 * local-autosave preference enabled.
 */
private boolean readyForAutosave(BibDatabaseContext context) {
    boolean shared = context.getLocation() == DatabaseLocation.SHARED;
    boolean localWithAutosave = (context.getLocation() == DatabaseLocation.LOCAL)
            && Globals.prefs.getBoolean(JabRefPreferences.LOCAL_AUTO_SAVE);
    return (shared || localWithAutosave) && context.getDatabaseFile().isPresent();
}
/**
 * Opens the import inspection dialog to let the user decide which of the given entries to import.
 *
 * @param panel The BasePanel to add to.
 * @param entries The entries to add.
 */
private void addImportedEntries(final BasePanel panel, final List<BibEntry> entries) {
// Wrap the already-parsed entries so the dialog's background-task API can consume them.
BackgroundTask<List<BibEntry>> task = BackgroundTask.wrap(() -> entries);
ImportEntriesDialog dialog = new ImportEntriesDialog(panel.getBibDatabaseContext(), task);
dialog.setTitle(Localization.lang("Import"));
dialog.showAndWait();
}
/**
 * @return the "recently opened files" menu shown in the File menu
 */
public FileHistoryMenu getFileHistory() {
return fileHistory;
}
/**
 * Set the visibility of the progress bar in the right end of the status line at the bottom of the frame.
 *
 * @param visible {@code true} to show the progress bar, {@code false} to hide it
 */
public void setProgressBarVisible(final boolean visible) {
progressBar.setVisible(visible);
}
/**
 * Sets whether the progress bar shows an indeterminate (busy) animation.
 *
 * @param value {@code true} to switch the bar to indeterminate mode,
 *              {@code false} to reset it to an empty (0%) determinate state
 */
public void setProgressBarIndeterminate(final boolean value) {
    // Bug fix: the parameter used to be ignored, so the bar was forced into
    // indeterminate mode regardless of the requested state.
    progressBar.setProgress(value ? ProgressBar.INDETERMINATE_PROGRESS : 0);
}
/**
 * Tells whether exactly one entry is selected and that entry has a FILE field.
 *
 * @param selectEntryList the entries currently selected in the active panel
 * @return {@code true} iff the selection is a single entry carrying a file link;
 *         {@code false} for empty or multiple selections
 */
private boolean isExistFile(List<BibEntry> selectEntryList) {
    boolean singleSelection = selectEntryList.size() == 1;
    return singleSelection && selectEntryList.get(0).getField(StandardField.FILE).isPresent();
}
/**
 * Tells whether exactly one entry is selected and that entry has a URL or DOI field.
 *
 * @param selectEntryList the entries currently selected in the active panel
 * @return {@code true} iff the selection is a single entry with a URL or DOI;
 *         {@code false} for empty or multiple selections
 */
private boolean isExistURLorDOI(List<BibEntry> selectEntryList) {
    if (selectEntryList.size() != 1) {
        return false;
    }
    BibEntry entry = selectEntryList.get(0);
    return entry.getField(StandardField.URL).isPresent()
            || entry.getField(StandardField.DOI).isPresent();
}
/**
 * Ask if the user really wants to close the given database, offering to save
 * pending changes first.
 *
 * @param panel the panel whose database is about to be closed
 * @return true if the user chooses to close the database (either after a
 *         successful save or by discarding changes); false if saving failed
 *         or the user cancelled
 */
private boolean confirmClose(BasePanel panel) {
String filename = panel.getBibDatabaseContext()
.getDatabasePath()
.map(Path::toAbsolutePath)
.map(Path::toString)
.orElse(GUIGlobals.UNTITLED_TITLE);
ButtonType saveChanges = new ButtonType(Localization.lang("Save changes"), ButtonBar.ButtonData.YES);
ButtonType discardChanges = new ButtonType(Localization.lang("Discard changes"), ButtonBar.ButtonData.NO);
ButtonType cancel = new ButtonType(Localization.lang("Return to JabRef"), ButtonBar.ButtonData.CANCEL_CLOSE);
Optional<ButtonType> response = dialogService.showCustomButtonDialogAndWait(Alert.AlertType.CONFIRMATION,
Localization.lang("Save before closing"),
Localization.lang("Library '%0' has changed.", filename),
saveChanges, discardChanges, cancel);
if (response.isPresent() && response.get().equals(saveChanges)) {
// The user wants to save.
try {
SaveDatabaseAction saveAction = new SaveDatabaseAction(panel, Globals.prefs, Globals.entryTypesManager);
if (saveAction.save()) {
// Saved, now exit.
return true;
}
// The action was either canceled or unsuccessful.
dialogService.notify(Localization.lang("Unable to save library"));
} catch (Throwable ex) {
LOGGER.error("A problem occurred when trying to save the file", ex);
dialogService.showErrorDialogAndWait(Localization.lang("Save library"), Localization.lang("Could not save file."), ex);
}
// Save was cancelled or an error occurred.
return false;
}
// Dialog dismissed or "discard" -> close; only an explicit cancel keeps the tab open.
return !response.isPresent() || !response.get().equals(cancel);
}
/**
 * Closes the tab of the given panel, asking for confirmation when a local
 * database has unsaved changes, and shutting down the shared-database
 * synchronizer, autosave and backup services as appropriate.
 *
 * @param panel the panel to close; a null panel (empty tab) is ignored
 */
private void closeTab(BasePanel panel) {
// empty tab without database
if (panel == null) {
return;
}
BibDatabaseContext context = panel.getBibDatabaseContext();
if (panel.isModified() && (context.getLocation() == DatabaseLocation.LOCAL)) {
if (confirmClose(panel)) {
removeTab(panel);
} else {
// User cancelled: keep the tab and skip the shutdown below.
return;
}
} else if (context.getLocation() == DatabaseLocation.SHARED) {
context.convertToLocalDatabase();
context.getDBMSSynchronizer().closeSharedDatabase();
context.clearDBMSSynchronizer();
removeTab(panel);
} else {
removeTab(panel);
}
AutosaveManager.shutdown(context);
BackupManager.shutdown(context);
}
/**
 * Removes the tab of the given panel without confirmation: cleans the panel
 * up, updates the window title and remaining tab titles, and notifies the user.
 */
private void removeTab(BasePanel panel) {
DefaultTaskExecutor.runInJavaFXThread(() -> {
panel.cleanUp();
tabbedPane.getTabs().remove(getTab(panel));
setWindowTitle();
dialogService.notify(Localization.lang("Closed library") + '.');
// update tab titles
updateAllTabTitles();
});
}
/**
 * Closes the currently selected tab.
 * NOTE(review): this calls removeTab directly, bypassing the unsaved-changes
 * confirmation in closeTab — confirm whether that is intentional.
 */
public void closeCurrentTab() {
removeTab(getCurrentBasePanel());
}
/**
 * @return a fresh action for opening a library in this frame
 */
public OpenDatabaseAction getOpenDatabaseAction() {
return new OpenDatabaseAction(this);
}
/**
 * @return the manager of the side-pane components (web search, groups, ...)
 */
public SidePaneManager getSidePaneManager() {
return sidePaneManager;
}
/**
 * @return the manager for "push to external application" integrations
 */
public PushToApplicationsManager getPushToApplicationsManager() {
return pushToApplicationsManager;
}
/**
 * @return the search bar shown in the frame's tool bar
 */
public GlobalSearchBar getGlobalSearchBar() {
return globalSearchBar;
}
/**
 * @return the frame-wide undo manager
 */
public CountingUndoManager getUndoManager() {
return undoManager;
}
/**
 * @return the service used to show dialogs and notifications
 */
public DialogService getDialogService() {
return dialogService;
}
/**
 * The action concerned with closing the window.
 */
private class CloseAction extends SimpleCommand {
@Override
public void execute() {
// Delegates to the frame's quit routine (defined elsewhere in this class).
quit();
}
}
/**
 * Handles the general clipboard actions cut, copy and paste. The currently
 * focused node decides the target: a focused {@link TextInputControl} gets
 * text-level clipboard operations, anything else gets entry-level operations
 * on the current {@link BasePanel}.
 */
private class EditAction extends SimpleCommand {

    /** The clipboard command (CUT, COPY or PASTE) this instance performs. */
    private final Actions command;

    public EditAction(Actions command) {
        this.command = command;
    }

    @Override
    public String toString() {
        return this.command.toString();
    }

    @Override
    public void execute() {
        Node focusOwner = mainStage.getScene().getFocusOwner();
        if (focusOwner == null) {
            // Nothing has focus -> nothing sensible to operate on.
            return;
        }
        if (focusOwner instanceof TextInputControl) {
            // Focus is on text field -> copy/paste/cut selected text
            TextInputControl textInput = (TextInputControl) focusOwner;
            switch (command) {
                case COPY:
                    textInput.copy();
                    break;
                case CUT:
                    textInput.cut();
                    break;
                case PASTE:
                    // handled by FX in TextInputControl#paste
                    break;
                default:
                    throw new IllegalStateException("Only cut/copy/paste supported but got " + command);
            }
        } else {
            // Not sure what is selected -> copy/paste/cut selected entries
            switch (command) {
                case COPY:
                    getCurrentBasePanel().copy();
                    break;
                case CUT:
                    getCurrentBasePanel().cut();
                    break;
                case PASTE:
                    // Bug fix: this branch used to be a no-op with a comment that
                    // only applies to text fields, so pasting entries from the
                    // Edit menu did nothing when focus was not on a text input.
                    getCurrentBasePanel().paste();
                    break;
                default:
                    throw new IllegalStateException("Only cut/copy/paste supported but got " + command);
            }
        }
    }
}
/**
 * Resets the table font to the default size from the preferences, refreshes
 * every open panel's table and reports the new size to the user.
 */
private void setDefaultTableFontSize() {
    GUIGlobals.setFont(Globals.prefs.getIntDefault(JabRefPreferences.FONT_SIZE));
    getBasePanelList().forEach(BasePanel::updateTableFont);
    String newSize = String.valueOf(GUIGlobals.currentFont.getSize());
    dialogService.notify(Localization.lang("Table font size is %0", newSize));
}
/**
 * Grows the table font by one point, refreshes every open panel's table and
 * reports the new size to the user.
 */
private void increaseTableFontSize() {
    GUIGlobals.setFont(GUIGlobals.currentFont.getSize() + 1);
    getBasePanelList().forEach(BasePanel::updateTableFont);
    String newSize = String.valueOf(GUIGlobals.currentFont.getSize());
    dialogService.notify(Localization.lang("Table font size is %0", newSize));
}
/**
 * Shrinks the table font by one point (never below size 1), refreshes every
 * open panel's table and reports the new size to the user. Does nothing when
 * the font is already at the minimum.
 */
private void decreaseTableFontSize() {
    double currentSize = GUIGlobals.currentFont.getSize();
    if (currentSize >= 2) {
        GUIGlobals.setFont(currentSize - 1);
        getBasePanelList().forEach(BasePanel::updateTableFont);
        String newSize = String.valueOf(GUIGlobals.currentFont.getSize());
        dialogService.notify(Localization.lang("Table font size is %0", newSize));
    }
}
/**
 * Action that closes the currently selected library tab (with the usual
 * unsaved-changes confirmation handled by closeTab).
 */
private class CloseDatabaseAction extends SimpleCommand {
@Override
public void execute() {
closeTab(getCurrentBasePanel());
}
}
/**
 * Event-bus subscriber (registered on each panel's undo manager in addTab)
 * that reacts to undo/redo state changes.
 */
private class UndoRedoEventManager {
// Fired after an undo or redo was performed: refresh the entry editor.
@Subscribe
public void listen(UndoRedoEvent event) {
updateTexts(event);
JabRefFrame.this.getCurrentBasePanel().updateEntryEditorIfShowing();
}
// Fired when a new undoable action is recorded.
@Subscribe
public void listen(AddUndoableActionEvent event) {
updateTexts(event);
}
// Currently a no-op: updating the undo/redo menu texts is still to be
// ported from Swing (see the commented-out code below).
private void updateTexts(UndoChangeEvent event) {
/* TODO
SwingUtilities.invokeLater(() -> {
undo.putValue(Action.SHORT_DESCRIPTION, event.getUndoDescription());
undo.setEnabled(event.isCanUndo());
redo.putValue(Action.SHORT_DESCRIPTION, event.getRedoDescription());
redo.setEnabled(event.isCanRedo());
});
*/
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.changelog.fs;
import org.apache.flink.metrics.Histogram;
import org.apache.flink.runtime.io.AvailabilityProvider;
import org.apache.flink.runtime.io.AvailabilityProvider.AvailabilityHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import static java.lang.Thread.holdsLock;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.flink.util.ExceptionUtils.findThrowable;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkState;
/**
 * A {@link StateChangeUploader} that waits for some configured amount of time before passing the
 * accumulated state changes to the actual store.
 */
@ThreadSafe
class BatchingStateChangeUploader implements StateChangeUploader {
    private static final Logger LOG = LoggerFactory.getLogger(BatchingStateChangeUploader.class);

    private final RetryingExecutor retryingExecutor;
    private final RetryPolicy retryPolicy;
    private final StateChangeUploader delegate;
    private final ScheduledExecutorService scheduler;
    /** Delay before an accumulated batch is flushed; {@code 0} means flush immediately. */
    private final long scheduleDelayMs;
    /** Accumulated-bytes threshold that triggers an immediate flush regardless of the delay. */
    private final long sizeThresholdBytes;

    /**
     * The lock is used to synchronize concurrent accesses:
     *
     * <ul>
     *   <li>task thread and {@link #scheduler} thread to {@link #scheduled} tasks
     *   <li>task thread and {@link #retryingExecutor uploader} thread to {@link #uploadThrottle}
     * </ul>
     *
     * <p>These code paths are independent, but a single lock is used for simplicity.
     */
    private final Object lock = new Object();

    @GuardedBy("lock")
    private final Queue<UploadTask> scheduled;

    @GuardedBy("lock")
    private long scheduledBytesCounter;

    private final AvailabilityHelper availabilityHelper;

    /**
     * There should be at most one scheduled future, so that changes are batched according to
     * settings.
     */
    @Nullable
    @GuardedBy("lock")
    private ScheduledFuture<?> scheduledFuture;

    // Only accessed through the synchronized getErrorSafe()/setErrorSafe() accessors.
    @Nullable
    @GuardedBy("this")
    private Throwable errorUnsafe;

    @GuardedBy("lock")
    private final UploadThrottle uploadThrottle;

    private final Histogram uploadBatchSizes;

    BatchingStateChangeUploader(
            long persistDelayMs,
            long sizeThresholdBytes,
            RetryPolicy retryPolicy,
            StateChangeUploader delegate,
            int numUploadThreads,
            long maxBytesInFlight,
            ChangelogStorageMetricGroup metricGroup) {
        this(
                persistDelayMs,
                sizeThresholdBytes,
                maxBytesInFlight,
                retryPolicy,
                delegate,
                SchedulerFactory.create(1, "ChangelogUploadScheduler", LOG),
                new RetryingExecutor(numUploadThreads, metricGroup.getAttemptsPerUpload()),
                metricGroup);
    }

    BatchingStateChangeUploader(
            long persistDelayMs,
            long sizeThresholdBytes,
            long maxBytesInFlight,
            RetryPolicy retryPolicy,
            StateChangeUploader delegate,
            ScheduledExecutorService scheduler,
            RetryingExecutor retryingExecutor,
            ChangelogStorageMetricGroup metricGroup) {
        checkArgument(
                sizeThresholdBytes <= maxBytesInFlight,
                "sizeThresholdBytes (%s) must not exceed maxBytesInFlight (%s)",
                sizeThresholdBytes,
                maxBytesInFlight);
        this.scheduleDelayMs = persistDelayMs;
        this.scheduled = new LinkedList<>();
        this.scheduler = scheduler;
        this.retryPolicy = retryPolicy;
        this.retryingExecutor = retryingExecutor;
        this.sizeThresholdBytes = sizeThresholdBytes;
        this.delegate = delegate;
        this.uploadThrottle = new UploadThrottle(maxBytesInFlight);
        this.availabilityHelper = new AvailabilityHelper();
        this.availabilityHelper.resetAvailable();
        this.uploadBatchSizes = metricGroup.getUploadBatchSizes();
        metricGroup.registerUploadQueueSizeGauge(
                () -> {
                    // 'scheduled' is @GuardedBy("lock"): all writers mutate it while holding
                    // 'lock', so synchronizing on the queue itself (as before) provided no
                    // mutual exclusion or visibility guarantee for this read.
                    synchronized (lock) {
                        return scheduled.size();
                    }
                });
    }

    /**
     * Enqueues the given task for batched upload. Blocks while the in-flight byte throttle has no
     * capacity; fails the task (and rethrows) if already failed or on error.
     */
    @Override
    public void upload(UploadTask uploadTask) throws IOException {
        Throwable error = getErrorSafe();
        if (error != null) {
            LOG.debug("don't persist {} changesets, already failed", uploadTask.changeSets.size());
            uploadTask.fail(error);
            return;
        }
        LOG.debug("persist {} changeSets", uploadTask.changeSets.size());
        try {
            long size = uploadTask.getSize();
            synchronized (lock) {
                // Back-pressure: wait until completed uploads release enough capacity.
                while (!uploadThrottle.hasCapacity()) {
                    lock.wait();
                }
                uploadThrottle.seizeCapacity(size);
                if (!uploadThrottle.hasCapacity()) {
                    availabilityHelper.resetUnavailable();
                }
                scheduledBytesCounter += size;
                scheduled.add(wrapWithSizeUpdate(uploadTask, size));
                scheduleUploadIfNeeded();
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag and surface the failure as an IOException.
            Thread.currentThread().interrupt();
            uploadTask.fail(e);
            throw new IOException(e);
        } catch (Exception e) {
            uploadTask.fail(e);
            throw e;
        }
    }

    /**
     * Returns {@code size} bytes to the throttle, wakes any producers blocked in {@link
     * #upload(UploadTask)}, and completes the availability future if capacity transitioned from
     * exhausted to available. The future is completed outside the lock to avoid running alien
     * callbacks while holding it.
     */
    private void releaseCapacity(long size) {
        CompletableFuture<?> toNotify = null;
        synchronized (lock) {
            boolean hadCapacityBefore = uploadThrottle.hasCapacity();
            uploadThrottle.releaseCapacity(size);
            lock.notifyAll();
            if (!hadCapacityBefore && uploadThrottle.hasCapacity()) {
                toNotify = availabilityHelper.getUnavailableToResetAvailable();
            }
        }
        if (toNotify != null) {
            toNotify.complete(null);
        }
    }

    /** Flushes now if the delay is zero or the size threshold is reached; otherwise schedules. */
    private void scheduleUploadIfNeeded() {
        checkState(holdsLock(lock));
        if (scheduleDelayMs == 0 || scheduledBytesCounter >= sizeThresholdBytes) {
            if (scheduledFuture != null) {
                scheduledFuture.cancel(false);
                scheduledFuture = null;
            }
            drainAndSave();
        } else if (scheduledFuture == null) {
            scheduledFuture = scheduler.schedule(this::drainAndSave, scheduleDelayMs, MILLISECONDS);
        }
    }

    /** Drains all currently queued tasks and hands them to the delegate via the retry executor. */
    private void drainAndSave() {
        Collection<UploadTask> tasks;
        synchronized (lock) {
            tasks = new ArrayList<>(scheduled);
            scheduled.clear();
            scheduledBytesCounter = 0;
            scheduledFuture = null;
        }
        try {
            Throwable error = getErrorSafe();
            if (error != null) {
                tasks.forEach(task -> task.fail(error));
                return;
            }
            uploadBatchSizes.update(tasks.size());
            retryingExecutor.execute(retryPolicy, () -> delegate.upload(tasks));
        } catch (Throwable t) {
            tasks.forEach(task -> task.fail(t));
            if (findThrowable(t, IOException.class).isPresent()) {
                // IO errors are considered transient: log and keep accepting uploads.
                LOG.warn("Caught IO exception while uploading", t);
            } else {
                // Any other error is fatal: remember it so subsequent uploads fail fast.
                setErrorSafe(t);
                throw t;
            }
        }
    }

    @Override
    public void close() throws Exception {
        LOG.debug("close");
        scheduler.shutdownNow();
        if (!scheduler.awaitTermination(5, SECONDS)) {
            LOG.warn("Unable to cleanly shutdown scheduler in 5s");
        }
        ArrayList<UploadTask> drained;
        synchronized (lock) {
            drained = new ArrayList<>(scheduled);
            scheduled.clear();
            scheduledBytesCounter = 0;
        }
        CancellationException ce = new CancellationException();
        drained.forEach(task -> task.fail(ce));
        retryingExecutor.close();
        delegate.close();
    }

    private synchronized Throwable getErrorSafe() {
        return errorUnsafe;
    }

    private synchronized void setErrorSafe(Throwable t) {
        errorUnsafe = t;
    }

    /**
     * Wraps the task so that the seized throttle capacity is released exactly once, on either
     * success or failure, before the original callback runs its completion logic.
     */
    private UploadTask wrapWithSizeUpdate(UploadTask uploadTask, long size) {
        return new UploadTask(
                uploadTask.changeSets,
                result -> {
                    try {
                        releaseCapacity(size);
                    } finally {
                        uploadTask.successCallback.accept(result);
                    }
                },
                (result, error) -> {
                    try {
                        releaseCapacity(size);
                    } finally {
                        uploadTask.failureCallback.accept(result, error);
                    }
                });
    }

    @Override
    public AvailabilityProvider getAvailabilityProvider() {
        // This method can be called by multiple (task) threads.
        // Though the field itself is final, implementation is generally not thread-safe.
        // However, in case of reading stale AvailabilityHelper.availableFuture
        // the task will either be notified about availability immediately;
        // or back-pressured hard trying to seize capacity in upload()
        return availabilityHelper;
    }
}
| |
/*!
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright (c) 2002-2013 Pentaho Corporation.. All rights reserved.
*/
package org.pentaho.di.ui.trans.steps.mongodbinput.models;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.parameters.DuplicateParamException;
import org.pentaho.di.core.parameters.NamedParams;
import org.pentaho.di.core.parameters.NamedParamsDefault;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.steps.mongodbinput.MongoDbInputMeta;
import org.pentaho.di.ui.trans.steps.mongodbinput.MongoDbInputDialog;
import org.pentaho.mongo.MongoDbException;
import org.pentaho.mongo.NamedReadPreference;
import org.pentaho.mongo.wrapper.MongoClientWrapper;
import org.pentaho.mongo.wrapper.MongoWrapperUtil;
import org.pentaho.ui.xul.XulEventSourceAdapter;
import org.pentaho.ui.xul.stereotype.Bindable;
import org.pentaho.ui.xul.util.AbstractModelList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
/**
 * XUL model backing the MongoDB Input step configuration UI. Mirrors the fields of
 * {@link MongoDbInputMeta}, fires property-change events on every mutation so bound
 * widgets stay in sync, and offers helpers that query MongoDB for database, collection,
 * field and tag-set metadata.
 */
public class MongoDbModel extends XulEventSourceAdapter {

  private String hostname;
  private String port;
  private String dbName;
  private Vector<String> dbNames = new Vector<String>();

  private String collection;
  private Vector<String> collections = new Vector<String>();

  private String fieldsQuery;

  private String authenticationUser;
  private String authenticationPassword;

  private String jsonQuery;

  private boolean m_aggPipeline = false;
  private boolean m_useAllReplicaSetMembers = false;

  private String m_connectTimeout = ""; // default - never time out

  private String m_socketTimeout = ""; // default - never time out

  private boolean m_kerberos = false;

  /**
   * primary, primaryPreferred, secondary, secondaryPreferred, nearest
   */
  private String m_readPreference = NamedReadPreference.PRIMARY.getName();

  // Marker description identifying transformation parameters auto-generated by this model
  // (as opposed to user-defined ones), so they can be pruned on save.
  private static final String AUTO_GENERATED_PARAMETER = "AUTO.GENERATED.PARAMETER";

  private AbstractModelList<MongoDocumentField> fields = new AbstractModelList<MongoDocumentField>();

  private AbstractModelList<MongoTag> tags = new AbstractModelList<MongoTag>();

  private MongoDbInputMeta mongo;

  private LogChannel log;

  public MongoDbModel( MongoDbInputMeta mongo ) {
    super();
    this.mongo = mongo;
    log = new LogChannel( this.mongo );
    initialize( this.mongo );
  }

  /**
   * Checks the minimal configuration (hostname, database, collection, at least one field),
   * fires a "validate" property-change event, and returns the result.
   *
   * @return true if the model holds a usable configuration
   */
  public boolean validate() {
    boolean valid = false;
    valid = ( !StringUtil.isEmpty( hostname ) )
        // (!StringUtil.isEmpty(port)) && // port can be empty; MongoDb will assume 27017
        && ( !StringUtil.isEmpty( dbName ) ) && ( !StringUtil.isEmpty( collection ) ) && ( fields.size() > 0 );
    firePropertyChange( "validate", null, valid );
    return valid;
  }

  /**
   * @return the hostnames (comma separated: host:<port>)
   */
  public String getHostnames() {
    return hostname;
  }

  /**
   * @param hostname the hostnames to set (comma separated: host:<port>)
   */
  public void setHostnames( String hostname ) {
    String prevVal = this.hostname;
    this.hostname = hostname;
    firePropertyChange( "hostnames", prevVal, hostname );
    validate();
  }

  /**
   * @return the port. This is a port to use for all hostnames (avoids having to
   *         specify the same port for each hostname in the hostnames list
   */
  public String getPort() {
    return port;
  }

  /**
   * @param port the port. This is a port to use for all hostnames (avoids
   *             having to specify the same port for each hostname in the hostnames
   *             list
   */
  public void setPort( String port ) {
    String prevVal = this.port;
    this.port = port;
    firePropertyChange( "port", prevVal, port );
    validate();
  }

  /**
   * @return the dbName
   */
  public String getDbName() {
    return dbName;
  }

  /**
   * @param dbName the dbName to set
   */
  public void setDbName( String dbName ) {
    String prevVal = this.dbName;
    this.dbName = dbName;
    firePropertyChange( "database", prevVal, dbName == null ? "" : dbName );
  }

  public Collection<String> getDbNames() {
    return dbNames;
  }

  public void setDbNames( Vector<String> dbs ) {
    Collection<String> prevVal = this.dbNames;
    this.dbNames = dbs;
    // add the current selection, even if not in the list...
    // users are allowed to manually add database names
    // TODO: check behavior in Spoon...
    if ( !Const.isEmpty( dbName ) ) {
      if ( !dbNames.contains( dbName ) ) {
        dbNames.add( dbName );
      }
    }
    firePropertyChange( "database", prevVal, dbs );
  }

  /**
   * @return the field-path expressions ("fields query") used to project documents
   */
  public String getFieldsName() {
    return fieldsQuery;
  }

  /**
   * @param fields the field-path expressions ("fields query") to set
   */
  public void setFieldsName( String fields ) {
    String prevVal = this.fieldsQuery;
    this.fieldsQuery = fields;
    firePropertyChange( "fieldsQuery", prevVal, fields );
  }

  /**
   * @return the collection
   */
  public String getCollection() {
    return collection;
  }

  /**
   * @param collection the collection to set
   */
  public void setCollection( String collection ) {
    String prevVal = this.collection;
    this.collection = collection;
    firePropertyChange( "collection", prevVal, collection == null ? "" : collection );
  }

  public Collection<String> getCollections() {
    return collections;
  }

  public void setCollections( Vector<String> collections ) {
    Collection<String> prevVal = this.collections;
    this.collections = collections;
    // add the current selection, even if not in the list...
    // users are allowed to manually add collection names
    // TODO: check behavior in Spoon...
    if ( !Const.isEmpty( collection ) ) {
      if ( !collections.contains( collection ) ) {
        collections.add( collection );
      }
    }
    firePropertyChange( "collection", prevVal, collections );
  }

  /**
   * @return the authenticationUser
   */
  public String getAuthenticationUser() {
    return authenticationUser;
  }

  /**
   * @param authenticationUser the authenticationUser to set
   */
  public void setAuthenticationUser( String authenticationUser ) {
    String prevVal = this.authenticationUser;
    this.authenticationUser = authenticationUser;
    firePropertyChange( "authenticationUser", prevVal, authenticationUser );
  }

  /**
   * @return the authenticationPassword
   */
  public String getAuthenticationPassword() {
    return authenticationPassword;
  }

  /**
   * @param authenticationPassword the authenticationPassword to set
   */
  public void setAuthenticationPassword( String authenticationPassword ) {
    String prevVal = this.authenticationPassword;
    this.authenticationPassword = authenticationPassword;
    firePropertyChange( "authenticationPassword", prevVal, authenticationPassword );
  }

  /**
   * @return the jsonQuery
   */
  public String getJsonQuery() {
    return jsonQuery;
  }

  /**
   * @param jsonQuery the jsonQuery to set
   */
  @Bindable public void setJsonQuery( String jsonQuery ) {
    String prevVal = this.jsonQuery;
    this.jsonQuery = jsonQuery;
    firePropertyChange( "jsonQuery", prevVal, jsonQuery );
  }

  /**
   * Set whether the supplied query is actually a pipeline specification
   *
   * @param q true if the supplied query is a pipeline specification
   */
  public void setQueryIsPipeline( boolean q ) {
    // Boolean.valueOf avoids the deprecated Boolean(boolean) constructor and reuses
    // the cached TRUE/FALSE instances.
    Boolean prevVal = Boolean.valueOf( this.m_aggPipeline );
    m_aggPipeline = q;
    firePropertyChange( "m_aggPipeline", prevVal, Boolean.valueOf( q ) );
  }

  /**
   * Get whether the supplied query is actually a pipeline specification
   *
   * @return true if the supplied query is a pipeline specification
   */
  public boolean getQueryIsPipeline() {
    return m_aggPipeline;
  }

  /**
   * Set whether to include all members in the replica set for querying
   *
   * @param u true to query all replica set members
   */
  public void setUseAllReplicaMembers( boolean u ) {
    Boolean prevVal = Boolean.valueOf( this.m_useAllReplicaSetMembers );
    m_useAllReplicaSetMembers = u;
    firePropertyChange( "m_useAllReplicaSetMembers", prevVal, Boolean.valueOf( u ) );
  }

  /**
   * Set whether to use kerberos authentication
   *
   * @param k true if kerberos is to be used
   */
  public void setUseKerberosAuthentication( boolean k ) {
    Boolean prevVal = Boolean.valueOf( this.m_kerberos );
    m_kerberos = k;
    firePropertyChange( "m_kerberos", prevVal, Boolean.valueOf( k ) );
  }

  /**
   * Get whether to use kerberos authentication
   *
   * @return true if kerberos is to be used
   */
  public boolean getUseKerberosAuthentication() {
    return m_kerberos;
  }

  /**
   * Get whether to include all members in the replica set for querying
   */
  public boolean getUseAllReplicaMembers() {
    return m_useAllReplicaSetMembers;
  }

  /**
   * Set the connection timeout. The default is never timeout
   *
   * @param to the connection timeout in milliseconds
   */
  public void setConnectTimeout( String to ) {
    String prevVal = this.m_connectTimeout;
    m_connectTimeout = to;
    firePropertyChange( "connectTimeout", prevVal, to );
  }

  /**
   * Get the connection timeout. The default is never timeout
   *
   * @return the connection timeout in milliseconds
   */
  public String getConnectTimeout() {
    return m_connectTimeout;
  }

  /**
   * Set the number of milliseconds to attempt a send or receive on a socket
   * before timing out.
   *
   * @param so the number of milliseconds before socket timeout
   */
  public void setSocketTimeout( String so ) {
    String prevVal = this.m_socketTimeout;
    m_socketTimeout = so;
    firePropertyChange( "socketTimeout", prevVal, so );
  }

  /**
   * Get the number of milliseconds to attempt a send or receive on a socket
   * before timing out.
   *
   * @return the number of milliseconds before socket timeout
   */
  public String getSocketTimeout() {
    return m_socketTimeout;
  }

  /**
   * Set the read preference to use - primary, primaryPreferred, secondary,
   * secondaryPreferred or nearest.
   *
   * @param preference the read preference to use
   */
  public void setReadPreference( String preference ) {
    String prevVal = this.m_readPreference;
    m_readPreference = Const.isEmpty( preference ) ? NamedReadPreference.PRIMARY.getName() : preference;
    firePropertyChange( "readPreference", prevVal, preference );
  }

  /**
   * Get the read preference to use - primary, primaryPreferred, secondary,
   * secondaryPreferred or nearest.
   *
   * @return the read preference to use
   */
  public String getReadPreference() {
    return m_readPreference;
  }

  public AbstractModelList<MongoDocumentField> getFields() {
    return fields;
  }

  /**
   * Persists the model into the wrapped meta and reconciles transformation parameters:
   * variables referenced by the JSON query are registered as auto-generated parameters,
   * while stale auto-generated parameters are removed (user-defined ones are kept).
   */
  public void save() {
    saveMeta( mongo );

    List<String> variablesUsed = new ArrayList<String>();
    StringUtil.getUsedVariables( mongo.getJsonQuery(), variablesUsed, true );
    TransMeta trans = mongo.getParentStepMeta().getParentTransMeta();
    for ( String variable : variablesUsed ) {
      try {
        // The description is a flag telling us that this parameter was not added by the user,
        // but auto generated by the system... important for managing parameterization within
        // the embedded datasources...
        trans.addParameterDefinition( StringUtil.getVariableName( variable ), "", AUTO_GENERATED_PARAMETER );
      } catch ( DuplicateParamException e ) {
        // this is GOOD ... we do not want duplicates...
        log.logBasic( "Failed attempt to add duplicate variable ".concat( variable ) );
      }
    }

    String[] parametersAdded = trans.listParameters();
    NamedParams params = new NamedParamsDefault();
    String description = null;
    // In order to remove any previously auto-generated parameters, we must
    // build the list of parameters we wish to keep, then erase all parameters
    // in the transformation, then re-add the keepers list. Yes. really.
    for ( String parameter : parametersAdded ) {
      try {
        description = trans.getParameterDescription( parameter );
        if ( description.equalsIgnoreCase( AUTO_GENERATED_PARAMETER ) ) {
          // keep an auto-generated parameter only if the query still uses it
          if ( variablesUsed.contains( parameter ) ) {
            params.addParameterDefinition( parameter, trans.getParameterDefault( parameter ), description );
          }
        } else {
          // user-defined parameters are always kept
          params.addParameterDefinition( parameter, trans.getParameterDefault( parameter ), description );
        }
      } catch ( Exception e ) {
        log.logError( "Can not locate parameter " + parameter + ".", e );
      }
    }

    trans.eraseParameters();
    for ( String key : params.listParameters() ) {
      try {
        trans.addParameterDefinition( key, params.getParameterDefault( key ), params.getParameterDescription( key ) );
      } catch ( Exception e ) {
        log.logError( "Cannot add parameter " + key + ".", e );
      }
    }
    trans.activateParameters();
  }

  /**
   * Copies every model property into the supplied meta object.
   *
   * @param meta the meta object to populate
   */
  public void saveMeta( MongoDbInputMeta meta ) {
    meta.setOutputJson( false );
    meta.setJsonQuery( this.jsonQuery );
    meta.setAuthenticationPassword( this.authenticationPassword );
    meta.setAuthenticationUser( this.authenticationUser );
    meta.setCollection( collection );
    meta.setConnectTimeout( this.m_connectTimeout );
    meta.setDbName( this.dbName );
    meta.setFieldsName( this.fieldsQuery );
    meta.setHostnames( this.hostname );
    meta.setPort( this.port );
    meta.setQueryIsPipeline( this.m_aggPipeline );
    meta.setReadPreference( this.m_readPreference );
    meta.setSocketTimeout( this.m_socketTimeout );
    meta.setMongoFields( MongoDocumentField.convertFromList( this.getFields() ) );
    meta.setUseKerberosAuthentication( m_kerberos );
    meta.setUseAllReplicaSetMembers( this.m_useAllReplicaSetMembers );
    meta.setReadPrefTagSets( MongoTag.convertFromList( this.tags ) );
  }

  // Populates the model from a meta object, firing the usual property-change events.
  private void initialize( MongoDbInputMeta m ) {
    setJsonQuery( m.getJsonQuery() );
    setAuthenticationPassword( m.getAuthenticationPassword() );
    setAuthenticationUser( m.getAuthenticationUser() );
    setCollection( m.getCollection() );
    setCollections( new Vector<String>() );
    setDbName( m.getDbName() );
    setDbNames( new Vector<String>() );
    setFieldsName( m.getFieldsName() );
    setHostnames( m.getHostnames() );
    setPort( m.getPort() );
    setQueryIsPipeline( m.getQueryIsPipeline() );
    setReadPreference( m.getReadPreference() );
    setConnectTimeout( m.getConnectTimeout() );
    setSocketTimeout( m.getSocketTimeout() );
    MongoDocumentField.convertList( m.getMongoFields(), getFields() );
    setUseAllReplicaMembers( m.getUseAllReplicaSetMembers() );
    setUseKerberosAuthentication( m.getUseKerberosAuthentication() );
    MongoTag.convertList( m.getReadPrefTagSets(), getTags() );
  }

  public AbstractModelList<MongoTag> getTags() {
    return tags;
  }

  /** Resets the model to defaults (primary read preference, everything else empty). */
  public void clear() {
    MongoDbInputMeta m = new MongoDbInputMeta();
    m.setReadPreference( NamedReadPreference.PRIMARY.getName() );
    initialize( m );
  }

  public Collection<String> getPossibleReadPreferences() {
    return NamedReadPreference.getPreferenceNames();
  }

  /**
   * Retrieve the list of database names from MongoDB based on what the user entered for hostname, port,etc.
   * NOTE: Much of this could move to the MongoDbInputData class, as it is copied almost verbatim from the
   * Spoon MongoDbInputDialog class.
   *
   * @return Vector<String> list of database names
   * @throws Exception Should anything go wrong connecting to MongoDB, it will be reported with this exception
   */
  public Vector<String> getDatabaseNamesFromMongo() throws Exception {
    Vector<String> dbs = new Vector<String>();

    if ( Const.isEmpty( hostname ) ) {
      log.logBasic( "Fetching database names aborted. Missing hostname." );
      return dbs;
    }

    final MongoDbInputMeta meta = new MongoDbInputMeta();
    final TransMeta transMeta = new TransMeta();
    saveMeta( meta );
    try {
      MongoClientWrapper wrapper = MongoWrapperUtil.createMongoClientWrapper( meta, transMeta, log );
      // renamed from 'dbNames' to avoid shadowing the field of the same name
      List<String> names = null;
      try {
        names = wrapper.getDatabaseNames();
      } finally {
        // always release the client connection
        wrapper.dispose();
      }
      for ( String s : names ) {
        dbs.add( s );
      }
      return dbs;
    } catch ( Exception e ) {
      // previously logged 'meta' here; the exception is what belongs in the log
      log.logError( "Unexpected error retrieving database names from MongoDb. Check your connection details.", e );
      throw new MongoDbException(
          "Unexpected error retrieving database names from MongoDb. Check your connection details.", e );
    }
  }

  /**
   * Retrieve the list of collection names from MongoDB based on what the user entered for hostname, port,etc.
   * NOTE: Much of this could move to the MongoDbInputData class, as it is copied almost verbatim from the
   * Spoon MongoDbInputDialog class.
   *
   * @return Vector<String> list of collection names
   * @throws Exception Should anything go wrong connecting to MongoDB, it will be reported with this exception
   */
  public Vector<String> getCollectionNamesFromMongo() throws MongoDbException {
    Vector<String> newCollections = new Vector<String>();

    if ( Const.isEmpty( dbName ) || Const.isEmpty( hostname ) ) {
      log.logBasic( "Fetching collection names aborted. Missing database name or hostname." );
      return newCollections;
    }

    MongoDbInputMeta meta = new MongoDbInputMeta();
    saveMeta( meta );
    try {
      MongoClientWrapper wrapper = MongoWrapperUtil.createMongoClientWrapper( meta, new TransMeta(), log );
      // renamed from 'collections' to avoid shadowing the field of the same name
      Set<String> collectionNames = new HashSet<String>();
      try {
        collectionNames = wrapper.getCollectionsNames( dbName );
      } finally {
        wrapper.dispose();
      }
      for ( String c : collectionNames ) {
        newCollections.add( c );
      }
      return newCollections;
    } catch ( Exception e ) {
      // previously logged 'meta' here; the exception is what belongs in the log
      log.logError(
          "Unexpected error retrieving collection names from MongoDb. Check that your database name is valid.", e );
      throw new MongoDbException(
          "Unexpected error retrieving collection names from MongoDb. Check that your database name is valid.", e );
    }
  }

  /**
   * @param mergeStrategy 0 = Add new
   *                      1 = Add all
   *                      2 = Clear and add all
   *                      3 = Cancel
   * @throws MongoDbException
   */
  public void getFieldsFromMongo( int mergeStrategy ) throws MongoDbException {
    // TODO: This should be a sample dialog requested from the user ...
    int samples = 100;
    MongoDbInputMeta meta = new MongoDbInputMeta();
    if ( samples > 0 ) {
      try {
        saveMeta( meta );
        boolean result = MongoDbInputDialog.discoverFields( meta, new TransMeta(), samples );
        if ( !result ) {
          log.logBasic( "No fields were returned from MongoDb. Check your query, and/or connection details." );
          throw new MongoDbException(
              "No fields were returned from MongoDb. Check your query, and/or connection details." );
        } else {
          switch ( mergeStrategy ) {
            case 0:
              // "Add new": drop discovered fields that are already present
              MongoDocumentField.trimList( meta.getMongoFields(), getFields() );
              break;
            case 1:
              // "Add all": keep existing entries, append everything discovered
              break;
            case 2:
              // "Clear and add all": start from an empty list
              getFields().removeAll( getFields() );
              break;
          }
          MongoDocumentField.convertList( meta.getMongoFields(), getFields() );
        }
      } catch ( KettleException e ) {
        // previously logged 'meta' here; the exception is what belongs in the log
        log.logError( "Unexpected error retrieving fields from MongoDb. Check your connection details.", e );
        throw new MongoDbException( "Unexpected error retrieving fields from MongoDb. Check your connection details.",
            e );
      }
    }
  }

  /**
   * @param mergeStrategy 0 = Add new
   *                      1 = Add all
   *                      2 = Clear and add all
   *                      3 = Cancel
   * @throws MongoDbException
   */
  public void getTagsFromMongo( int mergeStrategy ) throws MongoDbException {
    if ( Const.isEmpty( hostname ) ) {
      log.logBasic( "Fetching tags aborted. Missing hostname." );
      return;
    }

    MongoDbInputMeta meta = new MongoDbInputMeta();
    saveMeta( meta );
    try {
      List<String> repSetTags = new ArrayList<String>();
      MongoClientWrapper wrapper = MongoWrapperUtil.createMongoClientWrapper( meta, new TransMeta(), log );
      try {
        repSetTags = wrapper.getAllTags();
      } finally {
        wrapper.dispose();
      }
      switch ( mergeStrategy ) {
        case 0:
          // "Add new": drop tags that are already present
          MongoTag.trimList( repSetTags, getTags() );
          break;
        case 1:
          // "Add all": keep existing entries, append everything fetched
          break;
        case 2:
          // "Clear and add all": start from an empty list
          getTags().removeAll( getTags() );
          break;
      }
      MongoTag.convertList( repSetTags, getTags() );
    } catch ( Exception e ) {
      log.logError( "Unexpected error retrieving tags from MongoDb. Check connection details.", e );
      throw new MongoDbException( "Unexpected error retrieving tags from MongoDb. Check your connection details.", e );
    }
  }

  /**
   * Evaluates the currently configured tag sets against the replica set and returns the
   * members that satisfy them, or null if there is nothing to test or no member matches.
   *
   * @return matching replica set members, or null
   * @throws MongoDbException if the tag sets cannot be parsed or evaluated
   */
  public List<String> testSelectedTags() throws MongoDbException {
    List<String> tagSets = null;

    if ( Const.isEmpty( hostname ) ) {
      log.logBasic( "Testing tags aborted. Missing hostname." );
      return tagSets;
    }

    if ( tags.isEmpty() ) {
      log.logBasic( "No tags available for testing." );
      return tagSets;
    }

    List<DBObject> mongoTagSets = new ArrayList<DBObject>();
    List<String> setsToTest = MongoTag.convertFromList( tags );
    for ( String tagSet : setsToTest ) {
      try {
        DBObject set = (DBObject) JSON.parse( tagSet );
        if ( set != null ) {
          mongoTagSets.add( set );
        }
      } catch ( Exception e ) {
        log.logError( "Error parsing MongoDb tag sets.", e );
        throw new MongoDbException( "Error parsing MongoDb tag sets. Check your tag set names and try again.", e );
      }
    }

    if ( mongoTagSets.isEmpty() ) {
      log.logBasic( "Could not parse tags for testing." );
      return tagSets;
    }

    MongoDbInputMeta meta = new MongoDbInputMeta();
    saveMeta( meta );
    try {
      MongoClientWrapper wrapper = MongoWrapperUtil.createMongoClientWrapper( meta, new TransMeta(), log );
      List<String> result = new ArrayList<String>();
      try {
        result = wrapper.getReplicaSetMembersThatSatisfyTagSets( mongoTagSets );
      } finally {
        wrapper.dispose();
      }
      if ( result.size() == 0 ) {
        log.logBasic( "No replica set members match tag sets." );
        return tagSets;
      }
      tagSets = new ArrayList<String>();
      for ( String dbObject : result ) {
        tagSets.add( dbObject );
      }
    } catch ( Exception e ) {
      log.logError( "Unexpected error evaluating tag sets against replica members.", e );
      throw new MongoDbException( "Unexpected error evaluating tag sets against replica members.", e );
    }
    return tagSets;
  }
}
| |
package at.ac.univie.cs.swa.soa.generator;
import at.ac.univie.cs.swa.soa.sdl.DataElement;
import at.ac.univie.cs.swa.soa.sdl.Operation;
import at.ac.univie.cs.swa.soa.sdl.SDL;
import at.ac.univie.cs.swa.soa.sdl.Service;
import com.google.common.base.Objects;
import com.google.common.collect.Iterables;
import java.io.File;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtend2.lib.StringConcatenation;
import org.eclipse.xtext.generator.IFileSystemAccess;
import org.eclipse.xtext.xbase.lib.IterableExtensions;
@SuppressWarnings("all")
public class ClientGenerator {
  /**
   * Emits one "<ServiceName>Client.java" source file for every {@link Service} contained in
   * the given SDL model. The output directory is derived from the model name: lower-cased,
   * with '.' replaced by the platform file separator.
   *
   * <p>NOTE: this is Xtend-generated Java; the verbose temporaries mirror the original
   * Xtend template and are left untouched on purpose.
   *
   * @param fsa file-system access used to write the generated files
   * @param dsl the parsed service-definition model
   */
  public void generateClient(final IFileSystemAccess fsa, final SDL dsl) {
    String _name = dsl.getName();
    String _lowerCase = _name.toLowerCase();
    final String packagePath = _lowerCase.replace(".", File.separator);
    EList<Service> _services = dsl.getServices();
    final Iterable<Service> webServices = Iterables.<Service>filter(_services, Service.class);
    // Expanded form of "webServices != null && !webServices.isEmpty()".
    boolean _and = false;
    boolean _notEquals = (!Objects.equal(webServices, null));
    if (!_notEquals) {
      _and = false;
    } else {
      boolean _isEmpty = IterableExtensions.isEmpty(webServices);
      boolean _not = (!_isEmpty);
      _and = (_notEquals && _not);
    }
    if (_and) {
      for (final Service s : webServices) {
        {
          // Output file: <packagePath>/<ServiceName>Client.java
          String _plus = (packagePath + File.separator);
          String _name_1 = s.getName();
          String _plus_1 = (_plus + _name_1);
          final String file = (_plus_1 + "Client.java");
          CharSequence _generate = this.generate(dsl, s);
          fsa.generateFile(file, _generate);
        }
      }
    }
  }
  /**
   * Builds the source text of the client class for one service: a package declaration, an
   * import of the service interface, a final class "<ServiceName>Client" whose main method
   * obtains a port from "<ServiceName>Service" and invokes every declared operation.
   *
   * <p>NOTE: this is Xtend-generated Java; the {@link StringConcatenation} call sequence
   * exactly defines the emitted text, so the statements must not be reordered.
   *
   * @param dsl the model (its lower-cased name becomes the package name)
   * @param s   the service to generate a client for
   * @return the generated Java source for the client class
   */
  protected CharSequence generate(final SDL dsl, final Service s) {
    CharSequence _xblockexpression = null;
    {
      String _name = dsl.getName();
      final String packageName = _name.toLowerCase();
      String _name_1 = s.getName();
      final String className = (_name_1 + "Client");
      // Qualified path of the containing SDL model (null-guarded by the generator).
      String _xifexpression = null;
      boolean _notEquals = (!Objects.equal(s, null));
      if (_notEquals) {
        EObject _eContainer = s.eContainer();
        String _name_2 = ((SDL) _eContainer).getName();
        _xifexpression = _name_2;
      } else {
        _xifexpression = null;
      }
      final String serviceQualifiedPath = _xifexpression;
      StringConcatenation _builder = new StringConcatenation();
      // package <packageName>;
      _builder.append("\t\t");
      _builder.append("package ");
      _builder.append(packageName, " ");
      _builder.append(";");
      _builder.newLineIfNotEmpty();
      _builder.newLine();
      // import <serviceQualifiedPath>.<ServiceName>;
      _builder.append("import ");
      _builder.append(serviceQualifiedPath, "");
      _builder.append(".");
      String _name_3 = s.getName();
      _builder.append(_name_3, "");
      _builder.append(";");
      _builder.newLineIfNotEmpty();
      _builder.newLine();
      // public final class <ServiceName>Client {
      _builder.append("public final class ");
      _builder.append(className, "");
      _builder.append(" {");
      _builder.newLineIfNotEmpty();
      _builder.newLine();
      _builder.append("\t\t");
      _builder.append("public static void main(String args[]) throws java.lang.Exception {");
      _builder.newLine();
      _builder.newLine();
      // <ServiceName>Service ss = new <ServiceName>Service();
      _builder.append("\t\t");
      String _name_4 = s.getName();
      _builder.append(_name_4, " ");
      _builder.append("Service ss = new ");
      String _name_5 = s.getName();
      _builder.append(_name_5, " ");
      _builder.append("Service();");
      _builder.newLineIfNotEmpty();
      // <ServiceName> port = ss.get<ServiceName>();
      _builder.append("\t\t");
      String _name_6 = s.getName();
      _builder.append(_name_6, " ");
      _builder.append(" port = ss.get");
      String _name_7 = s.getName();
      _builder.append(_name_7, " ");
      _builder.append("(); ");
      _builder.newLineIfNotEmpty();
      _builder.newLine();
      {
        // One "Invoking ..." println plus an invocation snippet per declared operation.
        EList<Operation> _operations = s.getOperations();
        for(final Operation operation : _operations) {
          _builder.append("\t\t");
          _builder.append("System.out.println(\"Invoking ");
          String _name_8 = operation.getName();
          _builder.append(_name_8, " ");
          _builder.append("...\");");
          _builder.newLineIfNotEmpty();
          _builder.append("\t\t");
          CharSequence _generateOperationInvocation = this.generateOperationInvocation(operation);
          _builder.append(_generateOperationInvocation, " ");
          _builder.newLineIfNotEmpty();
        }
      }
      _builder.append(" ");
      _builder.append("}");
      _builder.newLine();
      _builder.append("}");
      _builder.newLine();
      _xblockexpression = (_builder);
    }
    return _xblockexpression;
  }
/**
 * Emits the Java statements that invoke one service operation from the generated client's
 * {@code main}: optionally builds the request object, calls the port, and optionally prints the
 * result. Xtend-generated code — the {@code _and}/{@code _xifexpression} scaffolding encodes
 * short-circuit boolean and conditional expressions; statement order is significant.
 *
 * @param operation the SDL operation to generate an invocation for
 * @return the generated Java source fragment
 */
protected CharSequence generateOperationInvocation(final Operation operation) {
  CharSequence _xblockexpression = null;
  {
    // hasInput := inputType != null && inputType.name != null && !inputType.name.isEmpty()
    boolean _and = false;
    boolean _and_1 = false;
    DataElement _inputType = operation.getInputType();
    boolean _notEquals = (!Objects.equal(_inputType, null));
    if (!_notEquals) {
      _and_1 = false;
    } else {
      DataElement _inputType_1 = operation.getInputType();
      String _name = _inputType_1.getName();
      boolean _notEquals_1 = (!Objects.equal(_name, null));
      _and_1 = (_notEquals && _notEquals_1);
    }
    if (!_and_1) {
      _and = false;
    } else {
      DataElement _inputType_2 = operation.getInputType();
      String _name_1 = _inputType_2.getName();
      boolean _isEmpty = _name_1.isEmpty();
      boolean _not = (!_isEmpty);
      _and = (_and_1 && _not);
    }
    final boolean hasInput = _and;
    // hasOutput := outputType != null && outputType.name != null && !outputType.name.isEmpty()
    boolean _and_2 = false;
    boolean _and_3 = false;
    DataElement _outputType = operation.getOutputType();
    boolean _notEquals_2 = (!Objects.equal(_outputType, null));
    if (!_notEquals_2) {
      _and_3 = false;
    } else {
      DataElement _outputType_1 = operation.getOutputType();
      String _name_2 = _outputType_1.getName();
      boolean _notEquals_3 = (!Objects.equal(_name_2, null));
      _and_3 = (_notEquals_2 && _notEquals_3);
    }
    if (!_and_3) {
      _and_2 = false;
    } else {
      DataElement _outputType_2 = operation.getOutputType();
      String _name_3 = _outputType_2.getName();
      boolean _isEmpty_1 = _name_3.isEmpty();
      boolean _not_1 = (!_isEmpty_1);
      _and_2 = (_and_3 && _not_1);
    }
    final boolean hasOutput = _and_2;
    // inputQualifiedPath := name of the SDL container of the input type (package of the request class).
    String _xifexpression = null;
    DataElement _inputType_3 = operation.getInputType();
    boolean _notEquals_4 = (!Objects.equal(_inputType_3, null));
    if (_notEquals_4) {
      DataElement _inputType_4 = operation.getInputType();
      EObject _eContainer = _inputType_4.eContainer();
      String _name_4 = ((SDL) _eContainer).getName();
      _xifexpression = _name_4;
    } else {
      _xifexpression = null;
    }
    final String inputQualifiedPath = _xifexpression;
    // outputQualifiedPath := name of the SDL container of the output type (package of the response class).
    String _xifexpression_1 = null;
    DataElement _outputType_3 = operation.getOutputType();
    boolean _notEquals_5 = (!Objects.equal(_outputType_3, null));
    if (_notEquals_5) {
      DataElement _outputType_4 = operation.getOutputType();
      EObject _eContainer_1 = _outputType_4.eContainer();
      String _name_5 = ((SDL) _eContainer_1).getName();
      _xifexpression_1 = _name_5;
    } else {
      _xifexpression_1 = null;
    }
    final String outputQualifiedPath = _xifexpression_1;
    StringConcatenation _builder = new StringConcatenation();
    {
      // Emit "<pkg>.<Input> <inputName> = new <pkg>.<Input>();" plus field assignments.
      if (hasInput) {
        _builder.append(inputQualifiedPath, "");
        _builder.append(".");
        DataElement _inputType_5 = operation.getInputType();
        String _name_6 = _inputType_5.getName();
        _builder.append(_name_6, "");
        _builder.append(" ");
        String _inputName = operation.getInputName();
        _builder.append(_inputName, "");
        _builder.append(" = new ");
        _builder.append(inputQualifiedPath, "");
        _builder.append(".");
        DataElement _inputType_6 = operation.getInputType();
        String _name_7 = _inputType_6.getName();
        _builder.append(_name_7, "");
        _builder.append("();");
        _builder.newLineIfNotEmpty();
        DataElement _inputType_7 = operation.getInputType();
        CharSequence _generateValues = this.generateValues(_inputType_7);
        _builder.append(_generateValues, "");
        _builder.newLineIfNotEmpty();
      }
    }
    {
      if (hasOutput) {
        // Capture the result and print it: "<pkg>.<Output> <out> = port.<op>(<in>); System.out.println(...)".
        _builder.append(outputQualifiedPath, "");
        _builder.append(".");
        DataElement _outputType_5 = operation.getOutputType();
        String _name_8 = _outputType_5.getName();
        _builder.append(_name_8, "");
        _builder.append(" ");
        String _outputName = operation.getOutputName();
        _builder.append(_outputName, "");
        _builder.append(" = port.");
        String _name_9 = operation.getName();
        _builder.append(_name_9, "");
        _builder.append("(");
        {
          if (hasInput) {
            String _inputName_1 = operation.getInputName();
            _builder.append(_inputName_1, "");
          }
        }
        _builder.append(");");
        _builder.newLineIfNotEmpty();
        _builder.append("System.out.println(\"");
        String _name_10 = operation.getName();
        _builder.append(_name_10, "");
        _builder.append(".result=\" + ");
        String _outputName_1 = operation.getOutputName();
        _builder.append(_outputName_1, "");
        _builder.append(");");
        _builder.newLineIfNotEmpty();
      } else {
        // No declared output: fire-and-forget call "port.<op>(<in>);".
        _builder.append("port.");
        String _name_11 = operation.getName();
        _builder.append(_name_11, "");
        _builder.append("(");
        {
          if (hasInput) {
            String _inputName_2 = operation.getInputName();
            _builder.append(_inputName_2, "");
          }
        }
        _builder.append(");");
        _builder.newLineIfNotEmpty();
      }
    }
    _xblockexpression = (_builder);
  }
  return _xblockexpression;
}
/**
 * Hook for emitting the assignments that would populate the fields of a generated request
 * object. The current implementation intentionally produces no output; subclasses or later
 * revisions may override/extend it.
 *
 * @param e the data element whose field values would be generated (unused)
 * @return an empty character sequence
 */
protected CharSequence generateValues(final DataElement e) {
  return new StringConcatenation();
}
}
| |
// Copyright 2012 Square, Inc.
package com.squareup.timessquare;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.Toast;
import com.squareup.timessquare.MonthCellDescriptor.RangeState;
import java.text.DateFormat;
import java.text.DateFormatSymbols;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import static java.util.Calendar.DATE;
import static java.util.Calendar.DAY_OF_MONTH;
import static java.util.Calendar.DAY_OF_WEEK;
import static java.util.Calendar.HOUR_OF_DAY;
import static java.util.Calendar.MILLISECOND;
import static java.util.Calendar.MINUTE;
import static java.util.Calendar.MONTH;
import static java.util.Calendar.SECOND;
import static java.util.Calendar.YEAR;
/**
* Android component to allow picking a date from a calendar view (a list of months). Must be
* initialized after inflation with {@link #init(Date, Date)} and can be customized with any of the
* {@link FluentInitializer} methods returned. The currently selected date can be retrieved with
* {@link #getSelectedDate()}.
*/
public class CalendarPickerView extends ListView {
/** Governs how many dates may be selected at once and how a new tap interacts with old picks. */
public enum SelectionMode {
  /**
   * Only one date will be selectable. If there is already a selected date and you select a new
   * one, the old date will be unselected.
   */
  SINGLE,
  /** Multiple dates will be selectable. Selecting an already-selected date will un-select it. */
  MULTIPLE,
  /**
   * Allows you to select a date range. Previous selections are cleared when you either:
   * <ul>
   * <li>Have a range selected and select another date (even if it's in the current range).</li>
   * <li>Have one date selected and then select an earlier date.</li>
   * </ul>
   */
  RANGE
}
// Adapter that feeds one MonthView per visible month into this ListView.
private final CalendarPickerView.MonthAdapter adapter;
// cells[month][week][day]: mirror of the grid each MonthView renders.
private final List<List<List<MonthCellDescriptor>>> cells =
    new ArrayList<List<List<MonthCellDescriptor>>>();
// Receives cell taps from every MonthView.
final MonthView.Listener listener = new CellClickedListener();
final List<MonthDescriptor> months = new ArrayList<MonthDescriptor>();
// Selection/highlight state is kept in two parallel forms: cell descriptors (for rendering)
// and Calendars (for date arithmetic). The lists must be kept in sync.
final List<MonthCellDescriptor> selectedCells = new ArrayList<MonthCellDescriptor>();
final List<MonthCellDescriptor> highlightedCells = new ArrayList<MonthCellDescriptor>();
final List<Calendar> selectedCals = new ArrayList<Calendar>();
final List<Calendar> highlightedCals = new ArrayList<Calendar>();
// Locale and the formatters derived from it; all reset by init(...).
private Locale locale;
private DateFormat monthNameFormat;
private DateFormat weekdayNameFormat;
private DateFormat fullDateFormat;
// Selectable range [minCal, maxCal) and the cursor used while building months.
private Calendar minCal;
private Calendar maxCal;
private Calendar monthCounter;
private boolean displayOnly;
SelectionMode selectionMode;
Calendar today;
// Styling pulled from XML attributes in the constructor.
private int dividerColor;
private int dayBackgroundResId;
private int dayTextColorResId;
private int titleTextColor;
private boolean displayHeader;
private int headerTextColor;
private Typeface titleTypeface;
private Typeface dateTypeface;
// Client callbacks.
private OnDateSelectedListener dateListener;
private DateSelectableFilter dateConfiguredListener;
private OnInvalidDateSelectedListener invalidDateListener =
    new DefaultOnInvalidDateSelectedListener();
private CellClickInterceptor cellClickInterceptor;
private List<CalendarCellDecorator> decorators;
/**
 * Installs the per-cell decorators and redraws the calendar so they take effect immediately.
 *
 * @param decorators decorators to apply to every cell; may be {@code null} to clear
 */
public void setDecorators(List<CalendarCellDecorator> decorators) {
  this.decorators = decorators;
  // The adapter is null while the constructor is still running.
  if (adapter != null) {
    adapter.notifyDataSetChanged();
  }
}
/** Returns the decorators last passed to {@link #setDecorators(List)}, or {@code null}. */
public List<CalendarCellDecorator> getDecorators() {
  return this.decorators;
}
/**
 * Inflation constructor. Reads styling from XML attributes, configures the backing ListView,
 * and seeds locale-dependent state. {@link #init(Date, Date)} must still be called before the
 * view can be measured/displayed (except in the layout-editor preview, handled below).
 *
 * @param context the view context
 * @param attrs XML attributes (see {@code R.styleable.CalendarPickerView})
 */
public CalendarPickerView(Context context, AttributeSet attrs) {
  super(context, attrs);
  Resources res = context.getResources();
  TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CalendarPickerView);
  // Styling, with library defaults as fallbacks.
  final int bg = a.getColor(R.styleable.CalendarPickerView_android_background,
      res.getColor(R.color.calendar_bg));
  dividerColor = a.getColor(R.styleable.CalendarPickerView_dividerColor,
      res.getColor(R.color.calendar_divider));
  dayBackgroundResId = a.getResourceId(R.styleable.CalendarPickerView_dayBackground,
      R.drawable.calendar_bg_selector);
  dayTextColorResId = a.getResourceId(R.styleable.CalendarPickerView_dayTextColor,
      R.color.calendar_text_selector);
  titleTextColor = a.getColor(R.styleable.CalendarPickerView_titleTextColor,
      res.getColor(R.color.calendar_text_active));
  displayHeader = a.getBoolean(R.styleable.CalendarPickerView_displayHeader, true);
  headerTextColor = a.getColor(R.styleable.CalendarPickerView_headerTextColor,
      res.getColor(R.color.calendar_text_active));
  a.recycle();
  adapter = new MonthAdapter();
  // Month rows draw their own separators; suppress the ListView divider.
  setDivider(null);
  setDividerHeight(0);
  setBackgroundColor(bg);
  setCacheColorHint(bg);
  // Locale-dependent defaults; init(...) re-creates these if a locale is supplied later.
  locale = Locale.getDefault();
  today = Calendar.getInstance(locale);
  minCal = Calendar.getInstance(locale);
  maxCal = Calendar.getInstance(locale);
  monthCounter = Calendar.getInstance(locale);
  monthNameFormat = new SimpleDateFormat(context.getString(R.string.month_name_format), locale);
  weekdayNameFormat = new SimpleDateFormat(context.getString(R.string.day_name_format), locale);
  fullDateFormat = DateFormat.getDateInstance(DateFormat.MEDIUM, locale);
  // Give the layout-editor preview something to render without an explicit init() call.
  if (isInEditMode()) {
    Calendar nextYear = Calendar.getInstance(locale);
    nextYear.add(Calendar.YEAR, 1);
    init(new Date(), nextYear.getTime()) //
        .withSelectedDate(new Date());
  }
}
/**
* Both date parameters must be non-null and their {@link Date#getTime()} must not return 0. Time
* of day will be ignored. For instance, if you pass in {@code minDate} as 11/16/2012 5:15pm and
* {@code maxDate} as 11/16/2013 4:30am, 11/16/2012 will be the first selectable date and
* 11/15/2013 will be the last selectable date ({@code maxDate} is exclusive).
* <p>
* This will implicitly set the {@link SelectionMode} to {@link SelectionMode#SINGLE}. If you
* want a different selection mode, use {@link FluentInitializer#inMode(SelectionMode)} on the
* {@link FluentInitializer} this method returns.
* <p>
* The calendar will be constructed using the given locale. This means that all names
* (months, days) will be in the language of the locale and the weeks start with the day
* specified by the locale.
*
* @param minDate Earliest selectable date, inclusive. Must be earlier than {@code maxDate}.
* @param maxDate Latest selectable date, exclusive. Must be later than {@code minDate}.
*/
/**
 * (Re)initializes the calendar for the half-open range [minDate, maxDate) using the given
 * locale, rebuilding all month/cell state and resetting the selection mode to SINGLE.
 *
 * @param minDate earliest selectable date, inclusive; non-null, before {@code maxDate}
 * @param maxDate latest selectable date, exclusive; non-null
 * @param locale locale used for all calendars and month/weekday names; non-null
 * @return a {@link FluentInitializer} for further configuration
 * @throws IllegalArgumentException if any argument is null or the dates are out of order
 */
public FluentInitializer init(Date minDate, Date maxDate, Locale locale) {
  if (minDate == null || maxDate == null) {
    throw new IllegalArgumentException(
        "minDate and maxDate must be non-null. " + dbg(minDate, maxDate));
  }
  if (minDate.after(maxDate)) {
    throw new IllegalArgumentException(
        "minDate must be before maxDate. " + dbg(minDate, maxDate));
  }
  if (locale == null) {
    throw new IllegalArgumentException("Locale is null.");
  }
  // Make sure that all calendar instances use the same locale.
  this.locale = locale;
  today = Calendar.getInstance(locale);
  minCal = Calendar.getInstance(locale);
  maxCal = Calendar.getInstance(locale);
  monthCounter = Calendar.getInstance(locale);
  monthNameFormat =
      new SimpleDateFormat(getContext().getString(R.string.month_name_format), locale);
  // Relabel any months kept from a previous init with the new locale's month names.
  for (MonthDescriptor month : months) {
    month.setLabel(monthNameFormat.format(month.getDate()));
  }
  weekdayNameFormat =
      new SimpleDateFormat(getContext().getString(R.string.day_name_format), locale);
  fullDateFormat = DateFormat.getDateInstance(DateFormat.MEDIUM, locale);
  this.selectionMode = SelectionMode.SINGLE;
  // Clear out any previously-selected dates/cells.
  selectedCals.clear();
  selectedCells.clear();
  highlightedCals.clear();
  highlightedCells.clear();
  // Clear previous state.
  cells.clear();
  months.clear();
  minCal.setTime(minDate);
  maxCal.setTime(maxDate);
  setMidnight(minCal);
  setMidnight(maxCal);
  displayOnly = false;
  // maxDate is exclusive: bump back to the previous day so if maxDate is the first of a month,
  // we don't accidentally include that month in the view.
  maxCal.add(MINUTE, -1);
  // Now iterate between minCal and maxCal and build up our list of months to show.
  monthCounter.setTime(minCal.getTime());
  final int maxMonth = maxCal.get(MONTH);
  final int maxYear = maxCal.get(YEAR);
  while ((monthCounter.get(MONTH) <= maxMonth // Up to, including the month.
      || monthCounter.get(YEAR) < maxYear) // Up to the year.
      && monthCounter.get(YEAR) < maxYear + 1) { // But not > next yr.
    Date date = monthCounter.getTime();
    MonthDescriptor month =
        new MonthDescriptor(monthCounter.get(MONTH), monthCounter.get(YEAR), date,
            monthNameFormat.format(date));
    cells.add(getMonthCells(month, monthCounter));
    Logr.d("Adding month %s", month);
    months.add(month);
    monthCounter.add(MONTH, 1);
  }
  validateAndUpdate();
  return new FluentInitializer();
}
/**
 * Initializes the calendar for the half-open date range [minDate, maxDate) using the default
 * locale ({@link java.util.Locale#getDefault()}). Time-of-day on both arguments is ignored.
 * Implicitly sets {@link SelectionMode#SINGLE}; use the returned {@link FluentInitializer} to
 * change the mode or pre-select dates. For an explicit locale, call
 * {@link #init(java.util.Date, java.util.Date, java.util.Locale)}.
 *
 * @param minDate earliest selectable date, inclusive; must be earlier than {@code maxDate}
 * @param maxDate latest selectable date, exclusive; must be later than {@code minDate}
 */
public FluentInitializer init(Date minDate, Date maxDate) {
  return init(minDate, maxDate, Locale.getDefault());
}
/** Fluent configuration object returned by {@link #init(Date, Date)}. */
public class FluentInitializer {
  /** Override the {@link SelectionMode} from the default ({@link SelectionMode#SINGLE}). */
  public FluentInitializer inMode(SelectionMode mode) {
    selectionMode = mode;
    validateAndUpdate();
    return this;
  }
  /**
   * Set an initially-selected date. The calendar will scroll to that date if it's not already
   * visible.
   */
  public FluentInitializer withSelectedDate(Date selectedDates) {
    return withSelectedDates(Arrays.asList(selectedDates));
  }
  /**
   * Set multiple selected dates. This will throw an {@link IllegalArgumentException} if you
   * pass in multiple dates and haven't already called {@link #inMode(SelectionMode)}.
   * A null collection is treated as empty.
   */
  public FluentInitializer withSelectedDates(Collection<Date> selectedDates) {
    // FIX: the null guard used to come *after* selectedDates.size(), so a null argument threw
    // NPE and the guard was dead. Check null first; the size limits only apply when non-null.
    if (selectedDates != null) {
      if (selectionMode == SelectionMode.SINGLE && selectedDates.size() > 1) {
        throw new IllegalArgumentException("SINGLE mode can't be used with multiple selectedDates");
      }
      if (selectionMode == SelectionMode.RANGE && selectedDates.size() > 2) {
        throw new IllegalArgumentException(
            "RANGE mode only allows two selectedDates. You tried to pass " + selectedDates.size());
      }
      for (Date date : selectedDates) {
        selectDate(date);
      }
    }
    scrollToSelectedDates();
    validateAndUpdate();
    return this;
  }
  /** Highlight (but do not select) the given dates. */
  public FluentInitializer withHighlightedDates(Collection<Date> dates) {
    highlightDates(dates);
    return this;
  }
  /** Highlight (but do not select) a single date. */
  public FluentInitializer withHighlightedDate(Date date) {
    return withHighlightedDates(Arrays.asList(date));
  }
  /** Replace the localized short weekday names used in the header row. */
  public FluentInitializer setShortWeekdays(String[] newShortWeekdays) {
    DateFormatSymbols symbols = new DateFormatSymbols(locale);
    symbols.setShortWeekdays(newShortWeekdays);
    weekdayNameFormat =
        new SimpleDateFormat(getContext().getString(R.string.day_name_format), symbols);
    return this;
  }
  /** Render the calendar read-only: cells are shown but cannot be selected. */
  public FluentInitializer displayOnly() {
    displayOnly = true;
    return this;
  }
}
/** Attaches the month adapter on first use, then asks it to redraw every visible month. */
private void validateAndUpdate() {
  if (getAdapter() == null) {
    setAdapter(adapter);
  }
  adapter.notifyDataSetChanged();
}
/** Jump-scrolls (no animation) to the month at {@code selectedIndex}. */
private void scrollToSelectedMonth(final int selectedIndex) {
  scrollToSelectedMonth(selectedIndex, false);
}
/**
 * Scrolls the list to the month at {@code selectedIndex}, optionally animating. Posted so the
 * scroll happens after the current layout pass.
 */
private void scrollToSelectedMonth(final int selectedIndex, final boolean smoothScroll) {
  post(new Runnable() {
    @Override public void run() {
      Logr.d("Scrolling to position %d", selectedIndex);
      if (!smoothScroll) {
        setSelection(selectedIndex);
      } else {
        smoothScrollToPosition(selectedIndex);
      }
    }
  });
}
/**
 * Scrolls to the first month containing a selected date; failing that, to the month containing
 * today. Does nothing when neither appears in the visible month list.
 */
private void scrollToSelectedDates() {
  Integer selectedIndex = null;
  Integer todayIndex = null;
  Calendar now = Calendar.getInstance(locale);
  for (int i = 0; i < months.size() && selectedIndex == null; i++) {
    MonthDescriptor month = months.get(i);
    for (Calendar selectedCal : selectedCals) {
      if (sameMonth(selectedCal, month)) {
        selectedIndex = i;
        break;
      }
    }
    // Remember the current month as a fallback, but never prefer it over a selection.
    if (selectedIndex == null && todayIndex == null && sameMonth(now, month)) {
      todayIndex = i;
    }
  }
  if (selectedIndex != null) {
    scrollToSelectedMonth(selectedIndex);
  } else if (todayIndex != null) {
    scrollToSelectedMonth(todayIndex);
  }
}
/**
 * Scrolls to the month containing {@code date}.
 *
 * @param date date to bring on screen
 * @return true when a matching month was found and scrolled to, false otherwise
 */
public boolean scrollToDate(Date date) {
  Calendar target = Calendar.getInstance(locale);
  target.setTime(date);
  for (int i = 0; i < months.size(); i++) {
    if (sameMonth(target, months.get(i))) {
      scrollToSelectedMonth(i);
      return true;
    }
  }
  return false;
}
/**
 * Pins this view's layout size to its measured size. Only call when the calendar lives in a
 * dialog, exactly once, right after the dialog is shown (via
 * {@link android.content.DialogInterface.OnShowListener} or
 * {@link android.app.DialogFragment#onStart()}).
 */
public void fixDialogDimens() {
  Logr.d("Fixing dimensions to h = %d / w = %d", getMeasuredHeight(), getMeasuredWidth());
  ViewGroup.LayoutParams params = getLayoutParams();
  params.height = getMeasuredHeight();
  params.width = getMeasuredWidth();
  // Post this runnable so it runs _after_ the dimen changes have been applied/re-measured.
  post(new Runnable() {
    @Override public void run() {
      Logr.d("Dimens are fixed: now scroll to the selected date");
      scrollToSelectedDates();
    }
  });
}
/**
 * Sets the typeface used for the month-title row, then redraws.
 *
 * @param titleTypeface typeface for month titles
 */
public void setTitleTypeface(Typeface titleTypeface) {
  this.titleTypeface = titleTypeface;
  validateAndUpdate();
}
/**
 * Sets the typeface used for the day-number grid, then redraws.
 *
 * @param dateTypeface typeface for day numbers
 */
public void setDateTypeface(Typeface dateTypeface) {
  this.dateTypeface = dateTypeface;
  validateAndUpdate();
}
/**
 * Convenience: applies one typeface to both the month titles and the day grid.
 *
 * @param typeface typeface for all calendar text
 */
public void setTypeface(Typeface typeface) {
  setTitleTypeface(typeface);
  setDateTypeface(typeface);
}
/**
 * Undoes {@link #fixDialogDimens()} so the view can be re-measured, e.g. after a screen
 * rotation while hosted in a dialog.
 */
public void unfixDialogDimens() {
  Logr.d("Reset the fixed dimensions to allow for re-measurement");
  ViewGroup.LayoutParams params = getLayoutParams();
  params.height = LayoutParams.MATCH_PARENT;
  params.width = LayoutParams.MATCH_PARENT;
  requestLayout();
}
/** Fails fast when measured before {@code init()} has populated any months. */
@Override protected void onMeasure(int widthSpec, int heightSpec) {
  if (months.isEmpty()) {
    throw new IllegalStateException(
        "Must have at least one month to display. Did you forget to call init()?");
  }
  super.onMeasure(widthSpec, heightSpec);
}
/** Returns the first selected date (insertion order), or {@code null} when nothing is selected. */
public Date getSelectedDate() {
  if (selectedCals.isEmpty()) {
    return null;
  }
  return selectedCals.get(0).getTime();
}
/** Returns every selected date, sorted chronologically. */
public List<Date> getSelectedDates() {
  List<Date> selectedDates = new ArrayList<Date>();
  for (MonthCellDescriptor cell : selectedCells) {
    selectedDates.add(cell.getDate());
  }
  Collections.sort(selectedDates);
  return selectedDates;
}
/** Returns a string summarizing the min/max dates a client passed to {@code init()}. */
private static String dbg(Date minDate, Date maxDate) {
  StringBuilder sb = new StringBuilder();
  sb.append("minDate: ").append(minDate).append("\nmaxDate: ").append(maxDate);
  return sb.toString();
}
/** Clears the time-of-day fields (hours/minutes/seconds/millis) of {@code cal} in place. */
static void setMidnight(Calendar cal) {
  int[] timeFields = {
      Calendar.HOUR_OF_DAY, Calendar.MINUTE, Calendar.SECOND, Calendar.MILLISECOND
  };
  for (int field : timeFields) {
    cal.set(field, 0);
  }
}
/** Routes taps on day cells: interceptor first, then validity check, then selection. */
private class CellClickedListener implements MonthView.Listener {
  @Override public void handleClick(MonthCellDescriptor cell) {
    Date clickedDate = cell.getDate();
    // Give the client interceptor first refusal; a true return swallows the tap.
    if (cellClickInterceptor != null && cellClickInterceptor.onCellClicked(clickedDate)) {
      return;
    }
    boolean selectable =
        betweenDates(clickedDate, minCal, maxCal) && isDateSelectable(clickedDate);
    if (!selectable) {
      if (invalidDateListener != null) {
        invalidDateListener.onInvalidDateSelected(clickedDate);
      }
      return;
    }
    boolean nowSelected = doSelectDate(clickedDate, cell);
    if (dateListener == null) {
      return;
    }
    if (nowSelected) {
      dateListener.onDateSelected(clickedDate);
    } else {
      dateListener.onDateUnselected(clickedDate);
    }
  }
}
/**
 * Selects a date without smooth-scrolling. Honors the current {@link SelectionMode}: SINGLE
 * replaces the previous pick, MULTIPLE adds to it. Scrolls to the date when the selection
 * succeeds and the date is off screen.
 *
 * @return whether the date was actually selected
 */
public boolean selectDate(Date date) {
  return selectDate(date, false);
}
/**
 * Selects a date, honoring the current {@link SelectionMode} (SINGLE replaces, MULTIPLE adds).
 * Scrolls — smoothly when requested — to the newly selected date if off screen.
 *
 * @param date date to select; must be non-null and within [minDate, maxDate)
 * @param smoothScroll animate the scroll instead of jumping
 * @return whether the date was actually selected
 */
public boolean selectDate(Date date, boolean smoothScroll) {
  validateDate(date);
  MonthCellWithMonthIndex found = getMonthCellWithIndexByDate(date);
  if (found == null || !isDateSelectable(date)) {
    return false;
  }
  boolean wasSelected = doSelectDate(date, found.cell);
  if (wasSelected) {
    scrollToSelectedMonth(found.monthIndex, smoothScroll);
  }
  return wasSelected;
}
/**
 * Rejects null dates and dates outside the configured min/max window.
 *
 * @throws IllegalArgumentException when {@code date} is null or out of range
 */
private void validateDate(Date date) {
  if (date == null) {
    throw new IllegalArgumentException("Selected date must be non-null.");
  }
  boolean outOfRange = date.before(minCal.getTime()) || date.after(maxCal.getTime());
  if (outOfRange) {
    throw new IllegalArgumentException(String.format(
        "SelectedDate must be between minDate and maxDate."
            + "%nminDate: %s%nmaxDate: %s%nselectedDate: %s", minCal.getTime(), maxCal.getTime(),
        date));
  }
}
/**
 * Core selection state machine. Applies {@code date}/{@code cell} to the current selection
 * according to {@link #selectionMode}, updates both the cell descriptors and the parallel
 * Calendar list, fills in RANGE middles, and redraws.
 *
 * @param date tapped date; may become null in MULTIPLE mode when the tap un-selects
 * @param cell the descriptor of the tapped day cell
 * @return true when a date ended up selected, false when the tap resulted in an un-selection
 */
private boolean doSelectDate(Date date, MonthCellDescriptor cell) {
  Calendar newlySelectedCal = Calendar.getInstance(locale);
  newlySelectedCal.setTime(date);
  // Sanitize input: clear out the hours/minutes/seconds/millis.
  setMidnight(newlySelectedCal);
  // Clear any remaining range state.
  for (MonthCellDescriptor selectedCell : selectedCells) {
    selectedCell.setRangeState(RangeState.NONE);
  }
  switch (selectionMode) {
    case RANGE:
      if (selectedCals.size() > 1) {
        // We've already got a range selected: clear the old one.
        clearOldSelections();
      } else if (selectedCals.size() == 1 && newlySelectedCal.before(selectedCals.get(0))) {
        // We're moving the start of the range back in time: clear the old start date.
        clearOldSelections();
      }
      break;
    case MULTIPLE:
      // Toggles: returns null when the tap removed an existing selection.
      date = applyMultiSelect(date, newlySelectedCal);
      break;
    case SINGLE:
      clearOldSelections();
      break;
    default:
      throw new IllegalStateException("Unknown selectionMode " + selectionMode);
  }
  if (date != null) {
    // Select a new cell.
    if (selectedCells.size() == 0 || !selectedCells.get(0).equals(cell)) {
      selectedCells.add(cell);
      cell.setSelected(true);
    }
    selectedCals.add(newlySelectedCal);
    if (selectionMode == SelectionMode.RANGE && selectedCells.size() > 1) {
      // Select all days in between start and end.
      Date start = selectedCells.get(0).getDate();
      Date end = selectedCells.get(1).getDate();
      selectedCells.get(0).setRangeState(MonthCellDescriptor.RangeState.FIRST);
      selectedCells.get(1).setRangeState(MonthCellDescriptor.RangeState.LAST);
      // Walk every cell of every month; mark selectable cells strictly inside the range.
      for (List<List<MonthCellDescriptor>> month : cells) {
        for (List<MonthCellDescriptor> week : month) {
          for (MonthCellDescriptor singleCell : week) {
            if (singleCell.getDate().after(start)
                && singleCell.getDate().before(end)
                && singleCell.isSelectable()) {
              singleCell.setSelected(true);
              singleCell.setRangeState(MonthCellDescriptor.RangeState.MIDDLE);
              selectedCells.add(singleCell);
            }
          }
        }
      }
    }
  }
  // Update the adapter.
  validateAndUpdate();
  return date != null;
}
/**
 * Un-selects every selected cell and empties both selection lists, notifying the date listener
 * of each un-selection. In RANGE mode only the two endpoints are reported — the filled-in
 * middle cells were never individually "selected" from the client's point of view.
 */
private void clearOldSelections() {
  for (MonthCellDescriptor cell : selectedCells) {
    // De-select the currently-selected cell.
    cell.setSelected(false);
    if (dateListener == null) {
      continue;
    }
    Date unselected = cell.getDate();
    boolean notify = true;
    if (selectionMode == SelectionMode.RANGE) {
      int index = selectedCells.indexOf(cell);
      notify = index == 0 || index == selectedCells.size() - 1;
    }
    if (notify) {
      dateListener.onDateUnselected(unselected);
    }
  }
  selectedCells.clear();
  selectedCals.clear();
}
/**
 * MULTIPLE-mode toggle: if {@code date} is already selected, removes it from both selection
 * lists and returns null (signalling an un-selection); otherwise returns {@code date} unchanged
 * so the caller adds it.
 *
 * @param date the tapped date
 * @param selectedCal midnight-normalized calendar for the same date
 * @return {@code date}, or null when the tap removed an existing selection
 */
private Date applyMultiSelect(Date date, Calendar selectedCal) {
  // FIX: removing from a list inside its own for-each is only accidentally safe here because
  // of the break; use explicit iterators so removal is structurally safe.
  for (Iterator<MonthCellDescriptor> it = selectedCells.iterator(); it.hasNext(); ) {
    MonthCellDescriptor selectedCell = it.next();
    if (selectedCell.getDate().equals(date)) {
      // De-select the currently-selected cell.
      selectedCell.setSelected(false);
      it.remove();
      date = null;
      break;
    }
  }
  for (Iterator<Calendar> it = selectedCals.iterator(); it.hasNext(); ) {
    if (sameDate(it.next(), selectedCal)) {
      it.remove();
      break;
    }
  }
  return date;
}
/**
 * Marks the given dates as highlighted (visually distinct but not selected) and redraws.
 * Dates without a matching selectable cell are silently skipped.
 *
 * @param dates dates to highlight; each must be within [minDate, maxDate)
 * @throws IllegalArgumentException when a date is null or out of range
 */
public void highlightDates(Collection<Date> dates) {
  for (Date date : dates) {
    validateDate(date);
    MonthCellWithMonthIndex monthCellWithMonthIndex = getMonthCellWithIndexByDate(date);
    if (monthCellWithMonthIndex != null) {
      // FIX: use the view's locale like every other Calendar in this class, instead of the
      // default-locale Calendar.getInstance().
      Calendar newlyHighlightedCal = Calendar.getInstance(locale);
      newlyHighlightedCal.setTime(date);
      MonthCellDescriptor cell = monthCellWithMonthIndex.cell;
      highlightedCells.add(cell);
      highlightedCals.add(newlyHighlightedCal);
      cell.setHighlighted(true);
    }
  }
  validateAndUpdate();
}
/** Removes the highlight from every highlighted cell, forgets them all, and redraws. */
public void clearHighlightedDates() {
  for (MonthCellDescriptor cell : highlightedCells) {
    cell.setHighlighted(false);
  }
  highlightedCells.clear();
  highlightedCals.clear();
  validateAndUpdate();
}
/** Pairs a day cell with the index of the month it belongs to (used for scrolling). */
private static class MonthCellWithMonthIndex {
  public MonthCellDescriptor cell;
  public int monthIndex;
  public MonthCellWithMonthIndex(MonthCellDescriptor dayCell, int index) {
    this.cell = dayCell;
    this.monthIndex = index;
  }
}
/**
 * Finds the selectable day cell matching {@code date}, together with its month index.
 *
 * @return the cell and month index, or null when no selectable cell has that date
 */
private MonthCellWithMonthIndex getMonthCellWithIndexByDate(Date date) {
  Calendar searchCal = Calendar.getInstance(locale);
  searchCal.setTime(date);
  Calendar probe = Calendar.getInstance(locale);
  int monthIndex = 0;
  for (List<List<MonthCellDescriptor>> monthCells : cells) {
    for (List<MonthCellDescriptor> weekCells : monthCells) {
      for (MonthCellDescriptor candidate : weekCells) {
        probe.setTime(candidate.getDate());
        if (sameDate(probe, searchCal) && candidate.isSelectable()) {
          return new MonthCellWithMonthIndex(candidate, monthIndex);
        }
      }
    }
    monthIndex++;
  }
  return null;
}
/** ListView adapter: one recycled {@link MonthView} row per entry in {@link #months}. */
private class MonthAdapter extends BaseAdapter {
  private final LayoutInflater inflater;
  private MonthAdapter() {
    inflater = LayoutInflater.from(getContext());
  }
  @Override public boolean isEnabled(int position) {
    // Disable selectability: each cell will handle that itself.
    return false;
  }
  @Override public int getCount() {
    return months.size();
  }
  @Override public Object getItem(int position) {
    return months.get(position);
  }
  @Override public long getItemId(int position) {
    // Positions are stable between data changes, so the position doubles as the id.
    return position;
  }
  @Override public View getView(int position, View convertView, ViewGroup parent) {
    // Recycle the MonthView when the ListView hands one back; otherwise build a fresh one
    // carrying all current styling. Recycled views only need their decorators refreshed.
    MonthView monthView = (MonthView) convertView;
    if (monthView == null) {
      monthView =
          MonthView.create(parent, inflater, weekdayNameFormat, listener, today, dividerColor,
              dayBackgroundResId, dayTextColorResId, titleTextColor, displayHeader,
              headerTextColor, decorators, locale);
    } else {
      monthView.setDecorators(decorators);
    }
    // Bind this position's month descriptor and cell grid into the row.
    monthView.init(months.get(position), cells.get(position), displayOnly, titleTypeface,
        dateTypeface);
    return monthView;
  }
}
/**
 * Builds the week-by-week grid of day cells for one month, starting from the first day of the
 * week containing the 1st (so leading cells may belong to the previous month).
 *
 * @param month descriptor of the month to build
 * @param startCal any calendar positioned inside that month; not mutated
 * @return rows (weeks) of 7 {@link MonthCellDescriptor}s each
 */
List<List<MonthCellDescriptor>> getMonthCells(MonthDescriptor month, Calendar startCal) {
  Calendar cal = Calendar.getInstance(locale);
  cal.setTime(startCal.getTime());
  List<List<MonthCellDescriptor>> cells = new ArrayList<List<MonthCellDescriptor>>();
  cal.set(DAY_OF_MONTH, 1);
  // Back up to the locale's first day of the week at or before the 1st.
  int firstDayOfWeek = cal.get(DAY_OF_WEEK);
  int offset = cal.getFirstDayOfWeek() - firstDayOfWeek;
  if (offset > 0) {
    offset -= 7;
  }
  cal.add(DATE, offset);
  // minDate/maxDate sort selectedCals in place; cache both bounds once for the whole grid.
  Calendar minSelectedCal = minDate(selectedCals);
  Calendar maxSelectedCal = maxDate(selectedCals);
  while ((cal.get(MONTH) < month.getMonth() + 1 || cal.get(YEAR) < month.getYear()) //
      && cal.get(YEAR) <= month.getYear()) {
    Logr.d("Building week row starting at %s", cal.getTime());
    List<MonthCellDescriptor> weekCells = new ArrayList<MonthCellDescriptor>();
    cells.add(weekCells);
    for (int c = 0; c < 7; c++) {
      Date date = cal.getTime();
      boolean isCurrentMonth = cal.get(MONTH) == month.getMonth();
      boolean isSelected = isCurrentMonth && containsDate(selectedCals, cal);
      boolean isSelectable =
          isCurrentMonth && betweenDates(cal, minCal, maxCal) && isDateSelectable(date);
      boolean isToday = sameDate(cal, today);
      boolean isHighlighted = containsDate(highlightedCals, cal);
      int value = cal.get(DAY_OF_MONTH);
      MonthCellDescriptor.RangeState rangeState = MonthCellDescriptor.RangeState.NONE;
      if (selectedCals.size() > 1) {
        if (sameDate(minSelectedCal, cal)) {
          rangeState = MonthCellDescriptor.RangeState.FIRST;
        } else if (sameDate(maxSelectedCal, cal)) {
          // FIX: reuse the cached maximum instead of calling maxDate(selectedCals) per cell,
          // which re-sorted the selection list for every day of every month.
          rangeState = MonthCellDescriptor.RangeState.LAST;
        } else if (betweenDates(cal, minSelectedCal, maxSelectedCal)) {
          rangeState = MonthCellDescriptor.RangeState.MIDDLE;
        }
      }
      weekCells.add(
          new MonthCellDescriptor(date, isCurrentMonth, isSelectable, isSelected, isToday,
              isHighlighted, value, rangeState));
      cal.add(DATE, 1);
    }
  }
  return cells;
}
/** True when any calendar in the list falls on the same civil day as {@code date}. */
private boolean containsDate(List<Calendar> selectedCals, Date date) {
  Calendar asCal = Calendar.getInstance(locale);
  asCal.setTime(date);
  return containsDate(selectedCals, asCal);
}
/** True when any calendar in the list falls on the same civil day as {@code cal}. */
private static boolean containsDate(List<Calendar> selectedCals, Calendar cal) {
  for (Calendar candidate : selectedCals) {
    if (sameDate(cal, candidate)) {
      return true;
    }
  }
  return false;
}
/**
 * Returns the earliest calendar in the list, or null when the list is null/empty.
 * NOTE: sorts {@code selectedCals} in place as a side effect — callers appear to rely on the
 * resulting ordering, so this is deliberately preserved.
 */
private static Calendar minDate(List<Calendar> selectedCals) {
  if (selectedCals == null || selectedCals.isEmpty()) {
    return null;
  }
  Collections.sort(selectedCals);
  return selectedCals.get(0);
}
/**
 * Returns the latest calendar in the list, or null when the list is null/empty.
 * NOTE: sorts {@code selectedCals} in place as a side effect (see {@link #minDate(List)}).
 */
private static Calendar maxDate(List<Calendar> selectedCals) {
  if (selectedCals == null || selectedCals.isEmpty()) {
    return null;
  }
  Collections.sort(selectedCals);
  return selectedCals.get(selectedCals.size() - 1);
}
/** True when both calendars fall on the same civil day (year, month, day-of-month). */
private static boolean sameDate(Calendar cal, Calendar selectedDate) {
  return cal.get(YEAR) == selectedDate.get(YEAR)
      && cal.get(MONTH) == selectedDate.get(MONTH)
      && cal.get(DAY_OF_MONTH) == selectedDate.get(DAY_OF_MONTH);
}
/** Calendar-typed convenience for {@link #betweenDates(Date, Calendar, Calendar)}. */
private static boolean betweenDates(Calendar cal, Calendar minCal, Calendar maxCal) {
  return betweenDates(cal.getTime(), minCal, maxCal);
}
/** True when {@code date} lies in the half-open interval [minCal, maxCal). */
static boolean betweenDates(Date date, Calendar minCal, Calendar maxCal) {
  final Date min = minCal.getTime();
  // !before(min) is equivalent to (equals(min) || after(min)) for java.util.Date.
  return !date.before(min) && date.before(maxCal.getTime());
}
/** Returns true when {@code cal} lies in the year/month described by {@code month}. */
private static boolean sameMonth(Calendar cal, MonthDescriptor month) {
  if (cal.get(YEAR) != month.getYear()) {
    return false;
  }
  return cal.get(MONTH) == month.getMonth();
}
/** Returns true when {@code date} is selectable; with no filter installed, every date is. */
private boolean isDateSelectable(Date date) {
  if (dateConfiguredListener != null) {
    return dateConfiguredListener.isDateSelectable(date);
  }
  return true;
}
/**
 * Set a listener to be notified when the user selects or unselects a date.
 *
 * @param listener the listener to set, or null for no reaction
 */
public void setOnDateSelectedListener(OnDateSelectedListener listener) {
  dateListener = listener;
}
/**
 * Set a listener to react to user selection of a disabled (invalid) date.
 *
 * @param listener the listener to set, or null for no reaction
 */
public void setOnInvalidDateSelectedListener(OnInvalidDateSelectedListener listener) {
  invalidDateListener = listener;
}
/**
 * Set a listener used to discriminate between selectable and unselectable dates. Set this to
 * disable arbitrary dates as they are rendered.
 * <p>
 * Important: set this before you call {@link #init(Date, Date)} methods. If called afterwards,
 * it will not be consistently applied.
 *
 * @param listener the filter to apply, or null to make every date selectable
 */
public void setDateSelectableFilter(DateSelectableFilter listener) {
  dateConfiguredListener = listener;
}
/**
 * Set a listener to intercept clicks on calendar cells.
 *
 * @param listener the interceptor to set, or null for no interception
 */
public void setCellClickInterceptor(CellClickInterceptor listener) {
  cellClickInterceptor = listener;
}
/**
 * Interface to be notified when a new date is selected or unselected. This will only be called
 * when the user initiates the date selection. If you call {@link #selectDate(Date)} this
 * listener will not be notified.
 *
 * @see #setOnDateSelectedListener(OnDateSelectedListener)
 */
public interface OnDateSelectedListener {
  /** Called after the user selects {@code date}. */
  void onDateSelected(Date date);

  /** Called after the user unselects {@code date}. */
  void onDateUnselected(Date date);
}
/**
 * Interface to be notified when an invalid date is selected by the user. This will only be
 * called when the user initiates the date selection. If you call {@link #selectDate(Date)} this
 * listener will not be notified.
 *
 * @see #setOnInvalidDateSelectedListener(OnInvalidDateSelectedListener)
 */
public interface OnInvalidDateSelectedListener {
  /** Called after the user taps a date that is disabled or outside the valid range. */
  void onInvalidDateSelected(Date date);
}
/**
 * Interface used for determining the selectability of a date cell when it is configured for
 * display on the calendar.
 *
 * @see #setDateSelectableFilter(DateSelectableFilter)
 */
public interface DateSelectableFilter {
  /** Returns true when {@code date} should be selectable by the user. */
  boolean isDateSelectable(Date date);
}
/**
 * Interface to be notified when a cell is clicked and possibly intercept the click. Return true
 * to intercept the click and prevent any selections from changing.
 *
 * @see #setCellClickInterceptor(CellClickInterceptor)
 */
public interface CellClickInterceptor {
  /** Returns true to consume the click on {@code date} and prevent any selection change. */
  boolean onCellClicked(Date date);
}
/** Default reaction to an invalid tap: shows a toast describing the calendar's valid date range. */
private class DefaultOnInvalidDateSelectedListener implements OnInvalidDateSelectedListener {
  @Override
  public void onInvalidDateSelected(Date date) {
    final String minText = fullDateFormat.format(minCal.getTime());
    final String maxText = fullDateFormat.format(maxCal.getTime());
    final String message = getResources().getString(R.string.invalid_date, minText, maxText);
    Toast.makeText(getContext(), message, Toast.LENGTH_SHORT).show();
  }
}
}
| |
//
// FPlayAndroid is distributed under the FreeBSD License
//
// Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.RectF;
import android.os.Build;
import android.os.SystemClock;
import br.com.carlosrafaelgn.fplay.ui.UI;
/**
* This class performs the graphical effect used at the edges of scrollable widgets
* when the user scrolls beyond the content bounds in 2D space.
*
* <p>EdgeEffect is stateful. Custom widgets using EdgeEffect should create an
* instance for each edge that should show the effect, feed it input data using
* the methods {@link #onAbsorb(int)}, {@link #onPull(float)}, and {@link #onRelease()},
* and draw the effect using {@link #draw(Canvas)} in the widget's overridden
* {@link android.view.View#draw(Canvas)} method. If {@link #isFinished()} returns
* false after drawing, the edge effect's animation is not yet complete and the widget
* should schedule another drawing pass to continue the animation.</p>
*
* <p>When drawing, widgets should draw their main content and child views first,
* usually by invoking <code>super.draw(canvas)</code> from an overridden <code>draw</code>
* method. (This will invoke onDraw and dispatch drawing to child views as needed.)
* The edge effect may then be drawn on top of the view's content using the
* {@link #draw(Canvas)} method.</p>
*/
@SuppressWarnings("unused")
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public class BgEdgeEffect extends EdgeEffect {
	// Animation state machine: IDLE -> (PULL | ABSORB) -> (PULL_DECAY) -> RECEDE -> IDLE.
	private static final int STATE_IDLE = 0;
	private static final int STATE_PULL = 1;
	private static final int STATE_ABSORB = 2;
	private static final int STATE_RECEDE = 3;
	private static final int STATE_PULL_DECAY = 4;
	// Scales how strongly an absorbed fling velocity contributes to the glow's target alpha.
	private static final float VELOCITY_GLOW_FACTOR = 6.0f;
	// Time it will take the effect to fully recede in ms
	private static final float RECEDE_TIME = 600.0f;
	// Time it will take before a pulled glow begins receding in ms
	private static final float PULL_TIME = 167.0f;
	// Time it will take in ms for a pulled glow to decay to partial strength before release
	private static final float PULL_DECAY_TIME = 2000.0f;
	// Opacity bounds for the glow.
	private static final float MIN_ALPHA = 0.25f;
	private static final float MAX_ALPHA = 0.65f;
	// Minimum velocity that will be absorbed
	private static final int MIN_VELOCITY = 100;
	// Maximum velocity, clamps at this value
	private static final int MAX_VELOCITY = 10000;
	// Current value plus interpolation endpoints for the glow's opacity and vertical scale.
	private float mAlpha, mAlphaStart, mAlphaFinish;
	private float mScaleY, mScaleYStart, mScaleYFinish;
	// Timestamps come from SystemClock.uptimeMillis() truncated to int;
	// only differences between them are ever used.
	private int mState, mStartTime, mPullLastTime;
	private float mDuration, mPullDistance, mTargetPullScaleY;
	// Extra vertical offset added to the glow's height; public so the host view can set it directly.
	public int mOffsetY;
	private int mX, mY, mWidth;
	private final int mColor, mMaxHeight;
	/**
	 * Construct a new EdgeEffect with a theme appropriate for the provided context.
	 * @param context Context used to provide theming and resource information for the EdgeEffect
	 * @param primaryColor glow color; the alpha byte is discarded here and recomputed at draw time
	 */
	public BgEdgeEffect(Context context, int primaryColor) {
		super(context);
		mColor = primaryColor & 0x00ffffff; //keep only the rgb channels
		//a little bit below SongView's height
		mMaxHeight = (UI._1dp << 1) + UI.verticalMargin + UI._22spBox + UI._14spBox; // ((3 * UI.screenHeight) >> 5);
	}
	/**
	 * Set the size of this edge effect in pixels.
	 * <p>
	 * Only the width is stored; the height is ignored because this effect always
	 * draws with the fixed mMaxHeight computed in the constructor.
	 *
	 * @param width Effect width in pixels
	 * @param height Effect height in pixels (unused)
	 */
	public void setSize(int width, int height) {
		mWidth = width;
	}
	/**
	 * Set the position of this edge effect in pixels. This position is
	 * only used by {@link #getBounds(boolean)}.
	 *
	 * @param x The position of the edge effect on the X axis
	 * @param y The position of the edge effect on the Y axis
	 */
	public void setPosition(int x, int y) {
		mX = x;
		mY = y;
	}
	/**
	 * Returns the bounds of the edge effect.
	 *
	 * @param reverse when true the bounds extend upwards from mY (bottom edge);
	 *                when false they extend downwards from mY
	 */
	public Rect getBounds(boolean reverse) {
		final Rect rect = UI.rect; //NOTE: shared scratch Rect reused across callers
		final int dy = mY - (reverse ? (mMaxHeight + mOffsetY) : 0);
		rect.left = mX;
		rect.top = dy;
		rect.right = mX + mWidth;
		rect.bottom = dy + mMaxHeight + mOffsetY;
		return rect;
	}
	/**
	 * Reports if this EdgeEffect's animation is finished. If this method returns false
	 * after a call to {@link #draw(Canvas)} the host widget should schedule another
	 * drawing pass to continue the animation.
	 *
	 * @return true if animation is finished, false if drawing should continue on the next frame.
	 */
	public boolean isFinished() {
		return mState == STATE_IDLE;
	}
	/**
	 * Immediately finish the current animation.
	 * After this call {@link #isFinished()} will return true.
	 */
	public void finish() {
		mState = STATE_IDLE;
	}
	/**
	 * A view should call this when content is pulled away from an edge by the user.
	 * This will update the state of the current visual effect and its associated animation.
	 * The host view should always {@link android.view.View#invalidate()} after this
	 * and draw the results accordingly.
	 *
	 * <p>Views using EdgeEffect should favor {@link #onPull(float, float)} when the displacement
	 * of the pull point is known.</p>
	 *
	 * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to
	 *                      1.f (full length of the view) or negative values to express change
	 *                      back toward the edge reached to initiate the effect.
	 */
	public void onPull(float deltaDistance) {
		onPull(deltaDistance, 0.5f);
	}
	/**
	 * A view should call this when content is pulled away from an edge by the user.
	 * This will update the state of the current visual effect and its associated animation.
	 * The host view should always {@link android.view.View#invalidate()} after this
	 * and draw the results accordingly.
	 *
	 * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to
	 *                      1.f (full length of the view) or negative values to express change
	 *                      back toward the edge reached to initiate the effect.
	 * @param displacement The displacement from the starting side of the effect of the point
	 *                     initiating the pull. In the case of touch this is the finger position.
	 *                     Values may be from 0-1. (Unused by this implementation.)
	 */
	public void onPull(float deltaDistance, float displacement) {
		final int now = (int)SystemClock.uptimeMillis();
		// While a pull-decay animation is still playing, ignore further pulls
		if (mState == STATE_PULL_DECAY && (now - mStartTime) < (int)mDuration)
			return;
		if (mState != STATE_PULL) {
			mState = STATE_PULL;
			mPullLastTime = now;
		}
		mStartTime = now;
		mDuration = PULL_TIME;
		// Accumulate the absolute displacement: pulls in either direction grow the glow
		if (deltaDistance < 0.0f)
			mPullDistance -= deltaDistance;
		else
			mPullDistance += deltaDistance;
		// Target scale saturates once the accumulated pull reaches 1/5 of the view length
		mTargetPullScaleY = Math.max(0.0f, Math.min(mPullDistance * 5.0f, 1.0f)); //mScaleY = (d * d * (3.0f - (2.0f * d)));
		//mAlpha = mScaleY * MAX_ALPHA;
		//mAlphaStart = mAlpha;
		//mAlphaFinish = mAlpha;
		//mScaleYStart = mScaleY;
		//mScaleYFinish = mScaleY;
	}
	/**
	 * Call when the object is released after being pulled.
	 * This will begin the "decay" phase of the effect. After calling this method
	 * the host view should {@link android.view.View#invalidate()} and thereby
	 * draw the results accordingly.
	 */
	public void onRelease() {
		mPullDistance = 0.0f;
		// Only a pull (or its decay) can be released; absorb/recede finish on their own
		if (mState != STATE_PULL && mState != STATE_PULL_DECAY)
			return;
		// Fade from the current appearance down to nothing
		mAlphaStart = mAlpha;
		mAlphaFinish = 0.0f;
		mScaleYStart = mScaleY;
		mScaleYFinish = 0.0f;
		mState = STATE_RECEDE;
		mStartTime = (int)SystemClock.uptimeMillis();
		mDuration = RECEDE_TIME;
	}
	/**
	 * Call when the effect absorbs an impact at the given velocity.
	 * Used when a fling reaches the scroll boundary.
	 *
	 * <p>When using a {@link android.widget.Scroller} or {@link android.widget.OverScroller},
	 * the method <code>getCurrVelocity</code> will provide a reasonable approximation
	 * to use here.</p>
	 *
	 * @param velocity Velocity at impact in pixels per second.
	 */
	public void onAbsorb(int velocity) {
		if (velocity < 0)
			velocity = -velocity;
		// Clamp the magnitude into [MIN_VELOCITY, MAX_VELOCITY]
		final float fvel = (float)Math.max(MIN_VELOCITY, Math.min(velocity, MAX_VELOCITY));
		mPullDistance = 0.0f;
		mState = STATE_ABSORB;
		mStartTime = (int)SystemClock.uptimeMillis();
		// Duration (ms) grows with the absorbed velocity
		mDuration = 0.15f + (fvel * 0.02f);
		// The glow depends more on the velocity, and therefore starts out
		// nearly invisible.
		mAlphaStart = MIN_ALPHA;
		// Alpha should change for the glow as well as size.
		mAlphaFinish = fvel * VELOCITY_GLOW_FACTOR * 0.00001f;
		if (mAlphaFinish > MAX_ALPHA)
			mAlphaFinish = MAX_ALPHA;
		else if (mAlphaFinish < mAlphaStart)
			mAlphaFinish = mAlphaStart;
		mScaleYStart = 0.0f;
		// Growth for the size of the glow should be quadratic to properly
		// respond
		// to a user's scrolling speed. The faster the scrolling speed, the more
		// intense the effect should be for both the size and the saturation.
		mScaleYFinish = 0.025f + (fvel * fvel * 0.00000075f);
		if (mScaleYFinish > 1.0f)
			mScaleYFinish = 1.0f;
	}
	/**
	 * Set the color of this edge effect in argb.
	 * <p>
	 * Intentionally a no-op: the color is fixed at construction time.
	 *
	 * @param color Color in argb (ignored)
	 */
	public void setColor(int color) {
	}
	/**
	 * Return the color of this edge effect in argb.
	 * @return The color of this edge effect in argb (alpha byte is always zero)
	 */
	public int getColor() {
		return mColor;
	}
	/**
	 * Draw into the provided canvas. Assumes that the canvas has been rotated
	 * accordingly and the size has been set. The effect will be drawn the full
	 * width of X=0 to X=width, beginning from Y=0 and extending to some factor <
	 * 1.f of height.
	 *
	 * @param canvas Canvas to draw into
	 * @return true if drawing should continue beyond this frame to continue the
	 *         animation
	 */
	public boolean draw(Canvas canvas) {
		if (mState == STATE_IDLE)
			return false;
		final int now = (int)SystemClock.uptimeMillis();
		// Normalized progress of the current phase, clamped to [0, 1]
		float t = (float)(now - mStartTime) / mDuration;
		if (t > 1.0f)
			t = 1.0f;
		if (mState == STATE_PULL) {
			// Frame-rate-compensated exponential smoothing toward the pull target
			final float coefNew = (float)(now - mPullLastTime) * (0.140625f / 16.0f); //0.140625f @ 60fps (~16ms)
			mScaleY = (mTargetPullScaleY * coefNew) + (mScaleY * (1.0f - coefNew));
			mPullLastTime = now;
			if (mScaleY > mTargetPullScaleY)
				mScaleY = mTargetPullScaleY;
			mAlpha = (mScaleY * (MAX_ALPHA - MIN_ALPHA)) + MIN_ALPHA;
		} else {
			// Ease-out interpolation between the start/finish endpoints
			float interp = 1.0f - t;
			interp = (1.0f - (interp * interp));
			mAlpha = mAlphaStart + ((mAlphaFinish - mAlphaStart) * interp);
			mScaleY = mScaleYStart + ((mScaleYFinish - mScaleYStart) * interp);
			if (mAlpha >= MAX_ALPHA)
				mAlpha = MAX_ALPHA;
			if (mScaleY >= 1.0f)
				mScaleY = 1.0f;
		}
		if (t >= 1.0f) {
			// Current phase finished: advance the state machine
			switch (mState) {
			case STATE_ABSORB:
				mState = STATE_RECEDE;
				mStartTime = now;
				mDuration = RECEDE_TIME;
				mAlphaStart = mAlpha;
				mScaleYStart = mScaleY;
				// After absorb, the glow should fade to nothing.
				mAlphaFinish = 0.0f;
				mScaleYFinish = 0.0f;
				break;
			case STATE_PULL:
				mState = STATE_PULL_DECAY;
				mStartTime = now;
				mDuration = PULL_DECAY_TIME;
				mAlphaStart = mAlpha;
				mScaleYStart = mScaleY;
				// After pull, the glow should fade to nothing.
				mAlphaFinish = 0.0f;
				mScaleYFinish = 0.0f;
				break;
			case STATE_PULL_DECAY:
				// NOTE(review): start time/duration are deliberately left untouched here,
				// so the recede completes on the next frame — confirm this is intended
				mState = STATE_RECEDE;
				break;
			case STATE_RECEDE:
				mState = STATE_IDLE;
				break;
			}
		}
		// Draw the glow as the visible half of a clipped oval
		final Rect rect = UI.rect; //shared scratch objects from UI
		final RectF rectF = UI.rectF;
		canvas.save();
		UI.fillPaint.setAntiAlias(true);
		UI.fillPaint.setColor(((int)(255.0f * Math.min(mAlpha, 1.0f)) << 24) | mColor);
		rect.left = mX;
		rect.top = mOffsetY;
		rect.right = mX + mWidth;
		rect.bottom = mMaxHeight + mOffsetY;
		canvas.clipRect(rect);
		// Oval is wider than the view so its edges are clipped away
		rectF.left = (float)(mX - UI._18sp);
		rectF.right = (float)(mX + mWidth + UI._18sp);
		rectF.bottom = (mScaleY * (float)(mMaxHeight + mOffsetY));
		rectF.top = -rectF.bottom + (float)mOffsetY;
		canvas.drawOval(rectF, UI.fillPaint);
		UI.fillPaint.setAntiAlias(false);
		canvas.restore();
		return true;
	}
	/**
	 * Return the maximum height that the edge effect will be drawn at given the original
	 * {@link #setSize(int, int) input size}.
	 * @return The maximum height of the edge effect
	 */
	public int getMaxHeight() {
		return mMaxHeight + mOffsetY;
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.