gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.isis.applib.layout.component;

import java.io.Serializable;
import java.util.List;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;

import com.google.common.collect.Lists;

import org.apache.isis.applib.annotation.LabelPosition;
import org.apache.isis.applib.annotation.PromptStyle;
import org.apache.isis.applib.annotation.Where;

/**
 * Describes the layout of a single property; broadly corresponds to the
 * {@link org.apache.isis.applib.annotation.PropertyLayout} annotation.
 */
@XmlRootElement(name = "property")
@XmlType(
        name = "property",
        propOrder = { "named", "describedAs", "actions", "metadataError" })
public class PropertyLayoutData
        implements ActionLayoutDataOwner, Serializable,
                   Owned<FieldSet>, HasCssClass, HasDescribedAs, HasHidden, HasNamed {

    private static final long serialVersionUID = 1L;

    // -- state; everything is (un)marshalled to XML except 'owner' (transient)

    private String id;
    private String cssClass;
    private String describedAs;
    private Where hidden;
    private LabelPosition labelPosition;
    private Integer multiLine;
    private String named;
    private Boolean namedEscaped;
    private PromptStyle promptStyle;
    private Boolean renderedAsDayBefore;
    private Integer typicalLength;
    private Boolean unchanging;
    private List<ActionLayoutData> actions = Lists.newArrayList();
    private FieldSet owner;
    private String metadataError;

    public PropertyLayoutData() {
    }

    public PropertyLayoutData(final String id) {
        this.id = id;
    }

    /**
     * Property identifier, being the getter method without "get" or "is" prefix,
     * first letter lower cased.
     */
    @XmlAttribute(required = true)
    public String getId() {
        return id;
    }

    public void setId(final String id) {
        this.id = id;
    }

    @Override
    @XmlAttribute(required = false)
    public String getCssClass() {
        return cssClass;
    }

    @Override
    public void setCssClass(final String cssClass) {
        this.cssClass = cssClass;
    }

    @Override
    @XmlElement(required = false)
    public String getDescribedAs() {
        return describedAs;
    }

    @Override
    public void setDescribedAs(final String describedAs) {
        this.describedAs = describedAs;
    }

    @Override
    @XmlAttribute(required = false)
    public Where getHidden() {
        return hidden;
    }

    @Override
    public void setHidden(final Where hidden) {
        this.hidden = hidden;
    }

    @XmlAttribute(required = false)
    public LabelPosition getLabelPosition() {
        return labelPosition;
    }

    public void setLabelPosition(final LabelPosition labelPosition) {
        this.labelPosition = labelPosition;
    }

    @XmlAttribute(required = false)
    public Integer getMultiLine() {
        return multiLine;
    }

    public void setMultiLine(final Integer multiLine) {
        this.multiLine = multiLine;
    }

    @Override
    @XmlElement(required = false)
    public String getNamed() {
        return named;
    }

    @Override
    public void setNamed(final String named) {
        this.named = named;
    }

    @Override
    @XmlAttribute(required = false)
    public Boolean getNamedEscaped() {
        return namedEscaped;
    }

    @Override
    public void setNamedEscaped(final Boolean namedEscaped) {
        this.namedEscaped = namedEscaped;
    }

    @XmlAttribute(required = false)
    public PromptStyle getPromptStyle() {
        return promptStyle;
    }

    public void setPromptStyle(final PromptStyle promptStyle) {
        this.promptStyle = promptStyle;
    }

    @XmlAttribute(required = false)
    public Boolean getRenderedAsDayBefore() {
        return renderedAsDayBefore;
    }

    public void setRenderedAsDayBefore(final Boolean renderedAsDayBefore) {
        this.renderedAsDayBefore = renderedAsDayBefore;
    }

    @XmlAttribute(required = false)
    public Integer getTypicalLength() {
        return typicalLength;
    }

    public void setTypicalLength(final Integer typicalLength) {
        this.typicalLength = typicalLength;
    }

    @XmlAttribute(required = false)
    public Boolean getUnchanging() {
        return unchanging;
    }

    public void setUnchanging(final Boolean unchanging) {
        this.unchanging = unchanging;
    }

    // no wrapper element; each action is marshalled directly as an <action> child
    @XmlElement(name = "action", required = false)
    public List<ActionLayoutData> getActions() {
        return actions;
    }

    public void setActions(final List<ActionLayoutData> actionLayoutDatas) {
        this.actions = actionLayoutDatas;
    }

    /**
     * Owner.
     *
     * <p>
     *     Set programmatically by framework after reading in from XML.
     * </p>
     */
    @XmlTransient
    public FieldSet getOwner() {
        return owner;
    }

    public void setOwner(final FieldSet owner) {
        this.owner = owner;
    }

    /**
     * For diagnostics; populated by the framework if and only if a metadata error.
     */
    @XmlElement(required = false)
    public String getMetadataError() {
        return metadataError;
    }

    public void setMetadataError(final String metadataError) {
        this.metadataError = metadataError;
    }

    @Override
    public String toString() {
        return "PropertyLayoutData{" +
                "id='" + id + '\'' +
                '}';
    }

}
/* * Copyright 2011 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.factmodel.traits; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Serializable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.BitSet; import java.util.Map; import java.util.Set; import org.drools.core.factmodel.BuildUtils; import org.drools.core.factmodel.ClassDefinition; import org.drools.core.factmodel.FieldDefinition; import org.drools.core.factmodel.traits.TraitBuilderUtil.MixinInfo; import org.drools.core.util.ExternalizableLinkedHashMap; import org.kie.api.definition.type.FactField; import org.mvel2.asm.ClassVisitor; import org.mvel2.asm.ClassWriter; import org.mvel2.asm.FieldVisitor; import org.mvel2.asm.Label; import org.mvel2.asm.MethodVisitor; import org.mvel2.asm.Type; import static org.drools.core.factmodel.traits.TraitBuilderUtil.buildMixinMethods; import static org.drools.core.factmodel.traits.TraitBuilderUtil.findMixinInfo; import static org.drools.core.factmodel.traits.TraitBuilderUtil.getMixinName; import static org.drools.core.rule.builder.dialect.asm.ClassGenerator.createClassWriter; public class TraitMapProxyClassBuilderImpl extends AbstractProxyClassBuilderImpl implements TraitProxyClassBuilder, Serializable { public byte[] buildClass( ClassDefinition core, ClassLoader classLoader ) 
// buildClass: uses ASM to generate the bytecode of a map-backed trait proxy for
// the given core class.  The generated class extends proxyBaseClass and
// implements the trait interface plus Serializable; it declares a static
// traitNameField, a public "object" field (the core instance), a public "map"
// field (the backing Map<String,Object>), and one private field per mixin.
throws IOException, SecurityException, IllegalArgumentException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchFieldException { FieldVisitor fv; MethodVisitor mv; // get the method bitmask BitSet mask = traitRegistry.getFieldMask( getTrait().getName(), core.getDefinedClass().getName() ); String name = TraitFactory.getPropertyWrapperName( getTrait(), core ); String masterName = TraitFactory.getProxyName( getTrait(), core ); Class<?> traitClass = getTrait().getDefinedClass(); String internalWrapper = BuildUtils.getInternalType( name ); String internalProxy = BuildUtils.getInternalType( masterName ); String descrCore = Type.getDescriptor( core.getDefinedClass() ); String internalCore = Type.getInternalName( core.getDefinedClass() ); String internalTrait = Type.getInternalName( traitClass ); MixinInfo mixinInfo = findMixinInfo(traitClass); ClassWriter cw = createClassWriter( classLoader, ACC_PUBLIC + ACC_SUPER, internalProxy, null, Type.getInternalName( proxyBaseClass ), new String[]{ internalTrait, Type.getInternalName( Serializable.class ) } ); { fv = cw.visitField( ACC_PRIVATE + ACC_FINAL + ACC_STATIC, TraitType.traitNameField, Type.getDescriptor( String.class ), null, null ); fv.visitEnd(); } { fv = cw.visitField( ACC_PUBLIC, "object", descrCore, null, null ); fv.visitEnd(); } { fv = cw.visitField( ACC_PUBLIC, "map", Type.getDescriptor( Map.class ), "Ljava/util/Map<Ljava/lang/String;Ljava/lang/Object;>;", null ); fv.visitEnd(); } if ( mixinInfo != null ) { for ( Class<?> mixinClass : mixinInfo.mixinClasses ) { { fv = cw.visitField( ACC_PRIVATE, getMixinName(mixinClass), BuildUtils.getTypeDescriptor( mixinClass.getName() ), null, null ); fv.visitEnd(); } } } { mv = cw.visitMethod( ACC_PUBLIC, "<init>", "(" + descrCore + Type.getDescriptor( Map.class ) + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( boolean.class ) + ")V", "(" + descrCore 
// (generic signature of the generated <init>(core, map, typeCode,
// bottomTypeCode, boolean) continues; the emitted constructor body below
// default-initializes a null map arg to an ExternalizableLinkedHashMap,
// instantiates each mixin — preferring a 1-arg constructor taking the trait's
// defined class, falling back to the no-arg one — and wires object/map fields)
+ "Ljava/util/Map<Ljava/lang/String;Ljava/lang/Object;>;" + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( boolean.class ) + ")V", null); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "<init>", "()V", false ); mv.visitVarInsn( ALOAD, 2 ); Label l0 = new Label(); mv.visitJumpInsn( IFNONNULL, l0 ); mv.visitTypeInsn( NEW, Type.getInternalName( ExternalizableLinkedHashMap.class ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( ExternalizableLinkedHashMap.class ), "<init>", "()V", false ); mv.visitVarInsn( ASTORE, 2 ); mv.visitLabel( l0 ); if ( mixinInfo != null ) { for ( Class<?> mixinClass : mixinInfo.mixinClasses ) { String mixin = getMixinName( mixinClass ); try { Class actualArg = getPossibleConstructor( mixinClass, trait.getDefinedClass() ); mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, Type.getInternalName( mixinClass ) ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( mixinClass ), "<init>", "(" + Type.getDescriptor( actualArg ) + ")V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, mixin, Type.getDescriptor( mixinClass ) ); } catch (NoSuchMethodException nsme) { mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, Type.getInternalName( mixinClass ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( mixinClass ), "<init>", "()V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, mixin, Type.getDescriptor( mixinClass ) ); } } } mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitFieldInsn( PUTFIELD, internalProxy, "object", descrCore ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitFieldInsn( PUTFIELD, internalProxy, "map", Type.getDescriptor( Map.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 3 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalProxy, 
// (constructor body continues: calls setTypeCode(BitSet), builds the property
// wrapper as the proxy's "fields" map, and lazily initializes the core
// object's dynamic-properties map and trait map if they are still null)
"setTypeCode", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( BitSet.class ) ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, internalWrapper ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKESPECIAL, internalWrapper, "<init>", "(" + descrCore + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, "fields", Type.getDescriptor( Map.class ) ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_getDynamicProperties", "()" + Type.getDescriptor( Map.class ), false ); Label l1 = new Label(); mv.visitJumpInsn( IFNONNULL, l1 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setDynamicProperties", "(" + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitLabel( l1 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_getTraitMap", "()" + Type.getDescriptor( Map.class ), false ); Label l2 = new Label(); mv.visitJumpInsn( IFNONNULL, l2 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( NEW, Type.getInternalName( TraitTypeMap.class ) ); mv.visitInsn( DUP ); mv.visitTypeInsn( NEW, Type.getInternalName( ExternalizableLinkedHashMap.class ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( ExternalizableLinkedHashMap.class ), "<init>", "()V", false ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( TraitTypeMap.class ), "<init>", "(" + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setTraitMap", "(" + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitLabel( l2 ); // core._setBottomTypeCode() mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 4 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setBottomTypeCode", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( BitSet.class ) ), false ); // core.addTrait mv.visitVarInsn( ALOAD, 1 
// (constructor body continues: registers this proxy on the core object via
// addTrait under the trait name stripped of TraitFactory.SUFFIX, then calls
// synchFields(boolean); afterwards the generated writeExternal(ObjectOutput)
// is emitted: it serializes getObject(), then the "map" field, then delegates
// to the base class implementation)
); mv.visitLdcInsn( trait.getName().endsWith( TraitFactory.SUFFIX ) ? trait.getName().replace( TraitFactory.SUFFIX , "" ) : trait.getName() ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "addTrait", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( String.class ), Type.getType( Thing.class ) ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ILOAD, 5 ); mv.visitMethodInsn( INVOKESPECIAL, internalProxy, "synchFields", Type.getMethodDescriptor( Type.VOID_TYPE, Type.BOOLEAN_TYPE ), false ); mv.visitInsn( RETURN ); // mv.visitMaxs( 5, 3 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } { mv = cw.visitMethod( ACC_PUBLIC, "writeExternal", "(" + Type.getDescriptor( ObjectOutput.class ) + ")V", null, new String[] { Type.getInternalName( IOException.class ) } ); mv.visitCode(); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalProxy, "getObject", "()" + Type.getDescriptor( TraitableBean.class ), false ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V", true ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, internalProxy, "map", Type.getDescriptor( Map.class ) ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V", true); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "writeExternal", "(" + Type.getDescriptor( ObjectOutput.class ) + ")V", false ); mv.visitInsn( RETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } { mv = cw.visitMethod( ACC_PUBLIC, "readExternal", "(" + Type.getDescriptor( ObjectInput.class )+ ")V", null, new String[] { Type.getInternalName( IOException.class ), Type.getInternalName( ClassNotFoundException.class ) } ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); 
// (generated readExternal(ObjectInput) continues: reads back "object" and
// "map" in the same order writeExternal wrote them, delegates to the base
// class, then the remaining pieces of the proxy class are assembled — fields,
// equality/hashCode, mixin forwarders, common and extended methods)
mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ), true ); mv.visitTypeInsn( CHECKCAST, internalCore ); mv.visitFieldInsn( PUTFIELD, internalProxy, "object", descrCore ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ), true ); mv.visitTypeInsn( CHECKCAST, Type.getInternalName( Map.class ) ); mv.visitFieldInsn( PUTFIELD, internalProxy, "map", Type.getDescriptor( Map.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "readExternal", "(" + Type.getDescriptor( ObjectInput.class ) + ")V", false ); mv.visitInsn( RETURN ); // mv.visitMaxs( 3, 2 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } helpBuildClass( core, cw, internalProxy, descrCore, mask ); buildFields( core, mask, masterName, mixinInfo, cw ); buildKeys( core, masterName, cw ); buildMixinMethods( masterName, mixinInfo, cw ); buildCommonMethods( cw, masterName ); buildExtendedMethods( cw, trait, core ); buildShadowMethods( cw, trait, core ); cw.visitEnd(); return cw.toByteArray(); } protected void buildKeys( ClassDefinition core, String masterName, ClassWriter cw ) { boolean hasKeys = false; for ( FactField ff : trait.getFields() ) { if ( ff.isKey() ) { hasKeys = true; break; } } if ( ! hasKeys ) { buildEqualityMethods( cw, masterName, core.getClassName() ); } else { buildKeyedEqualityMethods( cw, trait, masterName ); } } protected void buildFields( ClassDefinition core, BitSet mask, String masterName, MixinInfo mixinInfo, ClassWriter cw ) { int j = 0; for ( FieldDefinition field : trait.getFieldsDefinitions() ) { boolean hardField = ! 
// (buildFields continues: a field is "soft" when masked out by the registry;
// under full traiting every field gets a logical getter and a hard/soft
// setter, otherwise hard fields get hard accessors and soft fields fall back
// to map-backed accessors — unless a mixin already provides the getter)
TraitRegistry.isSoftField( field, j++, mask ); if ( core.isFullTraiting() ) { buildLogicalGetter( cw, field, masterName, core ); if ( hardField ) { buildHardSetter( cw, field, masterName, trait, core ); } else { buildSoftSetter( cw, field, masterName, core ); } } else { if ( ! hardField ) { if (mixinInfo == null || !mixinInfo.isMixinGetter( field )) { buildSoftGetter( cw, field, masterName ); buildSoftSetter( cw, field, masterName, core ); } } else { buildHardGetter( cw, field, masterName, trait, core ); buildHardSetter( cw, field, masterName, trait, core ); } } } } protected void buildShadowMethods( ClassWriter cw, ClassDefinition trait, ClassDefinition core ) { for ( Method m : trait.getDefinedClass().getMethods() ) { if ( ! TraitFactory.excludeFromShadowing( m, trait ) ) { Method q; try { q = core.getDefinedClass().getMethod( m.getName(), m.getParameterTypes() ); if ( TraitFactory.isCompatible( m, q ) ) { buildShadowMethod( cw, trait, core, m ); } } catch ( NoSuchMethodException e ) { // nothing to do here } } } } protected Class getPossibleConstructor( Class<?> klass, Class<?> arg ) throws NoSuchMethodException { Constructor[] ctors = klass.getConstructors(); for ( Constructor c : ctors ) { Class<?>[] cpars = c.getParameterTypes(); if ( cpars.length != 1 || ! 
// (getPossibleConstructor continues: returns the parameter type of the first
// public single-arg constructor whose parameter is assignable from `arg`, or
// throws NoSuchMethodException; buildHardGetter/Setter delegate to the
// TraitFactory extractor/injector machinery)
cpars[0].isAssignableFrom( arg ) ) { continue; } return cpars[0]; } throw new NoSuchMethodException( "Constructor for " + klass + " using " + arg + " not found " ); } protected void buildHardGetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition proxy, ClassDefinition core ) { buildHardGetter( cw, field, masterName, proxy, core, BuildUtils.getterName( field.getName(), field.getTypeName() ), ACC_PUBLIC ); } protected void buildHardGetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition proxy, ClassDefinition core, String getterName, int accessLevel ) { Class fieldType = field.getType(); MethodVisitor mv = cw.visitMethod( accessLevel, getterName, "()" + Type.getDescriptor( field.getType() ), null, null); mv.visitCode(); TraitFactory.invokeExtractor(mv, masterName, core, field ); if ( ! BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitTypeInsn( CHECKCAST, Type.getInternalName( fieldType ) ); } mv.visitInsn( BuildUtils.returnType ( field.getTypeName() ) ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildHardSetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition trait, ClassDefinition core ) { buildHardSetter(cw, field, masterName, trait, core, BuildUtils.setterName( field.getName()), ACC_PUBLIC ); } protected void buildSoftSetter( ClassVisitor cw, FieldDefinition field, String proxy, ClassDefinition core ) { buildSoftSetter(cw, field, proxy, core, BuildUtils.setterName( field.getName()), ACC_PUBLIC ); } protected void buildSoftSetter( ClassVisitor cw, FieldDefinition field, String proxy, ClassDefinition core, String setterName, int accessMode ) { String type = field.getTypeName(); MethodVisitor mv = cw.visitMethod( accessMode, setterName, "(" + BuildUtils.getTypeDescriptor( type ) + ")V", null, null ); mv.visitCode(); if ( core.isFullTraiting() ) { logicalSetter( mv, field, proxy, core ); } mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, 
// (buildSoftSetter continues: stores the argument — boxed if primitive — into
// the proxy's "map" under the field's resolved alias via Map.put; the
// buildSoftGetter overloads below read it back)
BuildUtils.getInternalType( proxy ), "map", Type.getDescriptor( Map.class ) ); mv.visitLdcInsn( field.resolveAlias() ); mv.visitVarInsn( BuildUtils.varType( type ), 1 ); if ( BuildUtils.isPrimitive( type ) ) { TraitFactory.valueOf( mv, type ); } mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "put", "(" + Type.getDescriptor( Object.class ) + Type.getDescriptor( Object.class ) + ")" + Type.getDescriptor( Object.class ), true ); mv.visitInsn( POP ); mv.visitInsn( RETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildSoftGetter( ClassVisitor cw, FieldDefinition field, String proxy ) { buildSoftGetter( cw, field, proxy, BuildUtils.getterName( field.getName(), field.getTypeName() ), ACC_PUBLIC ); } protected void buildSoftGetter( ClassVisitor cw, FieldDefinition field, String proxy, String getterName, int accessLevel ) { String type = field.getTypeName(); MethodVisitor mv = cw.visitMethod( accessLevel, getterName, "()"+ BuildUtils.getTypeDescriptor( type ), null, null ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( proxy ), "map", Type.getDescriptor( Map.class ) ); mv.visitLdcInsn( field.resolveAlias() ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "get", "(" + Type.getDescriptor( Object.class ) + ")" + Type.getDescriptor( Object.class ), true ); String actualType = BuildUtils.isPrimitive( type ) ? 
// (buildSoftGetter continues: the generated code null-checks and
// instanceof-checks the map value against the (boxed) field type, unboxing
// primitives; a missing or mistyped entry yields zero for primitives and null
// for reference types)
BuildUtils.box( type ) : type; mv.visitVarInsn( ASTORE, 1 ); mv.visitVarInsn( ALOAD, 1 ); Label l0 = new Label(); mv.visitJumpInsn( IFNULL, l0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( INSTANCEOF, BuildUtils.getInternalType( actualType ) ); mv.visitJumpInsn( IFEQ, l0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( CHECKCAST, BuildUtils.getInternalType( actualType ) ); if ( BuildUtils.isPrimitive( type ) ) { TraitFactory.primitiveValue( mv, type ); mv.visitInsn( BuildUtils.returnType( type ) ); mv.visitLabel( l0 ); mv.visitInsn( BuildUtils.zero( type ) ); mv.visitInsn( BuildUtils.returnType( type ) ); } else { mv.visitInsn( ARETURN ); mv.visitLabel( l0 ); mv.visitInsn( ACONST_NULL ); mv.visitInsn( ARETURN ); } mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildCommonMethods(ClassWriter cw, String proxy ) { String proxyType = BuildUtils.getInternalType( proxy ); { MethodVisitor mv = cw.visitMethod( ACC_PUBLIC, "toString", "()" + Type.getDescriptor( String.class ), null, null ); mv.visitCode(); mv.visitTypeInsn( NEW, Type.getInternalName( StringBuilder.class ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( StringBuilder.class ), "<init>", "()V", false ); mv.visitLdcInsn( "(@" + proxy + ") : " ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "append", "(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( StringBuilder.class ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, "getFields", "()" + Type.getDescriptor( Map.class ), false ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "entrySet", "()" + Type.getDescriptor( Set.class ), true ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "toString", "()" + Type.getDescriptor( String.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "append", "(" + Type.getDescriptor( String.class ) + ")" + 
// (generated toString continues — "(@proxy) : " + fields entrySet;
// buildKeyedEquals below emits a key-field-based equals(Object): identity
// check, null/getClass checks, then per-key-field comparison)
Type.getDescriptor( StringBuilder.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "toString", "()" + Type.getDescriptor( String.class ), false ); mv.visitInsn( ARETURN ); // mv.visitMaxs( 2, 1 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } } protected void buildKeyedEquals( ClassVisitor cw, ClassDefinition classDef, String proxyType ) { MethodVisitor mv; mv = cw.visitMethod( ACC_PUBLIC, "equals", "(" + Type.getDescriptor( Object.class ) + ")Z", null, null ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); Label l0 = new Label(); mv.visitJumpInsn( IF_ACMPNE, l0 ); mv.visitInsn( ICONST_1 ); mv.visitInsn( IRETURN ); mv.visitLabel( l0 ); mv.visitVarInsn( ALOAD, 1 ); Label l1 = new Label(); mv.visitJumpInsn( IFNULL, l1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "getClass", "()" + Type.getDescriptor( Class.class ), false ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "getClass", "()" + Type.getDescriptor( Class.class ), false ); Label l2 = new Label(); mv.visitJumpInsn( IF_ACMPEQ, l2 ); mv.visitLabel( l1 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l2 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( CHECKCAST, proxyType ); mv.visitVarInsn( ASTORE, 2 ); int x = 2; for ( FieldDefinition field : classDef.getFieldsDefinitions() ) { if ( field.isKey() ) { if ( ! 
// (buildKeyedEquals continues: reference-typed keys compared via equals with
// null handling; double/float keys via Double.compare/Float.compare; long
// keys via LCMP; remaining int-like keys via IF_ICMPEQ — any mismatch returns
// false, otherwise the method falls through to return true)
BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); Label l11 = new Label(); mv.visitJumpInsn( IFNULL, l11 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( field.getTypeName() ), "equals", "(" + Type.getDescriptor( Object.class ) + ")Z", false ); Label l12 = new Label(); mv.visitJumpInsn( IFNE, l12 ); Label l13 = new Label(); mv.visitJumpInsn( GOTO, l13 ); mv.visitLabel( l11 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitJumpInsn( IFNULL, l12 ); mv.visitLabel( l13 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l12 ); } else if ( "double".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Double.class ), "compare", "(DD)I", false ); Label l5 = new Label(); mv.visitJumpInsn( IFEQ, l5 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l5 ); x = Math.max( x, 4 ); } else if ( 
// (float key comparison follows, then long and the default int-like case)
"float".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Float.class ), "compare", "(FF)I", false ); Label l6 = new Label(); mv.visitJumpInsn( IFEQ, l6 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l6 ); } else if ( "long".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitInsn( LCMP ); Label l8 = new Label(); mv.visitJumpInsn( IFEQ, l8 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l8 ); x = Math.max( x, 4 ); } else { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); Label l4 = new Label(); mv.visitJumpInsn( IF_ICMPEQ, l4 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l4 ); } } } mv.visitInsn( ICONST_1 ); mv.visitInsn( IRETURN ); // mv.visitMaxs( x, 3 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildKeyedHashCode( ClassVisitor cw, ClassDefinition classDef, String 
// (buildKeyedHashCode: emits a hashCode() accumulating 31 * h + keyFieldHash
// over the key fields, with the conventional special cases — hashCode() for
// reference types, doubleToLongBits/floatToIntBits for double/float,
// v ^ (v >>> 32) for long, 0/1 for boolean)
proxyType ) { MethodVisitor mv; { mv = cw.visitMethod( ACC_PUBLIC, "hashCode", "()I", null, null ); mv.visitCode(); mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ISTORE, 1 ); int x = 2; int y = 2; for ( FieldDefinition field : classDef.getFieldsDefinitions() ) { if ( field.isKey() ) { if ( ! BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); Label l8 = new Label(); mv.visitJumpInsn( IFNULL, l8 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( field.getTypeName() ), "hashCode", "()I", false ); Label l9 = new Label(); mv.visitJumpInsn( GOTO, l9 ); mv.visitLabel( l8 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l9 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } else if ( "double".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitInsn( DCONST_0 ); mv.visitInsn( DCMPL ); Label l2 = new Label(); mv.visitJumpInsn( IFEQ, l2 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Double.class ), "doubleToLongBits", "(D)J", false ); Label l3 = new Label(); mv.visitJumpInsn( GOTO, l3 ); mv.visitLabel( l2 ); mv.visitInsn( LCONST_0 ); mv.visitLabel( l3 ); mv.visitVarInsn( LSTORE, 2 ); mv.visitIntInsn( BIPUSH, 31 ); 
// (hashCode accumulation continues: boolean, float, then long cases)
mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( LLOAD, 2 ); mv.visitVarInsn( LLOAD, 2 ); mv.visitIntInsn( BIPUSH, 32 ); mv.visitInsn( LUSHR ); mv.visitInsn( LXOR ); mv.visitInsn( L2I ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 6, x ); y = Math.max( 4, y ); } else if ( "boolean".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); Label l4 = new Label(); mv.visitJumpInsn( IFEQ, l4 ); mv.visitInsn( ICONST_1 ); Label l5 = new Label(); mv.visitJumpInsn( GOTO, l5 ); mv.visitLabel( l4 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l5 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } else if ( "float".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitInsn( FCONST_0 ); mv.visitInsn( FCMPL ); Label l6 = new Label(); mv.visitJumpInsn( IFEQ, l6 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Float.class ), "floatToIntBits", "(F)I", false ); Label l7 = new Label(); mv.visitJumpInsn( GOTO, l7 ); mv.visitLabel( l6 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l7 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 3, x ); } else if ( "long".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( 
// (long and default int-like cases of the hashCode accumulation continue; the
// method returns the accumulator; buildExtendedMethods delegates to
// buildSynchFields for the generated proxy)
INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitIntInsn( BIPUSH, 32 ); mv.visitInsn( LUSHR ); mv.visitInsn( LXOR ); mv.visitInsn( L2I ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 6, x ); } else { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + Type.getDescriptor( field.getType() ), false ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } } } mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IRETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } } protected void buildExtendedMethods( ClassWriter cw, ClassDefinition trait, ClassDefinition core ) { buildSynchFields( cw, TraitFactory.getProxyName( trait, core ), trait, core ); } }
/*
 * Copyright 2002-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.jdbc.support;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Types;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.jdbc.CannotGetJdbcConnectionException;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.NumberUtils;
import org.springframework.util.StringUtils;

/**
 * Generic utility methods for working with JDBC. Mainly for internal use
 * within the framework, but also useful for custom JDBC access code.
 *
 * @author Thomas Risberg
 * @author Juergen Hoeller
 */
public abstract class JdbcUtils {

	/**
	 * Constant that indicates an unknown (or unspecified) SQL type.
	 * @see java.sql.Types
	 */
	public static final int TYPE_UNKNOWN = Integer.MIN_VALUE;

	private static final Log logger = LogFactory.getLog(JdbcUtils.class);

	// Reverse lookup from java.sql.Types constant value to its constant name,
	// populated once via reflection below. Used by resolveTypeName(int).
	private static final Map<Integer, String> typeNames = new HashMap<>();

	static {
		try {
			// All fields on java.sql.Types are public static final int type codes.
			for (Field field : Types.class.getFields()) {
				typeNames.put((Integer) field.get(null), field.getName());
			}
		}
		catch (Exception ex) {
			throw new IllegalStateException("Failed to resolve JDBC Types constants", ex);
		}
	}


	/**
	 * Close the given JDBC Connection and ignore any thrown exception.
	 * This is useful for typical finally blocks in manual JDBC code.
	 * @param con the JDBC Connection to close (may be {@code null})
	 */
	public static void closeConnection(@Nullable Connection con) {
		if (con != null) {
			try {
				con.close();
			}
			catch (SQLException ex) {
				logger.debug("Could not close JDBC Connection", ex);
			}
			catch (Throwable ex) {
				// We don't trust the JDBC driver: It might throw RuntimeException or Error.
				logger.debug("Unexpected exception on closing JDBC Connection", ex);
			}
		}
	}

	/**
	 * Close the given JDBC Statement and ignore any thrown exception.
	 * This is useful for typical finally blocks in manual JDBC code.
	 * @param stmt the JDBC Statement to close (may be {@code null})
	 */
	public static void closeStatement(@Nullable Statement stmt) {
		if (stmt != null) {
			try {
				stmt.close();
			}
			catch (SQLException ex) {
				logger.trace("Could not close JDBC Statement", ex);
			}
			catch (Throwable ex) {
				// We don't trust the JDBC driver: It might throw RuntimeException or Error.
				logger.trace("Unexpected exception on closing JDBC Statement", ex);
			}
		}
	}

	/**
	 * Close the given JDBC ResultSet and ignore any thrown exception.
	 * This is useful for typical finally blocks in manual JDBC code.
	 * @param rs the JDBC ResultSet to close (may be {@code null})
	 */
	public static void closeResultSet(@Nullable ResultSet rs) {
		if (rs != null) {
			try {
				rs.close();
			}
			catch (SQLException ex) {
				logger.trace("Could not close JDBC ResultSet", ex);
			}
			catch (Throwable ex) {
				// We don't trust the JDBC driver: It might throw RuntimeException or Error.
				logger.trace("Unexpected exception on closing JDBC ResultSet", ex);
			}
		}
	}

	/**
	 * Retrieve a JDBC column value from a ResultSet, using the specified value type.
	 * <p>Uses the specifically typed ResultSet accessor methods, falling back to
	 * {@link #getResultSetValue(java.sql.ResultSet, int)} for unknown types.
	 * <p>Note that the returned value may not be assignable to the specified
	 * required type, in case of an unknown type. Calling code needs to deal
	 * with this case appropriately, e.g. throwing a corresponding exception.
	 * @param rs is the ResultSet holding the data
	 * @param index is the column index
	 * @param requiredType the required value type (may be {@code null})
	 * @return the value object (possibly not of the specified required type,
	 * with further conversion steps necessary)
	 * @throws SQLException if thrown by the JDBC API
	 * @see #getResultSetValue(ResultSet, int)
	 */
	@Nullable
	public static Object getResultSetValue(ResultSet rs, int index, @Nullable Class<?> requiredType)
			throws SQLException {

		if (requiredType == null) {
			return getResultSetValue(rs, index);
		}

		Object value;

		// Explicitly extract typed value, as far as possible.
		// Reference-typed results return immediately; primitive accessors fall
		// through to the wasNull() check at the bottom (they can't encode null).
		if (String.class == requiredType) {
			return rs.getString(index);
		}
		else if (boolean.class == requiredType || Boolean.class == requiredType) {
			value = rs.getBoolean(index);
		}
		else if (byte.class == requiredType || Byte.class == requiredType) {
			value = rs.getByte(index);
		}
		else if (short.class == requiredType || Short.class == requiredType) {
			value = rs.getShort(index);
		}
		else if (int.class == requiredType || Integer.class == requiredType) {
			value = rs.getInt(index);
		}
		else if (long.class == requiredType || Long.class == requiredType) {
			value = rs.getLong(index);
		}
		else if (float.class == requiredType || Float.class == requiredType) {
			value = rs.getFloat(index);
		}
		else if (double.class == requiredType || Double.class == requiredType ||
				Number.class == requiredType) {
			value = rs.getDouble(index);
		}
		else if (BigDecimal.class == requiredType) {
			return rs.getBigDecimal(index);
		}
		else if (java.sql.Date.class == requiredType) {
			return rs.getDate(index);
		}
		else if (java.sql.Time.class == requiredType) {
			return rs.getTime(index);
		}
		else if (java.sql.Timestamp.class == requiredType || java.util.Date.class == requiredType) {
			return rs.getTimestamp(index);
		}
		else if (byte[].class == requiredType) {
			return rs.getBytes(index);
		}
		else if (Blob.class == requiredType) {
			return rs.getBlob(index);
		}
		else if (Clob.class == requiredType) {
			return rs.getClob(index);
		}
		else if (requiredType.isEnum()) {
			// Enums can either be represented through a String or an enum index value:
			// leave enum type conversion up to the caller (e.g. a ConversionService)
			// but make sure that we return nothing other than a String or an Integer.
			Object obj = rs.getObject(index);
			if (obj instanceof String) {
				return obj;
			}
			else if (obj instanceof Number) {
				// Defensively convert any Number to an Integer (as needed by our
				// ConversionService's IntegerToEnumConverterFactory) for use as index
				return NumberUtils.convertNumberToTargetClass((Number) obj, Integer.class);
			}
			else {
				// e.g. on Postgres: getObject returns a PGObject but we need a String
				return rs.getString(index);
			}
		}

		else {
			// Some unknown type desired -> rely on getObject.
			try {
				return rs.getObject(index, requiredType);
			}
			catch (AbstractMethodError err) {
				logger.debug("JDBC driver does not implement JDBC 4.1 'getObject(int, Class)' method", err);
			}
			catch (SQLFeatureNotSupportedException ex) {
				logger.debug("JDBC driver does not support JDBC 4.1 'getObject(int, Class)' method", ex);
			}
			catch (SQLException ex) {
				logger.debug("JDBC driver has limited support for JDBC 4.1 'getObject(int, Class)' method", ex);
			}

			// Corresponding SQL types for JSR-310 / Joda-Time types, left up
			// to the caller to convert them (e.g. through a ConversionService).
			// Matching by simple name avoids a hard dependency on those types.
			String typeName = requiredType.getSimpleName();
			if ("LocalDate".equals(typeName)) {
				return rs.getDate(index);
			}
			else if ("LocalTime".equals(typeName)) {
				return rs.getTime(index);
			}
			else if ("LocalDateTime".equals(typeName)) {
				return rs.getTimestamp(index);
			}

			// Fall back to getObject without type specification, again
			// left up to the caller to convert the value if necessary.
			return getResultSetValue(rs, index);
		}

		// Perform was-null check if necessary (for results that the JDBC driver returns as primitives).
		return (rs.wasNull() ? null : value);
	}

	/**
	 * Retrieve a JDBC column value from a ResultSet, using the most appropriate
	 * value type. The returned value should be a detached value object, not having
	 * any ties to the active ResultSet: in particular, it should not be a Blob or
	 * Clob object but rather a byte array or String representation, respectively.
	 * <p>Uses the {@code getObject(index)} method, but includes additional "hacks"
	 * to get around Oracle 10g returning a non-standard object for its TIMESTAMP
	 * datatype and a {@code java.sql.Date} for DATE columns leaving out the
	 * time portion: These columns will explicitly be extracted as standard
	 * {@code java.sql.Timestamp} object.
	 * @param rs is the ResultSet holding the data
	 * @param index is the column index
	 * @return the value object
	 * @throws SQLException if thrown by the JDBC API
	 * @see java.sql.Blob
	 * @see java.sql.Clob
	 * @see java.sql.Timestamp
	 */
	@Nullable
	public static Object getResultSetValue(ResultSet rs, int index) throws SQLException {
		Object obj = rs.getObject(index);
		String className = null;
		if (obj != null) {
			className = obj.getClass().getName();
		}
		if (obj instanceof Blob blob) {
			// Detach: materialize the full LOB content as a byte array.
			obj = blob.getBytes(1, (int) blob.length());
		}
		else if (obj instanceof Clob clob) {
			// Detach: materialize the full LOB content as a String.
			obj = clob.getSubString(1, (int) clob.length());
		}
		else if ("oracle.sql.TIMESTAMP".equals(className) || "oracle.sql.TIMESTAMPTZ".equals(className)) {
			// Oracle driver returns proprietary timestamp objects: re-read as standard Timestamp.
			obj = rs.getTimestamp(index);
		}
		else if (className != null && className.startsWith("oracle.sql.DATE")) {
			// Oracle DATE may actually carry a time portion: consult the column
			// meta-data to decide between Timestamp and Date extraction.
			String metaDataClassName = rs.getMetaData().getColumnClassName(index);
			if ("java.sql.Timestamp".equals(metaDataClassName) || "oracle.sql.TIMESTAMP".equals(metaDataClassName)) {
				obj = rs.getTimestamp(index);
			}
			else {
				obj = rs.getDate(index);
			}
		}
		else if (obj instanceof java.sql.Date) {
			if ("java.sql.Timestamp".equals(rs.getMetaData().getColumnClassName(index))) {
				obj = rs.getTimestamp(index);
			}
		}
		return obj;
	}

	/**
	 * Extract database meta-data via the given DatabaseMetaDataCallback.
	 * <p>This method will open a connection to the database and retrieve its meta-data.
	 * Since this method is called before the exception translation feature is configured
	 * for a DataSource, this method can not rely on SQLException translation itself.
	 * <p>Any exceptions will be wrapped in a MetaDataAccessException. This is a checked
	 * exception and any calling code should catch and handle this exception. You can just
	 * log the error and hope for the best, but there is probably a more serious error that
	 * will reappear when you try to access the database again.
	 * @param dataSource the DataSource to extract meta-data for
	 * @param action callback that will do the actual work
	 * @return object containing the extracted information, as returned by
	 * the DatabaseMetaDataCallback's {@code processMetaData} method
	 * @throws MetaDataAccessException if meta-data access failed
	 * @see java.sql.DatabaseMetaData
	 */
	public static <T> T extractDatabaseMetaData(DataSource dataSource, DatabaseMetaDataCallback<T> action)
			throws MetaDataAccessException {

		Connection con = null;
		try {
			con = DataSourceUtils.getConnection(dataSource);
			DatabaseMetaData metaData;
			try {
				metaData = con.getMetaData();
			}
			catch (SQLException ex) {
				if (DataSourceUtils.isConnectionTransactional(con, dataSource)) {
					// Probably a closed thread-bound Connection - retry against fresh Connection
					// (con is nulled first so the finally block won't release it twice).
					DataSourceUtils.releaseConnection(con, dataSource);
					con = null;
					logger.debug("Failed to obtain DatabaseMetaData from transactional Connection - " +
							"retrying against fresh Connection", ex);
					con = dataSource.getConnection();
					metaData = con.getMetaData();
				}
				else {
					throw ex;
				}
			}
			if (metaData == null) {
				// should only happen in test environments
				throw new MetaDataAccessException("DatabaseMetaData returned by Connection [" + con + "] was null");
			}
			return action.processMetaData(metaData);
		}
		catch (CannotGetJdbcConnectionException ex) {
			throw new MetaDataAccessException("Could not get Connection for extracting meta-data", ex);
		}
		catch (SQLException ex) {
			throw new MetaDataAccessException("Error while extracting DatabaseMetaData", ex);
		}
		catch (AbstractMethodError err) {
			throw new MetaDataAccessException(
					"JDBC DatabaseMetaData method not implemented by JDBC driver - upgrade your driver", err);
		}
		finally {
			DataSourceUtils.releaseConnection(con, dataSource);
		}
	}

	/**
	 * Call the specified method on DatabaseMetaData for the given DataSource,
	 * and extract the invocation result.
	 * @param dataSource the DataSource to extract meta-data for
	 * @param metaDataMethodName the name of the DatabaseMetaData method to call
	 * @return the object returned by the specified DatabaseMetaData method
	 * @throws MetaDataAccessException if we couldn't access the DatabaseMetaData
	 * or failed to invoke the specified method
	 * @see java.sql.DatabaseMetaData
	 * @deprecated as of 5.2.9, in favor of
	 * {@link #extractDatabaseMetaData(DataSource, DatabaseMetaDataCallback)}
	 * with a lambda expression or method reference and a generically typed result
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static <T> T extractDatabaseMetaData(DataSource dataSource, final String metaDataMethodName)
			throws MetaDataAccessException {

		return (T) extractDatabaseMetaData(dataSource, dbmd -> {
			try {
				return DatabaseMetaData.class.getMethod(metaDataMethodName).invoke(dbmd);
			}
			catch (NoSuchMethodException ex) {
				throw new MetaDataAccessException("No method named '" + metaDataMethodName +
						"' found on DatabaseMetaData instance [" + dbmd + "]", ex);
			}
			catch (IllegalAccessException ex) {
				throw new MetaDataAccessException(
						"Could not access DatabaseMetaData method '" + metaDataMethodName + "'", ex);
			}
			catch (InvocationTargetException ex) {
				// Unwrap SQLException so the caller-facing wrapper sees the real cause.
				if (ex.getTargetException() instanceof SQLException) {
					throw (SQLException) ex.getTargetException();
				}
				throw new MetaDataAccessException(
						"Invocation of DatabaseMetaData method '" + metaDataMethodName + "' failed", ex);
			}
		});
	}

	/**
	 * Return whether the given JDBC driver supports JDBC 2.0 batch updates.
	 * <p>Typically invoked right before execution of a given set of statements:
	 * to decide whether the set of SQL statements should be executed through
	 * the JDBC 2.0 batch mechanism or simply in a traditional one-by-one fashion.
	 * <p>Logs a warning if the "supportsBatchUpdates" methods throws an exception
	 * and simply returns {@code false} in that case.
	 * @param con the Connection to check
	 * @return whether JDBC 2.0 batch updates are supported
	 * @see java.sql.DatabaseMetaData#supportsBatchUpdates()
	 */
	public static boolean supportsBatchUpdates(Connection con) {
		try {
			DatabaseMetaData dbmd = con.getMetaData();
			if (dbmd != null) {
				if (dbmd.supportsBatchUpdates()) {
					logger.debug("JDBC driver supports batch updates");
					return true;
				}
				else {
					logger.debug("JDBC driver does not support batch updates");
				}
			}
		}
		catch (SQLException ex) {
			logger.debug("JDBC driver 'supportsBatchUpdates' method threw exception", ex);
		}
		return false;
	}

	/**
	 * Extract a common name for the target database in use even if
	 * various drivers/platforms provide varying names at runtime.
	 * @param source the name as provided in database meta-data
	 * @return the common name to be used (e.g. "DB2" or "Sybase")
	 */
	@Nullable
	public static String commonDatabaseName(@Nullable String source) {
		String name = source;
		if (source != null && source.startsWith("DB2")) {
			// e.g. "DB2/NT" or "DB2/LINUX" -> plain "DB2"
			name = "DB2";
		}
		else if ("MariaDB".equals(source)) {
			// MariaDB is treated as MySQL-compatible
			name = "MySQL";
		}
		else if ("Sybase SQL Server".equals(source) ||
				"Adaptive Server Enterprise".equals(source) ||
				"ASE".equals(source) ||
				"sql server".equalsIgnoreCase(source) ) {
			name = "Sybase";
		}
		return name;
	}

	/**
	 * Check whether the given SQL type is numeric.
	 * @param sqlType the SQL type to be checked
	 * @return whether the type is numeric
	 */
	public static boolean isNumeric(int sqlType) {
		return (Types.BIT == sqlType || Types.BIGINT == sqlType || Types.DECIMAL == sqlType ||
				Types.DOUBLE == sqlType || Types.FLOAT == sqlType || Types.INTEGER == sqlType ||
				Types.NUMERIC == sqlType || Types.REAL == sqlType || Types.SMALLINT == sqlType ||
				Types.TINYINT == sqlType);
	}

	/**
	 * Resolve the standard type name for the given SQL type, if possible.
	 * @param sqlType the SQL type to resolve
	 * @return the corresponding constant name in {@link java.sql.Types}
	 * (e.g. "VARCHAR"/"NUMERIC"), or {@code null} if not resolvable
	 * @since 5.2
	 */
	@Nullable
	public static String resolveTypeName(int sqlType) {
		return typeNames.get(sqlType);
	}

	/**
	 * Determine the column name to use. The column name is determined based on a
	 * lookup using ResultSetMetaData.
	 * <p>This method implementation takes into account recent clarifications
	 * expressed in the JDBC 4.0 specification:
	 * <p><i>columnLabel - the label for the column specified with the SQL AS clause.
	 * If the SQL AS clause was not specified, then the label is the name of the column</i>.
	 * @param resultSetMetaData the current meta-data to use
	 * @param columnIndex the index of the column for the look up
	 * @return the column name to use
	 * @throws SQLException in case of lookup failure
	 */
	public static String lookupColumnName(ResultSetMetaData resultSetMetaData, int columnIndex) throws SQLException {
		String name = resultSetMetaData.getColumnLabel(columnIndex);
		if (!StringUtils.hasLength(name)) {
			name = resultSetMetaData.getColumnName(columnIndex);
		}
		return name;
	}

	/**
	 * Convert a column name with underscores to the corresponding property name using "camel case".
	 * A name like "customer_number" would match a "customerNumber" property name.
	 * @param name the column name to be converted
	 * @return the name using "camel case"
	 */
	public static String convertUnderscoreNameToPropertyName(@Nullable String name) {
		StringBuilder result = new StringBuilder();
		boolean nextIsUpper = false;
		if (name != null && name.length() > 0) {
			// Special case: a leading "x_" keeps the first character upper-cased.
			if (name.length() > 1 && name.charAt(1) == '_') {
				result.append(Character.toUpperCase(name.charAt(0)));
			}
			else {
				result.append(Character.toLowerCase(name.charAt(0)));
			}
			for (int i = 1; i < name.length(); i++) {
				char c = name.charAt(i);
				if (c == '_') {
					// Underscore is dropped; it upper-cases the next character instead.
					nextIsUpper = true;
				}
				else {
					if (nextIsUpper) {
						result.append(Character.toUpperCase(c));
						nextIsUpper = false;
					}
					else {
						result.append(Character.toLowerCase(c));
					}
				}
			}
		}
		return result.toString();
	}

}
package com.octo.android.robospice.persistence.ormlite;

import java.util.ArrayList;
import java.util.List;

import android.app.Application;
import android.test.AndroidTestCase;
import android.test.suitebuilder.annotation.SmallTest;

import com.octo.android.robospice.ormlite.test.model.CurrenWeather;
import com.octo.android.robospice.ormlite.test.model.Day;
import com.octo.android.robospice.ormlite.test.model.Forecast;
import com.octo.android.robospice.ormlite.test.model.Night;
import com.octo.android.robospice.ormlite.test.model.Weather;
import com.octo.android.robospice.ormlite.test.model.Wind;
import com.octo.android.robospice.persistence.DurationInMillis;
import com.octo.android.robospice.persistence.ObjectPersister;

/**
 * Instrumentation tests for the ORMLite-backed {@link ObjectPersister} created by
 * {@link InDatabaseObjectPersisterFactory}: save/load round-trips, cache-key handling,
 * expiry, and removal of {@link Weather} aggregates.
 */
@SmallTest
public class InDatabaseWeatherPersisterTest extends AndroidTestCase {

    private ObjectPersister<Weather> dataPersistenceManager;

    // NOTE(review): these fixtures are static but mutated in setUp(); since setUp()
    // runs before every test this is deterministic, but instance fields would be safer.
    private static final CurrenWeather TEST_TEMP = new CurrenWeather();
    private static final CurrenWeather TEST_TEMP2 = new CurrenWeather();
    private static final int WEATHER_ID = 1;
    private static final int WEATHER_ID2 = 2;
    private static final int CACHE_KEY = 1;
    private static final int CACHE_KEY2 = 2;
    private static final String CACHE_KEY3_STRING = "cache_key_3";

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        Application application = (Application) getContext().getApplicationContext();

        // add persisted classes to class collection
        List<Class<?>> classCollection = new ArrayList<Class<?>>();
        classCollection.add(Weather.class);
        classCollection.add(CurrenWeather.class);
        classCollection.add(Day.class);
        classCollection.add(Forecast.class);
        classCollection.add(Night.class);
        classCollection.add(Wind.class);

        // Fresh database state for every test: the Weather table is cleared before use.
        RoboSpiceDatabaseHelper databaseHelper = new RoboSpiceDatabaseHelper(application, "sample_database.db", 1);
        databaseHelper.clearTableFromDataBase(Weather.class);
        InDatabaseObjectPersisterFactory inDatabaseObjectPersisterFactory = new InDatabaseObjectPersisterFactory(application, databaseHelper, classCollection);
        dataPersistenceManager = inDatabaseObjectPersisterFactory.createObjectPersister(Weather.class);
        TEST_TEMP.setTemp("28");
        TEST_TEMP.setTemp_unit("C");
        TEST_TEMP2.setTemp("30");
        TEST_TEMP2.setTemp_unit("C");
    }

    @Override
    protected void tearDown() throws Exception {
        dataPersistenceManager.removeAllDataFromCache();
        super.tearDown();
    }

    public void test_canHandleClientRequestStatus() {
        boolean canHandleClientWeather = dataPersistenceManager.canHandleClass(Weather.class);
        assertEquals(true, canHandleClientWeather);
    }

    public void test_saveDataAndReturnData() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);

        // WHEN
        Weather weatherReturned = dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, 1);

        // THEN
        assertTrue(weatherReturned.getListWeather().contains(TEST_TEMP));
    }

    public void test_loadDataFromCache_no_expiracy() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);

        // WHEN
        Weather weatherReturned = dataPersistenceManager.loadDataFromCache(CACHE_KEY, DurationInMillis.ALWAYS_RETURNED);

        // THEN
        assertEquals(WEATHER_ID, weatherReturned.getId());
        assertTrue(weatherReturned.getListWeather().contains(TEST_TEMP));
    }

    public void test_loadDataFromCache_not_expired() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);

        // WHEN
        Weather weatherReturned = dataPersistenceManager.loadDataFromCache(CACHE_KEY, DurationInMillis.ONE_SECOND);

        // THEN
        assertTrue(weatherReturned.getListWeather().contains(TEST_TEMP));
    }

    public void test_loadDataFromCache_expired() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);
        // NOTE(review): real-time sleep makes this test timing-sensitive; a sleep of
        // exactly the expiry duration can be flaky on slow/fast emulators.
        Thread.sleep(DurationInMillis.ONE_SECOND);

        // WHEN
        Weather weatherReturned = dataPersistenceManager.loadDataFromCache(CACHE_KEY, DurationInMillis.ONE_SECOND);

        // THEN
        assertNull(weatherReturned);
    }

    public void test_loadAllDataFromCache_with_one_request_in_cache() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);

        // WHEN
        List<Weather> listWeatherResult = dataPersistenceManager.loadAllDataFromCache();

        // THEN
        assertNotNull(listWeatherResult);
        assertEquals(1, listWeatherResult.size());
        assertEquals(weatherRequestStatus, listWeatherResult.get(0));
    }

    public void test_loadAllDataFromCache_with_two_requests_in_cache() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);

        Weather weatherRequestStatus2 = buildWeather(WEATHER_ID2, TEST_TEMP2);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus2, CACHE_KEY2);

        // WHEN
        List<Weather> listWeatherResult = dataPersistenceManager.loadAllDataFromCache();

        // THEN
        assertNotNull(listWeatherResult);
        assertEquals(2, listWeatherResult.size());
        assertTrue(listWeatherResult.contains(weatherRequestStatus));
        assertTrue(listWeatherResult.contains(weatherRequestStatus2));
    }

    public void test_loadAllDataFromCache_with_no_requests_in_cache() throws Exception {
        // GIVEN

        // WHEN
        List<Weather> listWeather = dataPersistenceManager.loadAllDataFromCache();

        // THEN
        assertNotNull(listWeather);
        assertTrue(listWeather.isEmpty());
    }

    public void test_removeDataFromCache_when_two_requests_in_cache_and_one_removed() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY);

        Weather weatherRequestStatus2 = buildWeather(WEATHER_ID2, TEST_TEMP2);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus2, CACHE_KEY2);

        dataPersistenceManager.removeDataFromCache(CACHE_KEY2);

        // WHEN
        List<Weather> listWeatherResult = dataPersistenceManager.loadAllDataFromCache();

        // THEN
        assertNotNull(listWeatherResult);
        assertEquals(1, listWeatherResult.size());
        assertTrue(listWeatherResult.contains(weatherRequestStatus));
        assertFalse(listWeatherResult.contains(weatherRequestStatus2));
    }

    public void test_cacheKey_can_be_string_when_object_type_has_int_id() throws Exception {
        // GIVEN
        Weather weatherRequestStatus = buildWeather(WEATHER_ID, TEST_TEMP);
        dataPersistenceManager.saveDataToCacheAndReturnData(weatherRequestStatus, CACHE_KEY3_STRING);

        // WHEN
        Weather weatherReturned = dataPersistenceManager.loadDataFromCache(CACHE_KEY3_STRING, DurationInMillis.ALWAYS_RETURNED);
        List<Weather> listWeatherResult = dataPersistenceManager.loadAllDataFromCache();

        // THEN
        assertNotNull(listWeatherResult);
        assertEquals(1, listWeatherResult.size());
        assertTrue(listWeatherResult.contains(weatherRequestStatus));
        assertEquals(WEATHER_ID, weatherReturned.getId());
    }

    /**
     * Builds a {@link Weather} aggregate with the given id and a single current-weather
     * entry; the forecast list is deliberately left {@code null}.
     */
    private Weather buildWeather(int id, CurrenWeather currenWeather) {
        Weather weather = new Weather();
        weather.setId(id);
        List<CurrenWeather> currents = new ArrayList<CurrenWeather>();
        currents.add(currenWeather);
        weather.setListWeather(currents);
        weather.setListForecast(null);
        return weather;
    }
}
/* * Copyright (C) 2019 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gapid.perfetto.views; import static com.google.gapid.util.Colors.hsl; import static com.google.gapid.util.Colors.rgb; import static com.google.gapid.util.Colors.rgba; import com.google.gapid.perfetto.canvas.RenderContext; import com.google.gapid.widgets.Theme; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.RGBA; /** * Constants governing the look of the UI. */ public class StyleConstants { public static final double TITLE_HEIGHT = 25; public static final double LABEL_OFFSET = 20; public static final double LABEL_ICON_SIZE = 16; public static final double LABEL_WIDTH = 280; public static final double LABEL_MARGIN = 4; public static final double LABEL_PIN_X = LABEL_WIDTH - LABEL_MARGIN - LABEL_ICON_SIZE; public static final double LABEL_TOGGLE_X = LABEL_PIN_X - LABEL_ICON_SIZE; public static final double TRACK_MARGIN = 4; public static final double DEFAULT_COUNTER_TRACK_HEIGHT = 45; public static final double PROCESS_COUNTER_TRACK_HIGHT = 30; public static final double HIGHLIGHT_EDGE_NEARBY_WIDTH = 10; public static final double SELECTION_THRESHOLD = 0.333; public static final double ZOOM_FACTOR_SCALE = 0.05; public static final double ZOOM_FACTOR_SCALE_DRAG = 0.01; // Keyboard handling constants. 
public static final int KB_DELAY = 20; public static final int KB_PAN_SLOW = 30; public static final int KB_PAN_FAST = 60; public static final double KB_ZOOM_SLOW = 2 * ZOOM_FACTOR_SCALE; public static final double KB_ZOOM_FAST = 3 * ZOOM_FACTOR_SCALE; public static class Colors { public final RGBA background; public final RGBA titleBackground; public final RGBA gridline; public final RGBA panelBorder; public final RGBA hoverBackground; public final RGBA loadingBackground; public final RGBA loadingForeground; public final RGBA selectionBackground; public final RGBA timeHighlight; public final RGBA timeHighlightBorder; public final RGBA timeHighlightCover; public final RGBA timeHighlightEmphasize; public final RGBA cpuFreqIdle; public final RGBA timelineRuler; public final RGBA vsyncBackground; public final RGBA flagLine; public final RGBA flagHover; public final RGBA textMain; public final RGBA textAlt; public Colors(RGBA background, RGBA titleBackground, RGBA gridline, RGBA panelBorder, RGBA hoverBackground, RGBA loadingBackground, RGBA loadingForeground, RGBA selectionBackground, RGBA timeHighlight, RGBA timeHighlightBorder, RGBA timeHighlightCover, RGBA timeHighlightEmphasize, RGBA cpuFreqIdle, RGBA timelineRuler, RGBA vsyncBackground, RGBA flagLine, RGBA flagHover, RGBA textMain, RGBA textAlt) { this.background = background; this.titleBackground = titleBackground; this.gridline = gridline; this.panelBorder = panelBorder; this.hoverBackground = hoverBackground; this.loadingBackground = loadingBackground; this.loadingForeground = loadingForeground; this.selectionBackground = selectionBackground; this.timeHighlight = timeHighlight; this.timeHighlightBorder = timeHighlightBorder; this.timeHighlightCover = timeHighlightCover; this.timeHighlightEmphasize = timeHighlightEmphasize; this.cpuFreqIdle = cpuFreqIdle; this.timelineRuler = timelineRuler; this.vsyncBackground = vsyncBackground; this.flagLine = flagLine; this.flagHover = flagHover; this.textMain = textMain; 
this.textAlt = textAlt;
  }

    // ----- Light theme palette -----
    private static final RGBA LIGHT_BACKGROUND = rgb(0xff, 0xff, 0xff);
    private static final RGBA LIGHT_TITLE_BACKGROUND = rgb(0xe9, 0xe9, 0xe9);
    private static final RGBA LIGHT_GRIDLINE = rgb(0xda, 0xda, 0xda);
    private static final RGBA LIGHT_PANEL_BORDER = LIGHT_GRIDLINE;
    private static final RGBA LIGHT_HOVER_BACKGROUND = rgba(0xf7, 0xf7, 0xf7, 0.95f);
    private static final RGBA LIGHT_LOADING_BACKGROUND = rgb(0xe0, 0xe6, 0xe8);
    private static final RGBA LIGHT_LOADING_FOREGROUND = rgb(0x66, 0x66, 0x66);
    private static final RGBA LIGHT_SELECTION_BACKGROUND = rgba(0, 0, 255, 0.3f);
    private static final RGBA LIGHT_TIME_HIGHLIGHT = rgb(0x32, 0x34, 0x35);
    private static final RGBA LIGHT_TIME_HIGHLIGHT_BORDER = LIGHT_GRIDLINE;
    private static final RGBA LIGHT_TIME_HIGHLIGHT_COVER = rgba(0, 0, 0, 0.2f);
    private static final RGBA LIGHT_TIME_HIGHLIGHT_EMPHASIZE = rgb(0xff, 0xde, 0x00);
    private static final RGBA LIGHT_CPU_FREQ_IDLE = rgb(0xf0, 0xf0, 0xf0);
    private static final RGBA LIGHT_TIMELINE_RULER = rgb(0x99, 0x99, 0x99);
    private static final RGBA LIGHT_VSYNC_BACKGROUND = rgb(0xf5, 0xf5, 0xf5);
    private static final RGBA LIGHT_FLAG_LINE = rgb(0, 0, 0);
    private static final RGBA LIGHT_FLAG_HOVER = rgb(0x80, 0x80, 0x80);
    private static final RGBA LIGHT_TEXT_MAIN = rgb(0x32, 0x34, 0x35);
    private static final RGBA LIGHT_TEXT_ALT = rgb(101, 102, 104);

    /** Returns the color palette for the light theme. */
    public static Colors light() {
      return new Colors(
          LIGHT_BACKGROUND,
          LIGHT_TITLE_BACKGROUND,
          LIGHT_GRIDLINE,
          LIGHT_PANEL_BORDER,
          LIGHT_HOVER_BACKGROUND,
          LIGHT_LOADING_BACKGROUND,
          LIGHT_LOADING_FOREGROUND,
          LIGHT_SELECTION_BACKGROUND,
          LIGHT_TIME_HIGHLIGHT,
          LIGHT_TIME_HIGHLIGHT_BORDER,
          LIGHT_TIME_HIGHLIGHT_COVER,
          LIGHT_TIME_HIGHLIGHT_EMPHASIZE,
          LIGHT_CPU_FREQ_IDLE,
          LIGHT_TIMELINE_RULER,
          LIGHT_VSYNC_BACKGROUND,
          LIGHT_FLAG_LINE,
          LIGHT_FLAG_HOVER,
          LIGHT_TEXT_MAIN,
          LIGHT_TEXT_ALT);
    }

    // ----- Dark theme palette -----
    private static final RGBA DARK_BACKGROUND = rgb(0x1a, 0x1a, 0x1a);
    private static final RGBA DARK_TITLE_BACKGROUND = rgb(0x3b, 0x3b, 0x3b);
    private static final RGBA DARK_GRIDLINE = rgb(0x40, 0x40, 0x40);
    private static final RGBA DARK_PANEL_BORDER = DARK_GRIDLINE;
    private static final RGBA DARK_HOVER_BACKGROUND = rgba(0x17, 0x17, 0x17, 0.8f);
    private static final RGBA DARK_LOADING_BACKGROUND = rgb(0x4a, 0x4a, 0x4a);
    private static final RGBA DARK_LOADING_FOREGROUND = rgb(0xaa, 0xaa, 0xaa);
    private static final RGBA DARK_SELECTION_BACKGROUND = rgba(0, 0, 255, 0.5f);
    private static final RGBA DARK_TIME_HIGHLIGHT = rgb(0xff, 0xff, 0xff);
    private static final RGBA DARK_TIME_HIGHLIGHT_BORDER = DARK_GRIDLINE;
    private static final RGBA DARK_TIME_HIGHLIGHT_COVER = rgba(0xff, 0xff, 0xff, 0.2f);
    private static final RGBA DARK_TIME_HIGHLIGHT_EMPHASIZE = rgb(0xd2, 0xb6, 0x00);
    private static final RGBA DARK_CPU_FREQ_IDLE = rgb(0x55, 0x55, 0x55);
    private static final RGBA DARK_TIMELINE_RULER = rgb(0x99, 0x99, 0x99);
    private static final RGBA DARK_VSYNC_BACKGROUND = rgb(0x24, 0x24, 0x24);
    private static final RGBA DARK_FLAG_LINE = rgb(0xff, 0xff, 0xff);
    private static final RGBA DARK_FLAG_HOVER = rgb(0x80, 0x80, 0x80);
    private static final RGBA DARK_TEXT_MAIN = rgb(0xf1, 0xf1, 0xf8);
    private static final RGBA DARK_TEXT_ALT = rgb(0xdd, 0xdd, 0xdd);

    /** Returns the color palette for the dark theme. */
    public static Colors dark() {
      return new Colors(
          DARK_BACKGROUND,
          DARK_TITLE_BACKGROUND,
          DARK_GRIDLINE,
          DARK_PANEL_BORDER,
          DARK_HOVER_BACKGROUND,
          DARK_LOADING_BACKGROUND,
          DARK_LOADING_FOREGROUND,
          DARK_SELECTION_BACKGROUND,
          DARK_TIME_HIGHLIGHT,
          DARK_TIME_HIGHLIGHT_BORDER,
          DARK_TIME_HIGHLIGHT_COVER,
          DARK_TIME_HIGHLIGHT_EMPHASIZE,
          DARK_CPU_FREQ_IDLE,
          DARK_TIMELINE_RULER,
          DARK_VSYNC_BACKGROUND,
          DARK_FLAG_LINE,
          DARK_FLAG_HOVER,
          DARK_TEXT_MAIN,
          DARK_TEXT_ALT);
    }
  }

  // Currently active palette / theme flag; swapped wholesale by setDark(boolean).
  private static Colors colors = Colors.light();
  private static boolean isDark = false;

  private StyleConstants() {
  }

  /** Returns the palette for the currently selected theme. */
  public static Colors colors() {
    return colors;
  }

  /**
   * Returns a deterministic gradient for the given seed, in the current theme.
   * The {@code & 0x7fffffff} masks the sign bit so the modulo is non-negative
   * even for negative seeds; the {@code + 8} offset shifts which palette entry
   * seed 0 maps to. See Gradients.COLORS for the palette.
   */
  public static Gradient gradient(int seed) {
    int idx = ((seed + 8) & 0x7fffffff) % Gradients.COUNT;
    return (isDark ? Gradients.DARK : Gradients.LIGHT)[idx];
  }

  /** Light Blue — see index 14 in Gradients.COLORS ("main, mem buf/cache"). */
  public static Gradient mainGradient() {
    return isDark ? Gradients.DARK[14] : Gradients.LIGHT[14];
  }

  /** Grey — see index 15 in Gradients.COLORS ("sleeping"). */
  public static Gradient threadStateSleeping() {
    return isDark ? Gradients.DARK[15] : Gradients.LIGHT[15];
  }

  /** Pacific Blue — see index 13 in Gradients.COLORS ("runnable, mem used"). */
  public static Gradient threadStateRunnable() {
    return isDark ? Gradients.DARK[13] : Gradients.LIGHT[13];
  }

  /** Green — see index 10 in Gradients.COLORS ("running, battery in"). */
  public static Gradient threadStateRunning() {
    return isDark ? Gradients.DARK[10] : Gradients.LIGHT[10];
  }

  /** Orange — see index 1 in Gradients.COLORS ("blocked OK, battery out"). */
  public static Gradient threadStateBlockedOk() {
    return isDark ? Gradients.DARK[1] : Gradients.LIGHT[1];
  }

  /** Dark Orange — see index 2 in Gradients.COLORS ("blocked warn"). */
  public static Gradient threadStateBlockedWarn() {
    return isDark ? Gradients.DARK[2] : Gradients.LIGHT[2];
  }

  /** Green — see index 10 in Gradients.COLORS ("running, battery in"). */
  public static Gradient batteryInGradient() {
    return isDark ? Gradients.DARK[10] : Gradients.LIGHT[10];
  }

  /**
   * Orange — see index 1 in Gradients.COLORS ("blocked OK, battery out").
   * BUGFIX: the dark branch previously returned {@code DARK[10]} (green,
   * "battery in") while the light branch returned {@code LIGHT[1]}; both
   * themes now use index 1, consistent with the palette comment and with
   * {@link #batteryInGradient()} using index 10 in both themes.
   */
  public static Gradient batteryOutGradient() {
    return isDark ? Gradients.DARK[1] : Gradients.LIGHT[1];
  }

  /** Pacific Blue — see index 13 in Gradients.COLORS ("runnable, mem used"). */
  public static Gradient memoryUsedGradient() {
    return isDark ? Gradients.DARK[13] : Gradients.LIGHT[13];
  }

  /** Light Blue — see index 14 in Gradients.COLORS ("main, mem buf/cache"). */
  public static Gradient memoryBuffersGradient() {
    return isDark ? Gradients.DARK[14] : Gradients.LIGHT[14];
  }

  public static boolean isLight() {
    return !isDark;
  }

  public static boolean isDark() {
    return isDark;
  }

  /** Switches the active theme and swaps the palette accordingly. */
  public static void setDark(boolean dark) {
    isDark = dark;
    colors = isDark ? Colors.dark() : Colors.light();
  }

  public static void toggleDark() {
    setDark(!isDark);
  }

  // Theme-dependent icon accessors: each picks the dark or light variant of the
  // asset from the given Theme.
  public static Image arrowDown(Theme theme) {
    return isDark ? theme.arrowDropDownDark() : theme.arrowDropDownLight();
  }

  public static Image arrowRight(Theme theme) {
    return isDark ? theme.arrowDropRightDark() : theme.arrowDropRightLight();
  }

  public static Image unfoldMore(Theme theme) {
    return isDark ? theme.unfoldMoreDark() : theme.unfoldMoreLight();
  }

  public static Image unfoldLess(Theme theme) {
    return isDark ? theme.unfoldLessDark() : theme.unfoldLessLight();
  }

  public static Image rangeStart(Theme theme) {
    return isDark ? theme.rangeStartDark() : theme.rangeStartLight();
  }

  public static Image rangeEnd(Theme theme) {
    return isDark ? theme.rangeEndDark() : theme.rangeEndLight();
  }

  public static Image pinActive(Theme theme) {
    return isDark ? theme.pinActiveDark() : theme.pinActiveLight();
  }

  public static Image pinInactive(Theme theme) {
    return isDark ? theme.pinInactiveDark() : theme.pinInactiveLight();
  }

  public static Image flag(Theme theme) {
    return isDark ? theme.flagDark() : theme.flagLight();
  }

  public static Image flagFilled(Theme theme) {
    return isDark ? theme.flagFilledDark() : theme.flagFilledLight();
  }

  /** The greyed flag has a single theme-independent asset. */
  public static Image flagGreyed(Theme theme) {
    return theme.flagGreyed();
  }

  /**
   * A set of related colors (base, border, highlight, alternate) derived from a
   * single HSL hue by interpolating lightness toward a theme-dependent target.
   */
  public static class Gradient {
    // Lightness targets for positive (high) and negative (low) interpolation.
    private static final float HIGH_TARGET = 0.9f;
    private static final float LOW_TARGET = 0.2f;
    // Interpolation multipliers for the derived colors, per theme.
    private static final float LIGHT_BASE = 0.3f;
    private static final float LIGHT_BORDER = -0.1f;
    private static final float LIGHT_HIGHLIGHT = -0.5f;
    private static final float DARK_BASE = 0.1f;
    private static final float DARK_BORDER = -0.4f;
    private static final float DARK_HIGHLIGHT = 0.7f;

    public final RGBA base;
    public final RGBA border;
    public final RGBA highlight;
    public final RGBA alternate;
    public final RGBA disabled = hsl(0, 0, 0.62f);

    private final float h, s, l;
    private final float high, low;

    /**
     * @param h hue of the gradient's seed color.
     * @param s saturation of the seed color.
     * @param l lightness of the seed color.
     * @param light whether to derive colors for the light theme (true) or dark.
     */
    public Gradient(float h, float s, float l, boolean light) {
      this.h = h;
      this.s = s;
      this.l = l;
      // In the dark theme the high/low targets are swapped, so "highlight"
      // moves toward lighter values instead of darker ones.
      this.high = light ? HIGH_TARGET : LOW_TARGET;
      this.low = light ? LOW_TARGET : HIGH_TARGET;
      this.base = lerp(light ? LIGHT_BASE : DARK_BASE);
      this.border = lerp(light ? LIGHT_BORDER : DARK_BORDER);
      this.highlight = lerp(light ? LIGHT_HIGHLIGHT : DARK_HIGHLIGHT);
      this.alternate = lerp(1);
    }

    /**
     * Interpolates the seed lightness toward the low (x &lt; 0) or high
     * (x &gt;= 0) target.
     *
     * @param x interpolation multiplier in the range [-1, 1].
     */
    public RGBA lerp(float x) {
      if (x < 0) {
        return hsl(h, s, l - x * (low - l));
      } else {
        return hsl(h, s, l + x * (high - l));
      }
    }

    /** Sets fill to base. **/
    public void applyBase(RenderContext ctx) {
      ctx.setBackgroundColor(base);
    }

    /** Sets fill to base, and stroke to border. **/
    public void applyBaseAndBorder(RenderContext ctx) {
      ctx.setForegroundColor(border);
      ctx.setBackgroundColor(base);
    }
  }

  /** Seed HSL colors for the gradient palettes, plus the derived LIGHT/DARK arrays. */
  private static class Gradients {
    // Order here matters, when changing, adjust the indices above.
    private static final float[][] COLORS = {
        { 15.38f, 0.2633f, 0.6000f },  // Brown
        { 20.15f, 0.8954f, 0.7000f },  // Orange              // blocked OK, battery out
        { 21.92f, 0.7626f, 0.4294f },  // Dark Orange         // blocked warn
        { 36.22f, 0.7312f, 0.5000f },  // Light Brown
        { 42.19f, 0.4412f, 0.7490f },  // Tan
        { 50.00f, 0.5822f, 0.5844f },  // Gold
        { 59.49f, 0.5109f, 0.4490f },  // Lime
        { 66.00f, 0.8233f, 0.5400f },  // Apple Green
        { 88.00f, 0.5000f, 0.5300f },  // Chartreuse
        { 122.00f, 0.3900f, 0.4900f }, // Dark Green
        { 130.91f, 0.6548f, 0.6706f }, // Green               // running, battery in
        { 171.02f, 0.5787f, 0.5598f }, // Turquoise
        { 172.36f, 0.7432f, 0.2902f }, // Teal
        { 198.40f, 1.0000f, 0.4157f }, // Pacific Blue        // runnable, mem used
        { 200.22f, 0.9787f, 0.8157f }, // Light Blue          // main, mem buf/cache
        { 201.95f, 0.2455f, 0.6725f }, // Grey                // sleeping
        { 214.85f, 1.0000f, 0.5510f }, // Vivid Blue
        { 217.06f, 0.5000f, 0.4000f }, // Indigo
        { 261.54f, 0.5065f, 0.6980f }, // Light Purple
        { 262.30f, 0.6981f, 0.5843f }, // Purple
        { 298.56f, 0.5540f, 0.6845f }, // Light Magenta
        { 319.69f, 0.5333f, 0.4706f }, // Magenta
        { 338.32f, 0.7041f, 0.6686f }, // Pink
    };

    public static final Gradient[] LIGHT;
    public static final Gradient[] DARK;
    public static final int COUNT = COLORS.length;

    static {
      LIGHT = new Gradient[COUNT];
      DARK = new Gradient[COUNT];
      for (int i = 0; i < COUNT; i++) {
        float h = COLORS[i][0], s = COLORS[i][1], l = COLORS[i][2];
        LIGHT[i] = new Gradient(h, s, l, true);
        DARK[i] = new Gradient(h, s, l, false);
      }
    }
  }
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.gemstone.gemfire.internal.offheap; import java.util.Properties; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import com.gemstone.gemfire.OutOfOffHeapMemoryException; import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.CacheClosedException; import com.gemstone.gemfire.cache.CacheFactory; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.RegionShortcut; import com.gemstone.gemfire.cache30.CacheTestCase; import com.gemstone.gemfire.distributed.DistributedSystem; import com.gemstone.gemfire.distributed.DistributedSystemDisconnectedException; import com.gemstone.gemfire.distributed.internal.DistributionConfig; import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem; import com.gemstone.gemfire.internal.cache.DistributedRegion; import com.gemstone.gemfire.internal.util.StopWatch; import dunit.Host; import dunit.SerializableRunnable; /** * Test behavior of region when running out of off-heap memory. 
* * @author Kirk Lund */ @SuppressWarnings("serial") public class OutOfOffHeapMemoryDUnitTest extends CacheTestCase { private static final String EXPECTED_EXCEPTIONS = "com.gemstone.gemfire.OutOfOffHeapMemoryException"; private static final String ADD_EXPECTED_EXCEPTIONS = "<ExpectedException action=add>" + EXPECTED_EXCEPTIONS + "</ExpectedException>"; private static final String REMOVE_EXPECTED_EXCEPTIONS = "<ExpectedException action=remove>" + EXPECTED_EXCEPTIONS + "</ExpectedException>"; public OutOfOffHeapMemoryDUnitTest(String name) { super(name); } @Override public void setUp() throws Exception { super.setUp(); long begin = System.currentTimeMillis(); Cache gfc = null; while (gfc == null) { try { gfc = getCache(); break; } catch (IllegalStateException e) { if (System.currentTimeMillis() > begin+60*1000) { fail("OutOfOffHeapMemoryDUnitTest waited too long to getCache", e); } else if (e.getMessage().contains("A connection to a distributed system already exists in this VM. It has the following configuration")) { InternalDistributedSystem ids = InternalDistributedSystem.getAnyInstance(); if (ids != null && ids.isConnected()) { ids.getLogWriter().warning("OutOfOffHeapMemoryDUnitTest found DistributedSystem connection from previous test", e); ids.disconnect(); } } else { throw e; } } } } @Override public void tearDown2() throws Exception { invokeInEveryVM(getClass(), "cleanup"); // invokeInEveryVM(new SerializableRunnable() { // public void run() { // cleanup(); // } // }); } // public static void caseSetUp() { // for (int i = 0; i < Host.getHost(0).getVMCount(); i++) { // Host.getHost(0).getVM(i).invoke(new SerializableRunnable() { // public void run() { // InternalDistributedSystem ids = InternalDistributedSystem.getAnyInstance(); // if (ids != null && ids.isConnected()) { // ids.getLogWriter().warning("OutOfOffHeapMemoryDUnitTest: Found DistributedSystem connection from previous test " + ids); // ids.disconnect(); // } // } // }); // } // } 
@SuppressWarnings("unused") // invoked by reflection from tearDown2() private static void cleanup() { try { //cache.get().getLogger().info(REMOVE_EXPECTED_EXCEPTIONS); //getCache().getLogger().info(REMOVE_EXPECTED_EXCEPTIONS); getLogWriter().info(REMOVE_EXPECTED_EXCEPTIONS); } finally { disconnectFromDS(); SimpleMemoryAllocatorImpl.freeOffHeapMemory(); cache.set(null); system.set(null); isSmallerVM.set(false); } } protected String getOffHeapMemorySize() { return "2m"; } protected String getSmallerOffHeapMemorySize() { return "1m"; } protected RegionShortcut getRegionShortcut() { return RegionShortcut.REPLICATE; } protected String getRegionName() { return "region1"; } @Override public Properties getDistributedSystemProperties() { Properties props = new Properties(); props.put(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true"); if (isSmallerVM.get()) { props.setProperty(DistributionConfig.OFF_HEAP_MEMORY_SIZE_NAME, getSmallerOffHeapMemorySize()); } else { props.setProperty(DistributionConfig.OFF_HEAP_MEMORY_SIZE_NAME, getOffHeapMemorySize()); } return props; } public void testSimpleOutOfOffHeapMemoryMemberDisconnects() { final Cache cache = getCache(); final DistributedSystem system = getSystem(); Region<Object, Object> region = cache.createRegionFactory(getRegionShortcut()).setEnableOffHeapMemory(true).create(getRegionName()); OutOfOffHeapMemoryException ooohme; try { for (int i = 0; true; i++) { region.put("key-"+i, new Byte[1024]); } } catch (OutOfOffHeapMemoryException e) { ooohme = e; } assertNotNull(ooohme); final WaitCriterion waitForDisconnect = new WaitCriterion() { public boolean done() { return cache.isClosed() && !system.isConnected(); } public String description() { return "Waiting for disconnect to complete"; } }; waitForCriterion(waitForDisconnect, 10*1000, 100, true); assertTrue(cache.isClosed()); assertFalse(system.isConnected()); // final WaitCriterion waitForNull = new WaitCriterion() { // public boolean done() { // return 
GemFireCacheImpl.getInstance() == null; // } // public String description() { // return "Waiting for GemFireCacheImpl to null its instance"; // } // }; // waitForCriterion(waitForNull, 10*1000, 100, true); // // assertNull(GemFireCacheImpl.getInstance()); try { CacheFactory.getAnyInstance(); fail("CacheFactory.getAnyInstance() should throw CacheClosedException"); } catch (CacheClosedException e) { // pass } catch (DistributedSystemDisconnectedException e) { boolean passed = false; for (Throwable cause = e.getCause(); cause != null;) { if (cause instanceof CacheClosedException) { passed = true; break; } } if (!passed) { throw e; } } //assertNull(InternalDistributedSystem.getAnyInstance()); assertFalse(InternalDistributedSystem.getAnyInstance().isConnected()); } @SuppressWarnings("rawtypes") protected static Region createRegion(Cache cache, RegionShortcut shortcut, String name) { return cache.createRegionFactory(shortcut).setEnableOffHeapMemory(true).create(name); } protected static final AtomicReference<Cache> cache = new AtomicReference<Cache>(); protected static final AtomicReference<DistributedSystem> system = new AtomicReference<DistributedSystem>(); protected static final AtomicBoolean isSmallerVM = new AtomicBoolean(); public void testOtherMembersSeeOutOfOffHeapMemoryMemberDisconnects() { assertEquals(4, Host.getHost(0).getVMCount()); final String name = getRegionName(); final RegionShortcut shortcut = getRegionShortcut(); final int smallerVM = 1; final int count = Host.getHost(0).getVMCount(); Host.getHost(0).getVM(smallerVM).invoke(new SerializableRunnable() { public void run() { OutOfOffHeapMemoryDUnitTest.isSmallerVM.set(true); } }); for (int i = 0; i < Host.getHost(0).getVMCount(); i++) { Host.getHost(0).getVM(i).invoke(new SerializableRunnable() { public void run() { OutOfOffHeapMemoryDUnitTest.cache.set(getCache()); OutOfOffHeapMemoryDUnitTest.system.set(getSystem()); Region<Object, Object> region = OutOfOffHeapMemoryDUnitTest.cache.get(). 
createRegionFactory(shortcut).setEnableOffHeapMemory(true).create(name); assertNotNull(region); } }); } for (int i = 0; i < count; i++) { Host.getHost(0).getVM(i).invoke(new SerializableRunnable() { public void run() { assertFalse(OutOfOffHeapMemoryDUnitTest.cache.get().isClosed()); assertTrue(OutOfOffHeapMemoryDUnitTest.system.get().isConnected()); int countMembersPlusLocator = count+2; // add one for locator int countOtherMembers = count-1; // subtract one for self assertEquals(countMembersPlusLocator, ((InternalDistributedSystem)OutOfOffHeapMemoryDUnitTest. system.get()).getDistributionManager().getDistributionManagerIds().size()); assertEquals(countOtherMembers, ((DistributedRegion)OutOfOffHeapMemoryDUnitTest. cache.get().getRegion(name)).getDistributionAdvisor().getNumProfiles()); } }); } Host.getHost(0).getVM(smallerVM).invoke(new SerializableRunnable() { public void run() { //OutOfOffHeapMemoryDUnitTest.cache.get().getLogger().info(ADD_EXPECTED_EXCEPTIONS); getLogWriter().info(ADD_EXPECTED_EXCEPTIONS); } }); // perform puts in bigger member until smaller member goes OOOHME Host.getHost(0).getVM(0).invoke(new SerializableRunnable() { public void run() { final long TIME_LIMIT = 30 * 1000; final StopWatch stopWatch = new StopWatch(true); int i = 0; int countOtherMembers = count-1; Region<Object, Object> region = OutOfOffHeapMemoryDUnitTest.cache.get().getRegion(name); for (i = 0; countOtherMembers > count-2; i++) { region.put("key-"+i, new byte[1024]); countOtherMembers = ((DistributedRegion)OutOfOffHeapMemoryDUnitTest. 
cache.get().getRegion(name)).getDistributionAdvisor().getNumProfiles(); assertTrue("puts failed to push member out of off-heap memory within time limit", stopWatch.elapsedTimeMillis() < TIME_LIMIT); } assertEquals("Member did not depart from OutOfOffHeapMemory", count-2, countOtherMembers); } }); // verify that member with OOOHME closed Host.getHost(0).getVM(smallerVM).invoke(new SerializableRunnable() { public void run() { // assertTrue(OutOfOffHeapMemoryDUnitTest.cache.get().isClosed()); // assertFalse(OutOfOffHeapMemoryDUnitTest.system.get().isConnected()); final WaitCriterion waitForDisconnect = new WaitCriterion() { public boolean done() { return OutOfOffHeapMemoryDUnitTest.cache.get().isClosed() && !OutOfOffHeapMemoryDUnitTest.system.get().isConnected(); } public String description() { return "Waiting for disconnect to complete"; } }; waitForCriterion(waitForDisconnect, 10*1000, 100, true); } }); // verify that closed member is closed according to all members for (int i = 0; i < count; i++) { if (i == smallerVM) { continue; } Host.getHost(0).getVM(i).invoke(new SerializableRunnable() { public void run() { final int countMembersPlusLocator = count+2-1; // add one for locator (minus one for OOOHME member) final int countOtherMembers = count-1-1; // subtract one for self (minus one for OOOHME member) assertEquals(countMembersPlusLocator, ((InternalDistributedSystem)OutOfOffHeapMemoryDUnitTest. system.get()).getDistributionManager().getDistributionManagerIds().size()); // final WaitCriterion waitForView = new WaitCriterion() { // public boolean done() { // return ((InternalDistributedSystem)OutOfOffHeapMemoryDUnitTest.system.get()).getDistributionManager().getDistributionManagerIds().size() == countMembersPlusLocator; // } // public String description() { // return "Waiting for OOOHM to depart view"; // } // }; // waitForCriterion(waitForView, 10*1000, 100, true); assertEquals(countOtherMembers, ((DistributedRegion)OutOfOffHeapMemoryDUnitTest. 
cache.get().getRegion(name)).getDistributionAdvisor().getNumProfiles()); // final WaitCriterion waitForProfiles = new WaitCriterion() { // public boolean done() { // return ((DistributedRegion)OutOfOffHeapMemoryDUnitTest.cache.get().getRegion(name)).getDistributionAdvisor().getNumProfiles() == countOtherMembers; // } // public String description() { // return "Waiting for OOOHM to depart profiles"; // } // }; // waitForCriterion(waitForProfiles, 10*1000, 100, true); } }); } } }
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.beem.project.btf.ui.loadimages; import java.lang.ref.WeakReference; import android.content.Context; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.support.v4.app.FragmentActivity; import android.util.Log; import android.widget.ImageView; import com.beem.project.btf.BuildConfig; /** * This class wraps up completing some arbitrary long running work when loading a bitmap to an * ImageView. It handles things like using a memory and disk cache, running the work in a background * thread and setting a placeholder image. 
 */
public abstract class ImageWorker {
    private static final String TAG = "ImageWorker";
    // Duration (ms) for the fade-in transition; currently unused because the
    // transition code in setImageDrawable() is commented out.
    private static final int FADE_IN_TIME = 200;

    private ImageCache mImageCache;
    private ImageCache.ImageCacheParams mImageCacheParams;
    private Bitmap mLoadingBitmap;
    private boolean mFadeInBitmap = true;
    private boolean mExitTasksEarly = false;
    protected boolean mPauseWork = false;
    // Guards mPauseWork; background tasks wait() on it while work is paused.
    private final Object mPauseWorkLock = new Object();

    protected Resources mResources;

    // Message codes dispatched to CacheAsyncTask.
    private static final int MESSAGE_CLEAR = 0;
    private static final int MESSAGE_INIT_DISK_CACHE = 1;
    private static final int MESSAGE_FLUSH = 2;
    private static final int MESSAGE_CLOSE = 3;

    protected ImageWorker(Context context) {
        mResources = context.getResources();
    }

    /**
     * Load an image specified by the data parameter into an ImageView (override
     * {@link ImageWorker#processBitmap(Object)} to define the processing logic). A memory and
     * disk cache will be used if an {@link ImageCache} has been added using
     * {@link ImageWorker#addImageCache(ImageCache.ImageCacheParams)}. If the image is found in
     * the memory cache, it is set immediately, otherwise an {@link AsyncTask} will be created to
     * asynchronously load the bitmap.
     * @param data Identifies the image to load (e.g. a URL); also used as the cache key.
     * @param imageView The ImageView to bind the loaded image to.
     */
    public void loadImage(Object data, ImageView imageView) {
        if (data == null) {
            return;
        }

        BitmapDrawable value = null;

        if (mImageCache != null) {
            value = mImageCache.getBitmapFromMemCache(String.valueOf(data));
        }

        if (value != null) {
            // Bitmap found in memory cache
            imageView.setImageDrawable(value);
        } else if (cancelPotentialWork(data, imageView)) {
            // BEGIN_INCLUDE(execute_background_task)
            final BitmapWorkerTask task = new BitmapWorkerTask(data, imageView);
            final AsyncDrawable asyncDrawable =
                    new AsyncDrawable(mResources, mLoadingBitmap, task);
            imageView.setImageDrawable(asyncDrawable);

            // NOTE: This uses a custom version of AsyncTask that has been pulled from the
            // framework and slightly modified. Refer to the docs at the top of the class
            // for more info on what was changed.
            task.executeOnExecutor(AsyncTask.DUAL_THREAD_EXECUTOR);
            // END_INCLUDE(execute_background_task)
        }
    }

    /**
     * Set placeholder bitmap that shows when the the background thread is running.
     * @param bitmap the placeholder bitmap.
     */
    public void setLoadingImage(Bitmap bitmap) {
        mLoadingBitmap = bitmap;
    }

    /**
     * Set placeholder bitmap that shows when the the background thread is running.
     * @param resId drawable resource id of the placeholder image.
     */
    public void setLoadingImage(int resId) {
        mLoadingBitmap = BitmapFactory.decodeResource(mResources, resId);
    }

    /**
     * Adds an {@link ImageCache} to this {@link ImageWorker} to handle disk and memory bitmap
     * caching.
     * @param cacheParams The cache parameters to use for the image cache.
     */
    public void addImageCache(ImageCache.ImageCacheParams cacheParams) {
        mImageCacheParams = cacheParams;
        mImageCache = ImageCache.getInstance(mImageCacheParams);
        new CacheAsyncTask().execute(MESSAGE_INIT_DISK_CACHE);
    }

    /**
     * Adds an {@link ImageCache} to this {@link ImageWorker} to handle disk and memory bitmap
     * caching.
     * @param activity the activity providing the context for the cache.
     * @param diskCacheDirectoryName See
     *            {@link ImageCache.ImageCacheParams#ImageCacheParams(android.content.Context, String)}
     *            .
     */
    public void addImageCache(FragmentActivity activity, String diskCacheDirectoryName) {
        mImageCacheParams = new ImageCache.ImageCacheParams(activity, diskCacheDirectoryName);
        mImageCache = ImageCache.getInstance(mImageCacheParams);
        new CacheAsyncTask().execute(MESSAGE_INIT_DISK_CACHE);
    }

    /**
     * If set to true, the image will fade-in once it has been loaded by the background thread.
     */
    public void setImageFadeIn(boolean fadeIn) {
        mFadeInBitmap = fadeIn;
    }

    // When true, in-flight tasks abandon remaining work at their next checkpoint.
    public void setExitTasksEarly(boolean exitTasksEarly) {
        mExitTasksEarly = exitTasksEarly;
        setPauseWork(false);
    }

    /**
     * Subclasses should override this to define any processing or work that must happen to
     * produce the final bitmap. This will be executed in a background thread and be long running.
     * For example, you could resize a large bitmap here, or pull down an image from the network.
     * @param data The data to identify which image to process, as provided by
     *            {@link ImageWorker#loadImage(Object, android.widget.ImageView)}
     * @return The processed bitmap
     */
    protected abstract Bitmap processBitmap(Object data);

    /**
     * @return The {@link ImageCache} object currently being used by this ImageWorker.
     */
    protected ImageCache getImageCache() {
        return mImageCache;
    }

    /**
     * Cancels any pending work attached to the provided ImageView.
     * @param imageView the view whose associated task (if any) should be cancelled.
     */
    public static void cancelWork(ImageView imageView) {
        final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);
        if (bitmapWorkerTask != null) {
            bitmapWorkerTask.cancel(true);
            if (BuildConfig.DEBUG) {
                final Object bitmapData = bitmapWorkerTask.mData;
                Log.d(TAG, "cancelWork - cancelled work for " + bitmapData);
            }
        }
    }

    /**
     * Returns true if the current work has been canceled or if there was no work in progress on
     * this image view. Returns false if the work in progress deals with the same data. The work
     * is not stopped in that case.
     */
    public static boolean cancelPotentialWork(Object data, ImageView imageView) {
        // BEGIN_INCLUDE(cancel_potential_work)
        final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);
        if (bitmapWorkerTask != null) {
            final Object bitmapData = bitmapWorkerTask.mData;
            if (bitmapData == null || !bitmapData.equals(data)) {
                bitmapWorkerTask.cancel(true);
                if (BuildConfig.DEBUG) {
                    Log.d(TAG, "cancelPotentialWork - cancelled work for " + data);
                }
            } else {
                // The same work is already in progress.
                return false;
            }
        }
        return true;
        // END_INCLUDE(cancel_potential_work)
    }

    /**
     * @param imageView Any imageView
     * @return Retrieve the currently active work task (if any) associated with this imageView.
     *         null if there is no such task.
     */
    private static BitmapWorkerTask getBitmapWorkerTask(ImageView imageView) {
        if (imageView != null) {
            final Drawable drawable = imageView.getDrawable();
            if (drawable instanceof AsyncDrawable) {
                final AsyncDrawable asyncDrawable = (AsyncDrawable) drawable;
                return asyncDrawable.getBitmapWorkerTask();
            }
        }
        return null;
    }

    /**
     * The actual AsyncTask that will asynchronously process the image.
     */
    private class BitmapWorkerTask extends AsyncTask<Void, Void, BitmapDrawable> {
        // Cache key / image identifier; compared in cancelPotentialWork().
        private Object mData;
        // Weak so a pending task does not keep a recycled view alive.
        private final WeakReference<ImageView> imageViewReference;

        public BitmapWorkerTask(Object data, ImageView imageView) {
            mData = data;
            imageViewReference = new WeakReference<ImageView>(imageView);
        }

        /**
         * Background processing.
         */
        @Override
        protected BitmapDrawable doInBackground(Void... params) {
            // BEGIN_INCLUDE(load_bitmap_in_background)
            if (BuildConfig.DEBUG) {
                Log.d(TAG, "doInBackground - starting work");
            }

            final String dataString = String.valueOf(mData);
            Bitmap bitmap = null;
            BitmapDrawable drawable = null;

            // Wait here if work is paused and the task is not cancelled
            synchronized (mPauseWorkLock) {
                while (mPauseWork && !isCancelled()) {
                    try {
                        mPauseWorkLock.wait();
                    } catch (InterruptedException e) {
                        // Deliberately ignored: the loop re-checks mPauseWork
                        // and isCancelled() on every wakeup.
                    }
                }
            }

            // If the image cache is available and this task has not been cancelled by another
            // thread and the ImageView that was originally bound to this task is still bound back
            // to this task and our "exit early" flag is not set then try and fetch the bitmap from
            // the cache
            if (mImageCache != null && !isCancelled() && getAttachedImageView() != null
                    && !mExitTasksEarly) {
                bitmap = mImageCache.getBitmapFromDiskCache(dataString);
            }

            // If the bitmap was not found in the cache and this task has not been cancelled by
            // another thread and the ImageView that was originally bound to this task is still
            // bound back to this task and our "exit early" flag is not set, then call the main
            // process method (as implemented by a subclass)
            if (bitmap == null && !isCancelled() && getAttachedImageView() != null
                    && !mExitTasksEarly) {
                bitmap = processBitmap(mData);
            }

            // If the bitmap was processed and the image cache is available, then add the processed
            // bitmap to the cache for future use. Note we don't check if the task was cancelled
            // here, if it was, and the thread is still running, we may as well add the processed
            // bitmap to our cache as it might be used again in the future
            if (bitmap != null) {
                if (Build.VERSION.SDK_INT >= 11) {
                    // Running on Honeycomb or newer, so wrap in a standard BitmapDrawable
                    drawable = new BitmapDrawable(mResources, bitmap);
                } else {
                    // Running on Gingerbread or older, so wrap in a RecyclingBitmapDrawable
                    // which will recycle automagically
                    drawable = new RecyclingBitmapDrawable(mResources, bitmap);
                }

                if (mImageCache != null) {
                    mImageCache.addBitmapToCache(dataString, drawable);
                }
            }

            if (BuildConfig.DEBUG) {
                Log.d(TAG, "doInBackground - finished work");
            }

            return drawable;
            // END_INCLUDE(load_bitmap_in_background)
        }

        /**
         * Once the image is processed, associates it to the imageView
         */
        @Override
        protected void onPostExecute(BitmapDrawable value) {
            // BEGIN_INCLUDE(complete_background_work)
            // if cancel was called on this task or the "exit early" flag is set then we're done
            if (isCancelled() || mExitTasksEarly) {
                value = null;
            }

            final ImageView imageView = getAttachedImageView();
            if (value != null && imageView != null) {
                if (BuildConfig.DEBUG) {
                    Log.d(TAG, "onPostExecute - setting bitmap");
                }
                setImageDrawable(imageView, value);
            }
            // END_INCLUDE(complete_background_work)
        }

        @Override
        protected void onCancelled(BitmapDrawable value) {
            super.onCancelled(value);
            // Wake any task parked in the pause loop so it can observe the
            // cancellation and exit.
            synchronized (mPauseWorkLock) {
                mPauseWorkLock.notifyAll();
            }
        }

        /**
         * Returns the ImageView associated with this task as long as the ImageView's task still
         * points to this task as well. Returns null otherwise.
         */
        private ImageView getAttachedImageView() {
            final ImageView imageView = imageViewReference.get();
            final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);

            if (this == bitmapWorkerTask) {
                return imageView;
            }

            return null;
        }
    }

    /**
     * A custom Drawable that will be attached to the imageView while the work is in progress.
     * Contains a reference to the actual worker task, so that it can be stopped if a new binding
     * is required, and makes sure that only the last started worker process can bind its result,
     * independently of the finish order.
     */
    private static class AsyncDrawable extends BitmapDrawable {
        private final WeakReference<BitmapWorkerTask> bitmapWorkerTaskReference;

        public AsyncDrawable(Resources res, Bitmap bitmap, BitmapWorkerTask bitmapWorkerTask) {
            super(res, bitmap);
            bitmapWorkerTaskReference = new WeakReference<BitmapWorkerTask>(
                    bitmapWorkerTask);
        }

        public BitmapWorkerTask getBitmapWorkerTask() {
            return bitmapWorkerTaskReference.get();
        }
    }

    /**
     * Called when the processing is complete and the final drawable should be set on the
     * ImageView.
     * @param imageView the target view.
     * @param drawable the drawable to display.
     */
    private void setImageDrawable(ImageView imageView, Drawable drawable) {
        // NOTE(review): the fade-in transition below is commented out, so
        // mFadeInBitmap / FADE_IN_TIME currently have no effect.
        /*if (mFadeInBitmap) {
            // Transition drawable with a transparent drawable and the final drawable
            final TransitionDrawable td =
                    new TransitionDrawable(new Drawable[] {
                            new ColorDrawable(android.R.color.transparent), drawable });
            // Set background to loading bitmap
            imageView.setImageDrawable(td);
            td.startTransition(FADE_IN_TIME);
        } else {*/
        imageView.setImageDrawable(drawable);
        // }
    }

    /**
     * Pause any ongoing background work. This can be used as a temporary measure to improve
     * performance. For example background work could be paused when a ListView or GridView is
     * being scrolled using a {@link android.widget.AbsListView.OnScrollListener} to keep
     * scrolling smooth.
     * <p>
     * If work is paused, be sure setPauseWork(false) is called again before your fragment or
     * activity is destroyed (for example during {@link android.app.Activity#onPause()}), or
     * there is a risk the background thread will never finish.
     */
    public void setPauseWork(boolean pauseWork) {
        synchronized (mPauseWorkLock) {
            mPauseWork = pauseWork;
            if (!mPauseWork) {
                mPauseWorkLock.notifyAll();
            }
        }
    }

    // Runs cache maintenance operations off the UI thread; the message code in
    // params[0] selects which operation to perform.
    protected class CacheAsyncTask extends AsyncTask<Object, Void, Void> {

        @Override
        protected Void doInBackground(Object... params) {
            switch ((Integer) params[0]) {
                case MESSAGE_CLEAR:
                    clearCacheInternal();
                    break;
                case MESSAGE_INIT_DISK_CACHE:
                    initDiskCacheInternal();
                    break;
                case MESSAGE_FLUSH:
                    flushCacheInternal();
                    break;
                case MESSAGE_CLOSE:
                    closeCacheInternal();
                    break;
            }
            return null;
        }
    }

    protected void initDiskCacheInternal() {
        if (mImageCache != null) {
            mImageCache.initDiskCache();
        }
    }

    protected void clearCacheInternal() {
        if (mImageCache != null) {
            mImageCache.clearCache();
        }
    }

    protected void flushCacheInternal() {
        if (mImageCache != null) {
            mImageCache.flush();
        }
    }

    protected void closeCacheInternal() {
        if (mImageCache != null) {
            mImageCache.close();
            mImageCache = null;
        }
    }

    public void clearCache() {
        new CacheAsyncTask().execute(MESSAGE_CLEAR);
    }

    public void flushCache() {
        new CacheAsyncTask().execute(MESSAGE_FLUSH);
    }

    public void closeCache() {
        new CacheAsyncTask().execute(MESSAGE_CLOSE);
    }
}
/* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.collections15.iterators; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.NoSuchElementException; /** * Tests the LoopingIterator class. * * @author Jonathan Carlson * @author Matt Hall, John Watkinson, Stephen Colebourne * @version $Revision: 1.1 $ $Date: 2005/10/11 19:11:58 $ */ public class TestLoopingIterator extends TestCase { public TestLoopingIterator(String testName) { super(testName); } public static Test suite() { return new TestSuite(TestLoopingIterator.class); } /** * Tests constructor exception. */ public void testConstructorEx() throws Exception { try { new LoopingIterator(null); fail(); } catch (NullPointerException ex) { } } /** * Tests whether an empty looping iterator works as designed. * * @throws Exception If something unexpected occurs. */ public void testLooping0() throws Exception { List list = new ArrayList(); LoopingIterator loop = new LoopingIterator(list); assertTrue("hasNext should return false", loop.hasNext() == false); try { loop.next(); fail("NoSuchElementException was not thrown during next() call."); } catch (NoSuchElementException ex) { } } /** * Tests whether a populated looping iterator works as designed. * * @throws Exception If something unexpected occurs. 
*/ public void testLooping1() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a"})); LoopingIterator loop = new LoopingIterator(list); assertTrue("1st hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); assertTrue("2nd hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); assertTrue("3rd hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); } /** * Tests whether a populated looping iterator works as designed. * * @throws Exception If something unexpected occurs. */ public void testLooping2() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a", "b"})); LoopingIterator loop = new LoopingIterator(list); assertTrue("1st hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); assertTrue("2nd hasNext should return true", loop.hasNext()); assertEquals("b", loop.next()); assertTrue("3rd hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); } /** * Tests whether a populated looping iterator works as designed. * * @throws Exception If something unexpected occurs. */ public void testLooping3() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a", "b", "c"})); LoopingIterator loop = new LoopingIterator(list); assertTrue("1st hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); assertTrue("2nd hasNext should return true", loop.hasNext()); assertEquals("b", loop.next()); assertTrue("3rd hasNext should return true", loop.hasNext()); assertEquals("c", loop.next()); assertTrue("4th hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); } /** * Tests the remove() method on a LoopingIterator wrapped ArrayList. * * @throws Exception If something unexpected occurs. 
*/ public void testRemoving1() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a", "b", "c"})); LoopingIterator loop = new LoopingIterator(list); assertEquals("list should have 3 elements.", 3, list.size()); assertTrue("1st hasNext should return true", loop.hasNext()); assertEquals("a", loop.next()); loop.remove(); // removes a assertEquals("list should have 2 elements.", 2, list.size()); assertTrue("2nd hasNext should return true", loop.hasNext()); assertEquals("b", loop.next()); loop.remove(); // removes b assertEquals("list should have 1 elements.", 1, list.size()); assertTrue("3rd hasNext should return true", loop.hasNext()); assertEquals("c", loop.next()); loop.remove(); // removes c assertEquals("list should have 0 elements.", 0, list.size()); assertTrue("4th hasNext should return false", loop.hasNext() == false); try { loop.next(); fail("Expected NoSuchElementException to be thrown."); } catch (NoSuchElementException ex) { } } /** * Tests the reset() method on a LoopingIterator wrapped ArrayList. * * @throws Exception If something unexpected occurs. */ public void testReset() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a", "b", "c"})); LoopingIterator loop = new LoopingIterator(list); assertEquals("a", loop.next()); assertEquals("b", loop.next()); loop.reset(); assertEquals("a", loop.next()); loop.reset(); assertEquals("a", loop.next()); assertEquals("b", loop.next()); assertEquals("c", loop.next()); loop.reset(); assertEquals("a", loop.next()); assertEquals("b", loop.next()); assertEquals("c", loop.next()); } /** * Tests the size() method on a LoopingIterator wrapped ArrayList. * * @throws Exception If something unexpected occurs. 
*/ public void testSize() throws Exception { List list = new ArrayList(Arrays.asList(new String[]{"a", "b", "c"})); LoopingIterator loop = new LoopingIterator(list); assertEquals(3, loop.size()); loop.next(); loop.next(); assertEquals(3, loop.size()); loop.reset(); assertEquals(3, loop.size()); loop.next(); loop.remove(); assertEquals(2, loop.size()); } }
package jp.scriptkidie.datacopy.sql.model.educationdb; import java.util.ArrayList; import java.util.List; public class ReceptionPartiesExample { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; public ReceptionPartiesExample() { oredCriteria = new ArrayList<Criteria>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object 
value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andReceptionNoIsNull() { addCriterion("RECEPTION_NO is null"); return (Criteria) this; } public Criteria andReceptionNoIsNotNull() { addCriterion("RECEPTION_NO is not null"); return (Criteria) this; } public Criteria andReceptionNoEqualTo(String value) { addCriterion("RECEPTION_NO =", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoNotEqualTo(String value) { addCriterion("RECEPTION_NO <>", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoGreaterThan(String value) { addCriterion("RECEPTION_NO >", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoGreaterThanOrEqualTo(String value) { addCriterion("RECEPTION_NO >=", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoLessThan(String value) { addCriterion("RECEPTION_NO <", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoLessThanOrEqualTo(String value) { addCriterion("RECEPTION_NO <=", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoLike(String value) { addCriterion("RECEPTION_NO like", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoNotLike(String value) { addCriterion("RECEPTION_NO not like", value, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoIn(List<String> values) { addCriterion("RECEPTION_NO in", values, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoNotIn(List<String> values) { addCriterion("RECEPTION_NO not in", values, "receptionNo"); return (Criteria) this; } public Criteria andReceptionNoBetween(String value1, String value2) { addCriterion("RECEPTION_NO between", value1, value2, "receptionNo"); return (Criteria) this; 
} public Criteria andReceptionNoNotBetween(String value1, String value2) { addCriterion("RECEPTION_NO not between", value1, value2, "receptionNo"); return (Criteria) this; } public Criteria andReceptionPartyDivIsNull() { addCriterion("RECEPTION_PARTY_DIV is null"); return (Criteria) this; } public Criteria andReceptionPartyDivIsNotNull() { addCriterion("RECEPTION_PARTY_DIV is not null"); return (Criteria) this; } public Criteria andReceptionPartyDivEqualTo(String value) { addCriterion("RECEPTION_PARTY_DIV =", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivNotEqualTo(String value) { addCriterion("RECEPTION_PARTY_DIV <>", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivGreaterThan(String value) { addCriterion("RECEPTION_PARTY_DIV >", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivGreaterThanOrEqualTo(String value) { addCriterion("RECEPTION_PARTY_DIV >=", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivLessThan(String value) { addCriterion("RECEPTION_PARTY_DIV <", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivLessThanOrEqualTo(String value) { addCriterion("RECEPTION_PARTY_DIV <=", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivLike(String value) { addCriterion("RECEPTION_PARTY_DIV like", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivNotLike(String value) { addCriterion("RECEPTION_PARTY_DIV not like", value, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivIn(List<String> values) { addCriterion("RECEPTION_PARTY_DIV in", values, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivNotIn(List<String> values) { addCriterion("RECEPTION_PARTY_DIV not in", values, "receptionPartyDiv"); return (Criteria) 
this; } public Criteria andReceptionPartyDivBetween(String value1, String value2) { addCriterion("RECEPTION_PARTY_DIV between", value1, value2, "receptionPartyDiv"); return (Criteria) this; } public Criteria andReceptionPartyDivNotBetween(String value1, String value2) { addCriterion("RECEPTION_PARTY_DIV not between", value1, value2, "receptionPartyDiv"); return (Criteria) this; } public Criteria andLocationNoIsNull() { addCriterion("LOCATION_NO is null"); return (Criteria) this; } public Criteria andLocationNoIsNotNull() { addCriterion("LOCATION_NO is not null"); return (Criteria) this; } public Criteria andLocationNoEqualTo(String value) { addCriterion("LOCATION_NO =", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoNotEqualTo(String value) { addCriterion("LOCATION_NO <>", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoGreaterThan(String value) { addCriterion("LOCATION_NO >", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoGreaterThanOrEqualTo(String value) { addCriterion("LOCATION_NO >=", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoLessThan(String value) { addCriterion("LOCATION_NO <", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoLessThanOrEqualTo(String value) { addCriterion("LOCATION_NO <=", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoLike(String value) { addCriterion("LOCATION_NO like", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoNotLike(String value) { addCriterion("LOCATION_NO not like", value, "locationNo"); return (Criteria) this; } public Criteria andLocationNoIn(List<String> values) { addCriterion("LOCATION_NO in", values, "locationNo"); return (Criteria) this; } public Criteria andLocationNoNotIn(List<String> values) { addCriterion("LOCATION_NO not in", values, "locationNo"); return (Criteria) this; } public Criteria 
andLocationNoBetween(String value1, String value2) { addCriterion("LOCATION_NO between", value1, value2, "locationNo"); return (Criteria) this; } public Criteria andLocationNoNotBetween(String value1, String value2) { addCriterion("LOCATION_NO not between", value1, value2, "locationNo"); return (Criteria) this; } public Criteria andNameKanjiIsNull() { addCriterion("NAME_KANJI is null"); return (Criteria) this; } public Criteria andNameKanjiIsNotNull() { addCriterion("NAME_KANJI is not null"); return (Criteria) this; } public Criteria andNameKanjiEqualTo(String value) { addCriterion("NAME_KANJI =", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiNotEqualTo(String value) { addCriterion("NAME_KANJI <>", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiGreaterThan(String value) { addCriterion("NAME_KANJI >", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiGreaterThanOrEqualTo(String value) { addCriterion("NAME_KANJI >=", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiLessThan(String value) { addCriterion("NAME_KANJI <", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiLessThanOrEqualTo(String value) { addCriterion("NAME_KANJI <=", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiLike(String value) { addCriterion("NAME_KANJI like", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiNotLike(String value) { addCriterion("NAME_KANJI not like", value, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiIn(List<String> values) { addCriterion("NAME_KANJI in", values, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiNotIn(List<String> values) { addCriterion("NAME_KANJI not in", values, "nameKanji"); return (Criteria) this; } public Criteria andNameKanjiBetween(String value1, String value2) { addCriterion("NAME_KANJI between", value1, value2, "nameKanji"); return 
(Criteria) this; } public Criteria andNameKanjiNotBetween(String value1, String value2) { addCriterion("NAME_KANJI not between", value1, value2, "nameKanji"); return (Criteria) this; } public Criteria andNameKanaIsNull() { addCriterion("NAME_KANA is null"); return (Criteria) this; } public Criteria andNameKanaIsNotNull() { addCriterion("NAME_KANA is not null"); return (Criteria) this; } public Criteria andNameKanaEqualTo(String value) { addCriterion("NAME_KANA =", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaNotEqualTo(String value) { addCriterion("NAME_KANA <>", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaGreaterThan(String value) { addCriterion("NAME_KANA >", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaGreaterThanOrEqualTo(String value) { addCriterion("NAME_KANA >=", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaLessThan(String value) { addCriterion("NAME_KANA <", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaLessThanOrEqualTo(String value) { addCriterion("NAME_KANA <=", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaLike(String value) { addCriterion("NAME_KANA like", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaNotLike(String value) { addCriterion("NAME_KANA not like", value, "nameKana"); return (Criteria) this; } public Criteria andNameKanaIn(List<String> values) { addCriterion("NAME_KANA in", values, "nameKana"); return (Criteria) this; } public Criteria andNameKanaNotIn(List<String> values) { addCriterion("NAME_KANA not in", values, "nameKana"); return (Criteria) this; } public Criteria andNameKanaBetween(String value1, String value2) { addCriterion("NAME_KANA between", value1, value2, "nameKana"); return (Criteria) this; } public Criteria andNameKanaNotBetween(String value1, String value2) { addCriterion("NAME_KANA not between", value1, value2, "nameKana"); return 
(Criteria) this; } public Criteria andAddress1IsNull() { addCriterion("ADDRESS_1 is null"); return (Criteria) this; } public Criteria andAddress1IsNotNull() { addCriterion("ADDRESS_1 is not null"); return (Criteria) this; } public Criteria andAddress1EqualTo(String value) { addCriterion("ADDRESS_1 =", value, "address1"); return (Criteria) this; } public Criteria andAddress1NotEqualTo(String value) { addCriterion("ADDRESS_1 <>", value, "address1"); return (Criteria) this; } public Criteria andAddress1GreaterThan(String value) { addCriterion("ADDRESS_1 >", value, "address1"); return (Criteria) this; } public Criteria andAddress1GreaterThanOrEqualTo(String value) { addCriterion("ADDRESS_1 >=", value, "address1"); return (Criteria) this; } public Criteria andAddress1LessThan(String value) { addCriterion("ADDRESS_1 <", value, "address1"); return (Criteria) this; } public Criteria andAddress1LessThanOrEqualTo(String value) { addCriterion("ADDRESS_1 <=", value, "address1"); return (Criteria) this; } public Criteria andAddress1Like(String value) { addCriterion("ADDRESS_1 like", value, "address1"); return (Criteria) this; } public Criteria andAddress1NotLike(String value) { addCriterion("ADDRESS_1 not like", value, "address1"); return (Criteria) this; } public Criteria andAddress1In(List<String> values) { addCriterion("ADDRESS_1 in", values, "address1"); return (Criteria) this; } public Criteria andAddress1NotIn(List<String> values) { addCriterion("ADDRESS_1 not in", values, "address1"); return (Criteria) this; } public Criteria andAddress1Between(String value1, String value2) { addCriterion("ADDRESS_1 between", value1, value2, "address1"); return (Criteria) this; } public Criteria andAddress1NotBetween(String value1, String value2) { addCriterion("ADDRESS_1 not between", value1, value2, "address1"); return (Criteria) this; } public Criteria andAddress2IsNull() { addCriterion("ADDRESS_2 is null"); return (Criteria) this; } public Criteria andAddress2IsNotNull() { 
addCriterion("ADDRESS_2 is not null"); return (Criteria) this; } public Criteria andAddress2EqualTo(String value) { addCriterion("ADDRESS_2 =", value, "address2"); return (Criteria) this; } public Criteria andAddress2NotEqualTo(String value) { addCriterion("ADDRESS_2 <>", value, "address2"); return (Criteria) this; } public Criteria andAddress2GreaterThan(String value) { addCriterion("ADDRESS_2 >", value, "address2"); return (Criteria) this; } public Criteria andAddress2GreaterThanOrEqualTo(String value) { addCriterion("ADDRESS_2 >=", value, "address2"); return (Criteria) this; } public Criteria andAddress2LessThan(String value) { addCriterion("ADDRESS_2 <", value, "address2"); return (Criteria) this; } public Criteria andAddress2LessThanOrEqualTo(String value) { addCriterion("ADDRESS_2 <=", value, "address2"); return (Criteria) this; } public Criteria andAddress2Like(String value) { addCriterion("ADDRESS_2 like", value, "address2"); return (Criteria) this; } public Criteria andAddress2NotLike(String value) { addCriterion("ADDRESS_2 not like", value, "address2"); return (Criteria) this; } public Criteria andAddress2In(List<String> values) { addCriterion("ADDRESS_2 in", values, "address2"); return (Criteria) this; } public Criteria andAddress2NotIn(List<String> values) { addCriterion("ADDRESS_2 not in", values, "address2"); return (Criteria) this; } public Criteria andAddress2Between(String value1, String value2) { addCriterion("ADDRESS_2 between", value1, value2, "address2"); return (Criteria) this; } public Criteria andAddress2NotBetween(String value1, String value2) { addCriterion("ADDRESS_2 not between", value1, value2, "address2"); return (Criteria) this; } public Criteria andAddress3IsNull() { addCriterion("ADDRESS_3 is null"); return (Criteria) this; } public Criteria andAddress3IsNotNull() { addCriterion("ADDRESS_3 is not null"); return (Criteria) this; } public Criteria andAddress3EqualTo(String value) { addCriterion("ADDRESS_3 =", value, "address3"); 
return (Criteria) this; } public Criteria andAddress3NotEqualTo(String value) { addCriterion("ADDRESS_3 <>", value, "address3"); return (Criteria) this; } public Criteria andAddress3GreaterThan(String value) { addCriterion("ADDRESS_3 >", value, "address3"); return (Criteria) this; } public Criteria andAddress3GreaterThanOrEqualTo(String value) { addCriterion("ADDRESS_3 >=", value, "address3"); return (Criteria) this; } public Criteria andAddress3LessThan(String value) { addCriterion("ADDRESS_3 <", value, "address3"); return (Criteria) this; } public Criteria andAddress3LessThanOrEqualTo(String value) { addCriterion("ADDRESS_3 <=", value, "address3"); return (Criteria) this; } public Criteria andAddress3Like(String value) { addCriterion("ADDRESS_3 like", value, "address3"); return (Criteria) this; } public Criteria andAddress3NotLike(String value) { addCriterion("ADDRESS_3 not like", value, "address3"); return (Criteria) this; } public Criteria andAddress3In(List<String> values) { addCriterion("ADDRESS_3 in", values, "address3"); return (Criteria) this; } public Criteria andAddress3NotIn(List<String> values) { addCriterion("ADDRESS_3 not in", values, "address3"); return (Criteria) this; } public Criteria andAddress3Between(String value1, String value2) { addCriterion("ADDRESS_3 between", value1, value2, "address3"); return (Criteria) this; } public Criteria andAddress3NotBetween(String value1, String value2) { addCriterion("ADDRESS_3 not between", value1, value2, "address3"); return (Criteria) this; } public Criteria andDistrictsNoIsNull() { addCriterion("DISTRICTS_NO is null"); return (Criteria) this; } public Criteria andDistrictsNoIsNotNull() { addCriterion("DISTRICTS_NO is not null"); return (Criteria) this; } public Criteria andDistrictsNoEqualTo(String value) { addCriterion("DISTRICTS_NO =", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoNotEqualTo(String value) { addCriterion("DISTRICTS_NO <>", value, "districtsNo"); return 
(Criteria) this; } public Criteria andDistrictsNoGreaterThan(String value) { addCriterion("DISTRICTS_NO >", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoGreaterThanOrEqualTo(String value) { addCriterion("DISTRICTS_NO >=", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoLessThan(String value) { addCriterion("DISTRICTS_NO <", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoLessThanOrEqualTo(String value) { addCriterion("DISTRICTS_NO <=", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoLike(String value) { addCriterion("DISTRICTS_NO like", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoNotLike(String value) { addCriterion("DISTRICTS_NO not like", value, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoIn(List<String> values) { addCriterion("DISTRICTS_NO in", values, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoNotIn(List<String> values) { addCriterion("DISTRICTS_NO not in", values, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoBetween(String value1, String value2) { addCriterion("DISTRICTS_NO between", value1, value2, "districtsNo"); return (Criteria) this; } public Criteria andDistrictsNoNotBetween(String value1, String value2) { addCriterion("DISTRICTS_NO not between", value1, value2, "districtsNo"); return (Criteria) this; } public Criteria andTownCdIsNull() { addCriterion("TOWN_CD is null"); return (Criteria) this; } public Criteria andTownCdIsNotNull() { addCriterion("TOWN_CD is not null"); return (Criteria) this; } public Criteria andTownCdEqualTo(String value) { addCriterion("TOWN_CD =", value, "townCd"); return (Criteria) this; } public Criteria andTownCdNotEqualTo(String value) { addCriterion("TOWN_CD <>", value, "townCd"); return (Criteria) this; } public Criteria andTownCdGreaterThan(String value) { addCriterion("TOWN_CD >", 
value, "townCd");
            return (Criteria) this;
        }

        // ----- TOWN_CD criteria builders (generated) -----

        public Criteria andTownCdGreaterThanOrEqualTo(String value) {
            addCriterion("TOWN_CD >=", value, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdLessThan(String value) {
            addCriterion("TOWN_CD <", value, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdLessThanOrEqualTo(String value) {
            addCriterion("TOWN_CD <=", value, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdLike(String value) {
            addCriterion("TOWN_CD like", value, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdNotLike(String value) {
            addCriterion("TOWN_CD not like", value, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdIn(List<String> values) {
            addCriterion("TOWN_CD in", values, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdNotIn(List<String> values) {
            addCriterion("TOWN_CD not in", values, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdBetween(String value1, String value2) {
            addCriterion("TOWN_CD between", value1, value2, "townCd");
            return (Criteria) this;
        }

        public Criteria andTownCdNotBetween(String value1, String value2) {
            addCriterion("TOWN_CD not between", value1, value2, "townCd");
            return (Criteria) this;
        }

        // ----- CITY_BLOCK_CD criteria builders (generated) -----

        public Criteria andCityBlockCdIsNull() {
            addCriterion("CITY_BLOCK_CD is null");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdIsNotNull() {
            addCriterion("CITY_BLOCK_CD is not null");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdEqualTo(String value) {
            addCriterion("CITY_BLOCK_CD =", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdNotEqualTo(String value) {
            addCriterion("CITY_BLOCK_CD <>", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdGreaterThan(String value) {
            addCriterion("CITY_BLOCK_CD >", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdGreaterThanOrEqualTo(String value) {
            addCriterion("CITY_BLOCK_CD >=", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdLessThan(String value) {
            addCriterion("CITY_BLOCK_CD <", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdLessThanOrEqualTo(String value) {
            addCriterion("CITY_BLOCK_CD <=", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdLike(String value) {
            addCriterion("CITY_BLOCK_CD like", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdNotLike(String value) {
            addCriterion("CITY_BLOCK_CD not like", value, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdIn(List<String> values) {
            addCriterion("CITY_BLOCK_CD in", values, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdNotIn(List<String> values) {
            addCriterion("CITY_BLOCK_CD not in", values, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdBetween(String value1, String value2) {
            addCriterion("CITY_BLOCK_CD between", value1, value2, "cityBlockCd");
            return (Criteria) this;
        }

        public Criteria andCityBlockCdNotBetween(String value1, String value2) {
            addCriterion("CITY_BLOCK_CD not between", value1, value2, "cityBlockCd");
            return (Criteria) this;
        }

        // ----- TEL_NO criteria builders (generated) -----

        public Criteria andTelNoIsNull() {
            addCriterion("TEL_NO is null");
            return (Criteria) this;
        }

        public Criteria andTelNoIsNotNull() {
            addCriterion("TEL_NO is not null");
            return (Criteria) this;
        }

        public Criteria andTelNoEqualTo(String value) {
            addCriterion("TEL_NO =", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoNotEqualTo(String value) {
            addCriterion("TEL_NO <>", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoGreaterThan(String value) {
            addCriterion("TEL_NO >", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoGreaterThanOrEqualTo(String value) {
            addCriterion("TEL_NO >=", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoLessThan(String value) {
            addCriterion("TEL_NO <", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoLessThanOrEqualTo(String value) {
            addCriterion("TEL_NO <=", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoLike(String value) {
            addCriterion("TEL_NO like", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoNotLike(String value) {
            addCriterion("TEL_NO not like", value, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoIn(List<String> values) {
            addCriterion("TEL_NO in", values, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoNotIn(List<String> values) {
            addCriterion("TEL_NO not in", values, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoBetween(String value1, String value2) {
            addCriterion("TEL_NO between", value1, value2, "telNo");
            return (Criteria) this;
        }

        public Criteria andTelNoNotBetween(String value1, String value2) {
            addCriterion("TEL_NO not between", value1, value2, "telNo");
            return (Criteria) this;
        }

        // ----- COMPANY_NAME criteria builders (generated) -----

        public Criteria andCompanyNameIsNull() {
            addCriterion("COMPANY_NAME is null");
            return (Criteria) this;
        }

        public Criteria andCompanyNameIsNotNull() {
            addCriterion("COMPANY_NAME is not null");
            return (Criteria) this;
        }

        public Criteria andCompanyNameEqualTo(String value) {
            addCriterion("COMPANY_NAME =", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameNotEqualTo(String value) {
            addCriterion("COMPANY_NAME <>", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameGreaterThan(String value) {
            addCriterion("COMPANY_NAME >", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameGreaterThanOrEqualTo(String value) {
            addCriterion("COMPANY_NAME >=", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameLessThan(String value) {
            addCriterion("COMPANY_NAME <", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameLessThanOrEqualTo(String value) {
            addCriterion("COMPANY_NAME <=", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameLike(String value) {
            addCriterion("COMPANY_NAME like", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameNotLike(String value) {
            addCriterion("COMPANY_NAME not like", value, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameIn(List<String> values) {
            addCriterion("COMPANY_NAME in", values, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameNotIn(List<String> values) {
            addCriterion("COMPANY_NAME not in", values, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameBetween(String value1, String value2) {
            addCriterion("COMPANY_NAME between", value1, value2, "companyName");
            return (Criteria) this;
        }

        public Criteria andCompanyNameNotBetween(String value1, String value2) {
            addCriterion("COMPANY_NAME not between", value1, value2, "companyName");
            return (Criteria) this;
        }
    }

    /**
     * Concrete criteria type returned by the generated builder methods.
     */
    public static class Criteria extends GeneratedCriteria {

        protected Criteria() {
            super();
        }
    }

    /**
     * A single SQL condition plus its operand(s); the flags record which of the
     * no-value / single-value / between / list shapes this criterion uses.
     */
    public static class Criterion {
        private String condition;

        private Object value;

        private Object secondValue;

        private boolean noValue;

        private boolean singleValue;

        private boolean betweenValue;

        private boolean listValue;

        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        // Condition with no operand, e.g. "COL is null".
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        // Single operand, or a list operand for "in"/"not in".
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        // Two operands for "between"/"not between".
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
/** * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.core.query.aggregation.groupby; import it.unimi.dsi.fastutil.PriorityQueue; import it.unimi.dsi.fastutil.objects.ObjectArrayPriorityQueue; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.linkedin.pinot.common.Utils; import com.linkedin.pinot.common.request.AggregationInfo; import com.linkedin.pinot.common.request.GroupBy; import com.linkedin.pinot.common.response.ServerInstance; import com.linkedin.pinot.common.utils.DataTable; import com.linkedin.pinot.core.query.aggregation.AggregationFunction; import com.linkedin.pinot.core.query.aggregation.AggregationFunctionFactory; import com.linkedin.pinot.core.query.utils.Pair; /** * GroupByAggregationService is initialized by aggregation functions and groupBys. 
* * */ public class AggregationGroupByOperatorService { private static final Logger LOGGER = LoggerFactory.getLogger(AggregationGroupByOperatorService.class); private final List<String> _groupByColumns; private final int _groupByTopN; private final List<AggregationFunction> _aggregationFunctionList; public AggregationGroupByOperatorService(List<AggregationInfo> aggregationInfos, GroupBy groupByQuery) { _aggregationFunctionList = AggregationFunctionFactory.getAggregationFunction(aggregationInfos); _groupByColumns = groupByQuery.getColumns(); _groupByTopN = (int) groupByQuery.getTopN(); } public static List<Map<String, Serializable>> transformDataTableToGroupByResult(DataTable dataTable) { List<Map<String, Serializable>> aggregationGroupByResults = new ArrayList<Map<String, Serializable>>(); for (int i = 0; i < dataTable.getNumberOfRows(); i++) { String key = dataTable.getString(i, 0); Map<String, Serializable> hashMap = (Map<String, Serializable>) dataTable.getObject(i, 1); aggregationGroupByResults.add(hashMap); } return aggregationGroupByResults; } public List<AggregationFunction> getAggregationFunctionList() { return _aggregationFunctionList; } public List<Map<String, Serializable>> reduceGroupByOperators(Map<ServerInstance, DataTable> instanceResponseMap) { if ((instanceResponseMap == null) || instanceResponseMap.isEmpty()) { return null; } List<Map<String, Serializable>> reducedResult = null; for (DataTable toBeReducedGroupByResults : instanceResponseMap.values()) { if (reducedResult == null) { if (toBeReducedGroupByResults != null) { reducedResult = transformDataTableToGroupByResult(toBeReducedGroupByResults); } } else { List<Map<String, Serializable>> toBeReducedResult = transformDataTableToGroupByResult(toBeReducedGroupByResults); for (int i = 0; i < reducedResult.size(); ++i) { for (String key : toBeReducedResult.get(i).keySet()) { if (reducedResult.get(i).containsKey(key)) { reducedResult.get(i).put( key, 
_aggregationFunctionList.get(i).combineTwoValues(reducedResult.get(i).get(key), toBeReducedResult.get(i).get(key))); } else { reducedResult.get(i).put(key, toBeReducedResult.get(i).get(key)); } } } } } if (reducedResult != null) { for (int i = 0; i < reducedResult.size(); ++i) { Map<String, Serializable> functionLevelReducedResult = reducedResult.get(i); for (String key : functionLevelReducedResult.keySet()) { if (functionLevelReducedResult.get(key) != null) { functionLevelReducedResult.put(key, _aggregationFunctionList.get(i).reduce(Arrays.asList(functionLevelReducedResult.get(key)))); } } } } return reducedResult; } public List<JSONObject> renderGroupByOperators(List<Map<String, Serializable>> finalAggregationResult) { try { if (finalAggregationResult == null || finalAggregationResult.size() != _aggregationFunctionList.size()) { return null; } List<JSONObject> retJsonResultList = new ArrayList<JSONObject>(); for (int i = 0; i < _aggregationFunctionList.size(); ++i) { JSONArray groupByResultsArray = new JSONArray(); int groupSize = _groupByColumns.size(); Map<String, Serializable> reducedGroupByResult = finalAggregationResult.get(i); if (!reducedGroupByResult.isEmpty()) { PriorityQueue priorityQueue = getPriorityQueue(_aggregationFunctionList.get(i), reducedGroupByResult.values().iterator().next()); if (priorityQueue != null) { for (String groupedKey : reducedGroupByResult.keySet()) { priorityQueue.enqueue(new Pair(reducedGroupByResult.get(groupedKey), groupedKey)); if (priorityQueue.size() == (_groupByTopN + 1)) { priorityQueue.dequeue(); } } int realGroupSize = _groupByTopN; if (priorityQueue.size() < _groupByTopN) { realGroupSize = priorityQueue.size(); } for (int j = 0; j < realGroupSize; ++j) { JSONObject groupByResultObject = new JSONObject(); Pair res = (Pair) priorityQueue.dequeue(); groupByResultObject.put( "group", new JSONArray(((String) res.getSecond()).split( GroupByConstants.GroupByDelimiter.groupByMultiDelimeter.toString(), groupSize))); // if 
(res.getFirst() instanceof Number) { // groupByResultObject.put("value", df.format(res.getFirst())); // } else { // groupByResultObject.put("value", res.getFirst()); // } // groupByResultsArray.put(realGroupSize - 1 - j, groupByResultObject); groupByResultObject.put("value", _aggregationFunctionList.get(i).render((Serializable) res.getFirst()).get("value")); groupByResultsArray.put(realGroupSize - 1 - j, groupByResultObject); } } } JSONObject result = new JSONObject(); result.put("function", _aggregationFunctionList.get(i).getFunctionName()); result.put("groupByResult", groupByResultsArray); result.put("groupByColumns", new JSONArray(_groupByColumns)); retJsonResultList.add(result); } return retJsonResultList; } catch (JSONException e) { LOGGER.error("Caught exception while processing group by aggregation", e); Utils.rethrowException(e); throw new AssertionError("Should not reach this"); } } public void trimToSize(List<Map<String, Serializable>> aggregationGroupByResultList) { if (aggregationGroupByResultList == null) { return; } for (int i = 0; i < aggregationGroupByResultList.size(); ++i) { if (aggregationGroupByResultList.get(i).size() > (_groupByTopN * 20)) { trimToSize(_aggregationFunctionList.get(i), aggregationGroupByResultList.get(i), _groupByTopN * 5); } } } private void trimToSize(AggregationFunction aggregationFunction, Map<String, Serializable> aggregationGroupByResult, int trimSize) { PriorityQueue priorityQueue = getPriorityQueue(aggregationFunction, aggregationGroupByResult.values().iterator().next()); if (priorityQueue == null) { return; } for (String groupedKey : aggregationGroupByResult.keySet()) { priorityQueue.enqueue(new Pair(aggregationGroupByResult.get(groupedKey), groupedKey)); if (priorityQueue.size() == (_groupByTopN + 1)) { priorityQueue.dequeue(); } } for (int i = 0; i < (priorityQueue.size() - trimSize); ++i) { Pair res = (Pair) priorityQueue.dequeue(); aggregationGroupByResult.remove(res.getSecond()); } } private PriorityQueue 
getPriorityQueue(AggregationFunction aggregationFunction, Serializable sampleValue) { if (sampleValue instanceof Comparable) { if (aggregationFunction.getFunctionName().startsWith("min_")) { return new customPriorityQueue().getGroupedValuePairPriorityQueue((Comparable) sampleValue, true); } else { return new customPriorityQueue().getGroupedValuePairPriorityQueue((Comparable) sampleValue, false); } } return null; } class customPriorityQueue<T extends Comparable> { private PriorityQueue getGroupedValuePairPriorityQueue(T object, boolean isMinPriorityQueue) { if (isMinPriorityQueue) { return new ObjectArrayPriorityQueue<Pair<T, String>>(_groupByTopN + 1, new Comparator() { @Override public int compare(Object o1, Object o2) { if (((Pair<T, String>) o1).getFirst().compareTo(((Pair<T, String>) o2).getFirst()) < 0) { return 1; } else { if (((Pair<T, String>) o1).getFirst().compareTo(((Pair<T, String>) o2).getFirst()) > 0) { return -1; } } return 0; } }); } else { return new ObjectArrayPriorityQueue<Pair<T, String>>(_groupByTopN + 1, new Comparator() { @Override public int compare(Object o1, Object o2) { if (((Pair<T, String>) o1).getFirst().compareTo(((Pair<T, String>) o2).getFirst()) < 0) { return -1; } else { if (((Pair<T, String>) o1).getFirst().compareTo(((Pair<T, String>) o2).getFirst()) > 0) { return 1; } } return 0; } }); } } } }
import java.io.IOException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.concurrent.TimeUnit;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TimeZone;
import java.util.TreeSet;
import javax.xml.bind.DatatypeConverter;

import com.datastax.driver.core.*;

/**
 * Driver for a YouTube-workload experiment against Cassandra: initializes configuration and
 * cluster state, replays a request trace on simulated time, and measures read/write latency.
 * NOTE(review): relies on project classes (Conf, DC, Cass, YoutubeData, SimTime, LatMon,
 * ProgMon, Cons, Util) not visible in this file.
 */
public class AcornYoutube {
	public static void main(String[] args) throws Exception {
		try {
			Conf.Init(args);
			DC.Init();

			if (Conf.acornYoutubeOptions.test_number_of_reqs_per_dc) {
				YoutubeData.PrintNumReqsPerDc();
				return;
			}

			// Overlap Cass.Init() and YouTube.Load() to save time. Cons.P()s are
			// messed up, but not a big deal.
			Thread tCassInit = new Thread() {
				public void run() {
					try {
						Cass.Init();
					} catch (Exception e) {
						// Any init failure is fatal for the experiment; dump the trace and quit.
						System.out.printf("Exception: %s\n%s\n", e, Util.GetStackTrace(e));
						System.exit(1);
					}
				}
			};
			tCassInit.start();

			Thread tYoutubeDataLoad = new Thread() {
				public void run() {
					try {
						// This needs to be after DC.Init(), but can be overlapped with Cass.Init().
						YoutubeData.Load();
					} catch (Exception e) {
						System.out.printf("Exception: %s\n%s\n", e, Util.GetStackTrace(e));
						System.exit(1);
					}
				}
			};
			tYoutubeDataLoad.start();

			tCassInit.join();
			tYoutubeDataLoad.join();

			CreateSchema();

			MakeRequests();

			// Seems like leftover Cassandra cluster and session objects prevent the
			// process from terminating. Force quit. Don't think it's a big deal for
			// this experiment.
			System.exit(0);
		} catch (Exception e) {
			System.out.printf("Exception: %s\n%s\n", e, Util.GetStackTrace(e));
			System.exit(1);
		}
	}

	// Creates the experiment schema from the us-east DC only, then waits until
	// every DC observes it.
	private static void CreateSchema() throws Exception {
		// Note: Assume us-east is always there as a leader.
		if (Cass.LocalDC().equals("us-east")) {
			if (Cass.SchemaExist()) {
				Cons.P("Schema already exists.");
			} else {
				Cass.CreateSchema();
			}
		}
		Cass.WaitForSchemaCreation();
	}

	// Spawns the request worker threads, agrees on a cluster-wide start time, and
	// stops collecting after a computed maximum wait time.
	private static void MakeRequests() throws Exception {
		int numThreads = Conf.acornYoutubeOptions.num_threads;
		List<Thread> reqThreads = new ArrayList<Thread>();
		for (int i = 0; i < numThreads; i ++) {
			Thread t = new Thread(new ReqThread());
			reqThreads.add(t);
		}

		_AgreeOnStartTime();

		Cons.P("Making requests ...");
		for (Thread t: reqThreads)
			t.start();

		// Some requests never finishes when Cassandra fails to write or read.
		// Ignore them after maxWaitTime. All will be joined 2 seconds after they
		// are done.
		long maxWaitTime = (SimTime.GetStartSimulationTime() - System.currentTimeMillis())
			+ Conf.acornYoutubeOptions.simulation_time_dur_in_ms
			+ Conf.acornYoutubeOptions.read_req_delay_in_simulation_time_in_ms
			+ 2000;
		Cons.P(" maxWaitTime: %d ms", maxWaitTime);
		ProgMon.Start();

		Thread.sleep(maxWaitTime);

		// join(0, 1) waits at most 1 ns per thread, i.e. effectively does not block:
		// stragglers are deliberately abandoned after maxWaitTime (see comment above).
		// NOTE(review): the counter i below is incremented but never used.
		int i = 0;
		for (Thread t: reqThreads) {
			t.join(0, 1);
			i ++;
		}
		ProgMon.Stop();
	}

	// Worker: drains the shared request queue, sleeping until each request's
	// simulated time before issuing it.
	private static class ReqThread implements Runnable {
		public void run() {
			try {
				while (true) {
					// Non-blocking poll: a null means the trace is exhausted.
					YoutubeData.Req r = YoutubeData.allReqs.poll(0, TimeUnit.NANOSECONDS);
					if (r == null)
						break;

					//Cons.P(String.format("%s tid=%d", r, Thread.currentThread().getId()));
					if (r.type == YoutubeData.Req.Type.W) {
						SimTime.SleepUntilSimulatedTime(r);
						DbWriteMeasureTime(r);
					} else {
						SimTime.SleepUntilSimulatedTime(r);
						DbReadMeasureTime(r);
					}
				}
			} catch (Exception e) {
				// Better stop the process all together here.
				// com.datastax.driver.core.exceptions.NoHostAvailableException is an example.
				System.out.printf("Exception: %s\n%s\n", e, Util.GetStackTrace(e));
				System.exit(1);
			}
		}
	}

	// Issues one write (full or partial replication per config) and records its latency.
	private static void DbWriteMeasureTime(YoutubeData.Req r) throws Exception {
		long begin = System.nanoTime();
		if (Conf.acornYoutubeOptions.replication_type.equals("full")) {
			Cass.WriteYoutubeRegular(r);
		} else {
			Cass.WriteYoutubePartial(r);
		}
		long end = System.nanoTime();
		// Note: These 2 can be merged, when you have some time left.
		LatMon.Write(end - begin);
		ProgMon.Write();
	}

	// Issues one read (full replication, or partial with fetch-on-demand) and records its latency.
	private static void DbReadMeasureTime(YoutubeData.Req r) throws Exception {
		long begin = System.nanoTime();
		if (Conf.acornYoutubeOptions.replication_type.equals("full")) {
			Cass.ReadYoutubeRegular(r);
		} else {
			_FetchOnDemand(r);
		}
		long end = System.nanoTime();
		LatMon.Read(end - begin);
		ProgMon.Read();
	}

	// Agrees on a cluster-wide start time for the simulation.
	private static void _AgreeOnStartTime() throws Exception {
		// Agree on the future, start time.
		// - Issue an execution barrier and measure the time from the east.
		// - East post a reasonable future time and everyone polls the value.
		// - If the value is in a reasonable future, like at least 100 ms in the
		//   future, then go.
		// - Otherwise, throw an exception.
		try (Cons.MT _ = new Cons.MT("Agreeing on the start time ...")) {
			// This, the first one, could take long. Depending on the last operation.
			Cass.ExecutionBarrier();
			// From the second one, it oscillates with 2 nodes. With more than 2,
			// it won't be as big.
			long maxLapTime = Math.max(Cass.ExecutionBarrier(), Cass.ExecutionBarrier());
			Cons.P("maxLapTime=%d ms", maxLapTime);

			// System.currentTimeMillis() is the time from 1970 in UTC. Good!
			long startTime;
			if (Conf.acornYoutubeOptions.use_acorn_server) {
				if (Cass.LocalDC().equals("us-east")) {
					// Leader proposes now + 5 lap times; followers poll until they read it.
					long now = System.currentTimeMillis();
					startTime = now + maxLapTime * 5;
					Cass.WriteStartTime(startTime);
				} else {
					startTime = Cass.ReadStartTimeUntilSucceed();
				}
			} else {
				long now = System.currentTimeMillis();
				startTime = now + 100;
			}
			SimTime.SetStartSimulationTime(startTime);
		}
	}

	// Partial-replication read path: on a local miss, locates the object's DC,
	// reads it there, and replicates it locally.
	private static void _FetchOnDemand(YoutubeData.Req r) throws Exception {
		if (! Conf.acornYoutubeOptions.use_acorn_server)
			return;

		List<Row> rows = Cass.ReadYoutubePartial(r);
		if (rows == null) {
			// Read timeout. Report to the ProgMon and ignore for now.
			return;
		}
		if (rows.size() == 1)
			return;
		if (rows.size() != 0)
			throw new RuntimeException(String.format("Unexpected: rows.size()=%d", rows.size()));

		// Get a DC where the object is
		String dc = Cass.GetObjLoc(r.vid);
		if (dc == null) {
			ProgMon.ReadMissDc();
			return;
		}

		// Possible since the updates to acorn.*_obj_loc keyspace and acorn.*_pr
		// keyspace are asynchronous.
		rows = Cass.ReadYoutubePartial(r, dc);
		if (rows == null) {
			// Read timeout
			return;
		}
		if (rows.size() == 0) {
			ProgMon.ReadMissObj();
			return;
		}

		// Replicate the remotely-found object into the local DC.
		Row row = rows.get(0);
		String vid = row.getString("video_id");
		String videoUploader = row.getString("uid");
		Set<String> topics = row.getSet("topics", String.class);
		ByteBuffer extraData = row.getBytes("extra_data");
		Cass.WriteYoutubePartial(vid, videoUploader, topics, extraData);
		ProgMon.FetchOnDemand();
	}
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.apple; import com.dd.plist.NSArray; import com.dd.plist.NSNumber; import com.dd.plist.NSObject; import com.dd.plist.NSString; import com.facebook.buck.apple.platform_type.ApplePlatformType; import com.facebook.buck.apple.toolchain.AppleCxxPlatform; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.apple.toolchain.AppleSdk; import com.facebook.buck.apple.toolchain.CodeSignIdentity; import com.facebook.buck.apple.toolchain.CodeSignIdentityStore; import com.facebook.buck.apple.toolchain.ProvisioningProfileMetadata; import com.facebook.buck.apple.toolchain.ProvisioningProfileStore; import com.facebook.buck.cxx.CxxPreprocessorInput; import com.facebook.buck.cxx.HasAppleDebugSymbolDeps; import com.facebook.buck.cxx.NativeTestable; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.file.WriteFile; import com.facebook.buck.io.BuildCellRelativePath; import com.facebook.buck.io.file.MorePaths; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.rules.AbstractBuildRuleWithDeclaredAndExtraDeps; import com.facebook.buck.rules.AddToRuleKey; import com.facebook.buck.rules.BinaryBuildRule; import com.facebook.buck.rules.BuildContext; import 
com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildableContext; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.ExplicitBuildTargetSourcePath; import com.facebook.buck.rules.HasRuntimeDeps; import com.facebook.buck.rules.PathSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.step.Step; import com.facebook.buck.step.fs.CopyStep; import com.facebook.buck.step.fs.FindAndReplaceStep; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.step.fs.MkdirStep; import com.facebook.buck.step.fs.MoveStep; import com.facebook.buck.step.fs.RmStep; import com.facebook.buck.step.fs.WriteFileStep; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.types.Either; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.hash.HashCode; import com.google.common.io.Files; import com.google.common.util.concurrent.Futures; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Creates a bundle: a directory containing files and subdirectories, described by an Info.plist. 
 */
public class AppleBundle extends AbstractBuildRuleWithDeclaredAndExtraDeps
    implements NativeTestable, BuildRuleWithBinary, HasRuntimeDeps, BinaryBuildRule {

  private static final Logger LOG = Logger.get(AppleBundle.class);

  public static final String CODE_SIGN_ENTITLEMENTS = "CODE_SIGN_ENTITLEMENTS";

  private static final String FRAMEWORK_EXTENSION =
      AppleBundleExtension.FRAMEWORK.toFileExtension();

  private static final String PP_DRY_RUN_RESULT_FILE = "BUCK_pp_dry_run.plist";

  private static final String CODE_SIGN_DRY_RUN_ARGS_FILE = "BUCK_code_sign_args.plist";

  private static final String CODE_SIGN_DRY_RUN_ENTITLEMENTS_FILE = "BUCK_code_sign_entitlements.plist";

  // Fields annotated @AddToRuleKey participate in the rule-key hash (cache invalidation).
  @AddToRuleKey private final String extension;

  @AddToRuleKey private final Optional<String> productName;

  @AddToRuleKey private final SourcePath infoPlist;

  @AddToRuleKey private final ImmutableMap<String, String> infoPlistSubstitutions;

  @AddToRuleKey private final Optional<SourcePath> entitlementsFile;

  @AddToRuleKey private final Optional<BuildRule> binary;

  @AddToRuleKey private final Optional<AppleDsym> appleDsym;

  @AddToRuleKey private final ImmutableSet<BuildRule> extraBinaries;

  @AddToRuleKey private final AppleBundleDestinations destinations;

  @AddToRuleKey private final AppleBundleResources resources;

  @AddToRuleKey private final Set<SourcePath> frameworks;

  @AddToRuleKey private final Tool ibtool;

  @AddToRuleKey private final ImmutableSortedSet<BuildTarget> tests;

  @AddToRuleKey private final ApplePlatform platform;

  @AddToRuleKey private final String sdkName;

  @AddToRuleKey private final String sdkVersion;

  @AddToRuleKey private final ProvisioningProfileStore provisioningProfileStore;

  @AddToRuleKey private final Supplier<ImmutableList<CodeSignIdentity>> codeSignIdentitiesSupplier;

  @AddToRuleKey private final Optional<Tool> codesignAllocatePath;

  @AddToRuleKey private final Tool codesign;

  @AddToRuleKey private final Optional<Tool> swiftStdlibTool;

  @AddToRuleKey private final boolean dryRunCodeSigning;

  @AddToRuleKey private final ImmutableList<String> codesignFlags;

  @AddToRuleKey private final Optional<String> codesignIdentitySubjectName;

  // Need to use String here as RuleKeyBuilder requires that paths exist to compute hashes.
  @AddToRuleKey private final ImmutableMap<SourcePath, String> extensionBundlePaths;

  // Fields below are derived from rule-keyed inputs and deliberately excluded from the rule key.
  private final Optional<AppleAssetCatalog> assetCatalog;
  private final Optional<CoreDataModel> coreDataModel;
  private final Optional<SceneKitAssets> sceneKitAssets;
  private final Optional<String> platformBuildVersion;
  private final Optional<String> xcodeVersion;
  private final Optional<String> xcodeBuildVersion;
  private final Path sdkPath;

  private final String minOSVersion;
  private final String binaryName;
  private final Path bundleRoot;
  private final Path binaryPath;
  private final Path bundleBinaryPath;
  private final ImmutableList<String> ibtoolModuleParams;

  private final boolean hasBinary;
  private final boolean cacheable;
  private final boolean verifyResources;

  AppleBundle(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver buildRuleResolver,
      Either<AppleBundleExtension, String> extension,
      Optional<String> productName,
      SourcePath infoPlist,
      Map<String, String> infoPlistSubstitutions,
      Optional<BuildRule> binary,
      Optional<AppleDsym> appleDsym,
      ImmutableSet<BuildRule> extraBinaries,
      AppleBundleDestinations destinations,
      AppleBundleResources resources,
      ImmutableMap<SourcePath, String> extensionBundlePaths,
      Set<SourcePath> frameworks,
      AppleCxxPlatform appleCxxPlatform,
      Optional<AppleAssetCatalog> assetCatalog,
      Optional<CoreDataModel> coreDataModel,
      Optional<SceneKitAssets> sceneKitAssets,
      Set<BuildTarget> tests,
      CodeSignIdentityStore codeSignIdentityStore,
      ProvisioningProfileStore provisioningProfileStore,
      boolean dryRunCodeSigning,
      boolean cacheable,
      boolean verifyResources,
      ImmutableList<String> codesignFlags,
      Optional<String> codesignIdentity,
      Optional<Boolean> ibtoolModuleFlag) {
    super(buildTarget, projectFilesystem, params);
    // Known extensions (.app, .framework, ...) come from the enum; custom ones pass through.
    this.extension =
        extension.isLeft() ? extension.getLeft().toFileExtension() : extension.getRight();
    this.productName = productName;
    this.infoPlist = infoPlist;
    this.infoPlistSubstitutions = ImmutableMap.copyOf(infoPlistSubstitutions);
    this.binary = binary;
    // The entitlements file is discovered through the binary's metadata, when present.
    Optional<SourcePath> entitlementsFile = Optional.empty();
    if (binary.isPresent()) {
      Optional<HasEntitlementsFile> hasEntitlementsFile =
          buildRuleResolver.requireMetadata(
              binary.get().getBuildTarget(), HasEntitlementsFile.class);
      if (hasEntitlementsFile.isPresent()) {
        entitlementsFile = hasEntitlementsFile.get().getEntitlementsFile();
      }
    }
    this.entitlementsFile = entitlementsFile;

    this.appleDsym = appleDsym;
    this.extraBinaries = extraBinaries;
    this.destinations = destinations;
    this.resources = resources;
    this.extensionBundlePaths = extensionBundlePaths;
    this.frameworks = frameworks;
    this.ibtool = appleCxxPlatform.getIbtool();
    this.assetCatalog = assetCatalog;
    this.coreDataModel = coreDataModel;
    this.sceneKitAssets = sceneKitAssets;
    this.binaryName = getBinaryName(getBuildTarget(), this.productName);
    this.bundleRoot =
        getBundleRoot(getProjectFilesystem(), getBuildTarget(), this.binaryName, this.extension);
    this.binaryPath = this.destinations.getExecutablesPath().resolve(this.binaryName);
    this.tests = ImmutableSortedSet.copyOf(tests);
    AppleSdk sdk = appleCxxPlatform.getAppleSdk();
    this.platform = sdk.getApplePlatform();
    this.sdkName = sdk.getName();
    this.sdkPath = appleCxxPlatform.getAppleSdkPaths().getSdkPath();
    this.sdkVersion = sdk.getVersion();
    this.minOSVersion = appleCxxPlatform.getMinVersion();
    this.platformBuildVersion = appleCxxPlatform.getBuildVersion();
    this.xcodeBuildVersion = appleCxxPlatform.getXcodeBuildVersion();
    this.xcodeVersion = appleCxxPlatform.getXcodeVersion();
    this.dryRunCodeSigning = dryRunCodeSigning;
    this.cacheable = cacheable;
    this.verifyResources = verifyResources;
    this.codesignFlags = codesignFlags;
    this.codesignIdentitySubjectName = codesignIdentity;
    this.ibtoolModuleParams =
        ibtoolModuleFlag.orElse(false)
            ? ImmutableList.of("--module", this.binaryName)
            : ImmutableList.of();

    bundleBinaryPath = bundleRoot.resolve(binaryPath);
    hasBinary = binary.isPresent() && binary.get().getSourcePathToOutput() != null;

    // Only resolve signing identities / provisioning profiles when real signing is required;
    // otherwise keep the rule key free of signing state.
    if (needCodeSign() && !adHocCodeSignIsSufficient()) {
      this.provisioningProfileStore = provisioningProfileStore;
      this.codeSignIdentitiesSupplier = codeSignIdentityStore.getIdentitiesSupplier();
    } else {
      this.provisioningProfileStore = ProvisioningProfileStore.empty();
      this.codeSignIdentitiesSupplier = Suppliers.ofInstance(ImmutableList.of());
    }
    this.codesignAllocatePath = appleCxxPlatform.getCodesignAllocate();
    this.codesign =
        appleCxxPlatform.getCodesignProvider().resolve(buildRuleResolver);
    this.swiftStdlibTool =
        appleCxxPlatform.getSwiftPlatform().isPresent()
            ? appleCxxPlatform.getSwiftPlatform().get().getSwiftStdlibTool()
            : Optional.empty();
  }

  /** Returns the explicit product name when set, otherwise the target's short name. */
  public static String getBinaryName(BuildTarget buildTarget, Optional<String> productName) {
    if (productName.isPresent()) {
      return productName.get();
    } else {
      return buildTarget.getShortName();
    }
  }

  /** Computes the bundle output directory, e.g. {@code <gen>/<name>.app}. */
  public static Path getBundleRoot(
      ProjectFilesystem filesystem, BuildTarget buildTarget, String binaryName, String extension) {
    return BuildTargets.getGenPath(filesystem, buildTarget, "%s")
        .resolve(binaryName + "." + extension);
  }

  public String getExtension() {
    return extension;
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), bundleRoot);
  }

  public Path getInfoPlistPath() {
    return getMetadataPath().resolve("Info.plist");
  }

  public Path getUnzippedOutputFilePathToBinary() {
    return this.binaryPath;
  }

  private Path getMetadataPath() {
    return bundleRoot.resolve(destinations.getMetadataPath());
  }

  public String getPlatformName() {
    return platform.getName();
  }

  public Optional<BuildRule> getBinary() {
    return binary;
  }

  public Optional<AppleDsym> getAppleDsym() {
    return appleDsym;
  }

  // A .app bundle whose binary carries the legacy watch flavor.
  public boolean isLegacyWatchApp() {
    return extension.equals(AppleBundleExtension.APP.toFileExtension())
        && binary.isPresent()
        && binary
            .get()
            .getBuildTarget()
            .getFlavors()
            .contains(AppleBinaryDescription.LEGACY_WATCH_FLAVOR);
  }

  // Assembles the bundle: cleans the output dir, copies compiled assets/resources,
  // processes Info.plist, copies the binary and dSYM, then handles nested bundles and
  // resource variants. (Method continues beyond this file chunk.)
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();

    stepsBuilder.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), bundleRoot)));

    Path resourcesDestinationPath = bundleRoot.resolve(this.destinations.getResourcesPath());
    // Copy the compiled asset catalog output into Resources/.
    if (assetCatalog.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(), resourcesDestinationPath)));
      Path bundleDir = assetCatalog.get().getOutputDir();
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              bundleDir,
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    // Copy the compiled Core Data model into Resources/.
    if (coreDataModel.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(), resourcesDestinationPath)));
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              context
                  .getSourcePathResolver()
                  .getRelativePath(coreDataModel.get().getSourcePathToOutput()),
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    // Copy compiled SceneKit assets into Resources/.
    if (sceneKitAssets.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(), resourcesDestinationPath)));
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              context
                  .getSourcePathResolver()
                  .getRelativePath(sceneKitAssets.get().getSourcePathToOutput()),
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    Path metadataPath = getMetadataPath();

    Path infoPlistInputPath = context.getSourcePathResolver().getAbsolutePath(infoPlist);
    Path infoPlistSubstitutionTempPath =
        BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s.plist");
    Path infoPlistOutputPath = metadataPath.resolve("Info.plist");

    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), metadataPath)));

    if (needsPkgInfoFile()) {
      // TODO(bhamiltoncx): This is only appropriate for .app bundles.
      stepsBuilder.add(
          new WriteFileStep(
              getProjectFilesystem(),
              "APPLWRUN",
              metadataPath.resolve("PkgInfo"),
              /* executable */ false));
    }

    // Expand variables in Info.plist, merge asset-catalog keys, and write the binary plist.
    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(),
                getProjectFilesystem(),
                infoPlistSubstitutionTempPath.getParent())),
        new FindAndReplaceStep(
            getProjectFilesystem(),
            infoPlistInputPath,
            infoPlistSubstitutionTempPath,
            InfoPlistSubstitution.createVariableExpansionFunction(
                withDefaults(
                    infoPlistSubstitutions,
                    ImmutableMap.of(
                        "EXECUTABLE_NAME", binaryName,
                        "PRODUCT_NAME", binaryName)))),
        new PlistProcessStep(
            getProjectFilesystem(),
            infoPlistSubstitutionTempPath,
            assetCatalog.isPresent()
                ? Optional.of(assetCatalog.get().getOutputPlist())
                : Optional.empty(),
            infoPlistOutputPath,
            getInfoPlistAdditionalKeys(),
            getInfoPlistOverrideKeys(),
            PlistProcessStep.OutputFormat.BINARY));

    if (hasBinary) {
      appendCopyBinarySteps(stepsBuilder, context);
      appendCopyDsymStep(stepsBuilder, buildableContext, context);
    }

    if (!Iterables.isEmpty(
        Iterables.concat(
            resources.getResourceDirs(),
            resources.getDirsContainingResourceDirs(),
            resources.getResourceFiles()))) {
      if (verifyResources) {
        verifyResourceConflicts(resources, context.getSourcePathResolver());
      }
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(), resourcesDestinationPath)));
      // Resource dirs are copied as directories; "dirs containing resource dirs" contribute
      // their contents; individual files may need extra processing (see helper).
      for (SourcePath dir : resources.getResourceDirs()) {
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                context.getSourcePathResolver().getAbsolutePath(dir),
                resourcesDestinationPath,
                CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
      }
      for (SourcePath dir : resources.getDirsContainingResourceDirs()) {
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                context.getSourcePathResolver().getAbsolutePath(dir),
                resourcesDestinationPath,
                CopyStep.DirectoryMode.CONTENTS_ONLY));
      }
      for (SourcePath file : resources.getResourceFiles()) {
        Path resolvedFilePath = context.getSourcePathResolver().getAbsolutePath(file);
        Path destinationPath = resourcesDestinationPath.resolve(resolvedFilePath.getFileName());
        addResourceProcessingSteps(
            context.getSourcePathResolver(), resolvedFilePath, destinationPath, stepsBuilder);
      }
    }

    ImmutableList.Builder<Path> codeSignOnCopyPathsBuilder = ImmutableList.builder();

    addStepsToCopyExtensionBundlesDependencies(context, stepsBuilder, codeSignOnCopyPathsBuilder);

    // Localized resource variants live in per-locale parent directories.
    for (SourcePath variantSourcePath : resources.getResourceVariantFiles()) {
      Path variantFilePath = context.getSourcePathResolver().getAbsolutePath(variantSourcePath);

      Path variantDirectory = variantFilePath.getParent();
      if (variantDirectory == null ||
!variantDirectory.toString().endsWith(".lproj")) { throw new HumanReadableException( "Variant files have to be in a directory with name ending in '.lproj', " + "but '%s' is not.", variantFilePath); } Path bundleVariantDestinationPath = resourcesDestinationPath.resolve(variantDirectory.getFileName()); stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), bundleVariantDestinationPath))); Path destinationPath = bundleVariantDestinationPath.resolve(variantFilePath.getFileName()); addResourceProcessingSteps( context.getSourcePathResolver(), variantFilePath, destinationPath, stepsBuilder); } if (!frameworks.isEmpty()) { Path frameworksDestinationPath = bundleRoot.resolve(this.destinations.getFrameworksPath()); stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), frameworksDestinationPath))); for (SourcePath framework : frameworks) { Path srcPath = context.getSourcePathResolver().getAbsolutePath(framework); stepsBuilder.add( CopyStep.forDirectory( getProjectFilesystem(), srcPath, frameworksDestinationPath, CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS)); codeSignOnCopyPathsBuilder.add(frameworksDestinationPath.resolve(srcPath.getFileName())); } } if (needCodeSign()) { Optional<Path> signingEntitlementsTempPath; Supplier<CodeSignIdentity> codeSignIdentitySupplier; if (adHocCodeSignIsSufficient()) { signingEntitlementsTempPath = Optional.empty(); CodeSignIdentity identity = codesignIdentitySubjectName .map(id -> CodeSignIdentity.ofAdhocSignedWithSubjectCommonName(id)) .orElse(CodeSignIdentity.AD_HOC); codeSignIdentitySupplier = () -> identity; } else { // Copy the .mobileprovision file if the platform requires it, and sign the executable. Optional<Path> entitlementsPlist = Optional.empty(); // Try to use the entitlements file specified in the bundle's binary first. 
entitlementsPlist = entitlementsFile.map(p -> context.getSourcePathResolver().getAbsolutePath(p)); // Fall back to getting CODE_SIGN_ENTITLEMENTS from info_plist_substitutions. if (!entitlementsPlist.isPresent()) { final Path srcRoot = getProjectFilesystem().getRootPath().resolve(getBuildTarget().getBasePath()); Optional<String> entitlementsPlistString = InfoPlistSubstitution.getVariableExpansionForPlatform( CODE_SIGN_ENTITLEMENTS, platform.getName(), withDefaults( infoPlistSubstitutions, ImmutableMap.of( "SOURCE_ROOT", srcRoot.toString(), "SRCROOT", srcRoot.toString()))); entitlementsPlist = entitlementsPlistString.map( entitlementsPlistName -> { ProjectFilesystem filesystem = getProjectFilesystem(); Path originalEntitlementsPlist = srcRoot.resolve(Paths.get(entitlementsPlistName)); Path entitlementsPlistWithSubstitutions = BuildTargets.getScratchPath( filesystem, getBuildTarget(), "%s-Entitlements.plist"); stepsBuilder.add( new FindAndReplaceStep( filesystem, originalEntitlementsPlist, entitlementsPlistWithSubstitutions, InfoPlistSubstitution.createVariableExpansionFunction( infoPlistSubstitutions))); return filesystem.resolve(entitlementsPlistWithSubstitutions); }); } signingEntitlementsTempPath = Optional.of( BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s.xcent")); final Path dryRunResultPath = bundleRoot.resolve(PP_DRY_RUN_RESULT_FILE); final ProvisioningProfileCopyStep provisioningProfileCopyStep = new ProvisioningProfileCopyStep( getProjectFilesystem(), infoPlistOutputPath, platform, Optional.empty(), // Provisioning profile UUID -- find automatically. entitlementsPlist, provisioningProfileStore, resourcesDestinationPath.resolve("embedded.mobileprovision"), dryRunCodeSigning ? bundleRoot.resolve(CODE_SIGN_DRY_RUN_ENTITLEMENTS_FILE) : signingEntitlementsTempPath.get(), codeSignIdentitiesSupplier, dryRunCodeSigning ? 
Optional.of(dryRunResultPath) : Optional.empty()); stepsBuilder.add(provisioningProfileCopyStep); codeSignIdentitySupplier = () -> { // Using getUnchecked here because the previous step should already throw if exception // occurred, and this supplier would never be evaluated. Optional<ProvisioningProfileMetadata> selectedProfile = Futures.getUnchecked( provisioningProfileCopyStep.getSelectedProvisioningProfileFuture()); if (!selectedProfile.isPresent()) { // This should only happen in dry-run codesign mode (since otherwise an exception // would have been thrown already.) Still, we need to return *something*. Preconditions.checkState(dryRunCodeSigning); return CodeSignIdentity.AD_HOC; } ImmutableSet<HashCode> fingerprints = selectedProfile.get().getDeveloperCertificateFingerprints(); if (fingerprints.isEmpty()) { // No constraints, pick an arbitrary identity. // If no identities are available, use an ad-hoc identity. return Iterables.getFirst( codeSignIdentitiesSupplier.get(), CodeSignIdentity.AD_HOC); } for (CodeSignIdentity identity : codeSignIdentitiesSupplier.get()) { if (identity.getFingerprint().isPresent() && fingerprints.contains(identity.getFingerprint().get())) { return identity; } } throw new HumanReadableException( "No code sign identity available for provisioning profile: %s\n" + "Profile requires an identity with one of the following SHA1 fingerprints " + "available in your keychain: \n %s", selectedProfile.get().getProfilePath(), Joiner.on("\n ").join(fingerprints)); }; } addSwiftStdlibStepIfNeeded( context.getSourcePathResolver(), bundleRoot.resolve(destinations.getFrameworksPath()), dryRunCodeSigning ? Optional.<Supplier<CodeSignIdentity>>empty() : Optional.of(codeSignIdentitySupplier), stepsBuilder, false /* is for packaging? 
*/); for (BuildRule extraBinary : extraBinaries) { Path outputPath = getBundleBinaryPathForBuildRule(extraBinary); codeSignOnCopyPathsBuilder.add(outputPath); } for (Path codeSignOnCopyPath : codeSignOnCopyPathsBuilder.build()) { stepsBuilder.add( new CodeSignStep( getProjectFilesystem(), context.getSourcePathResolver(), codeSignOnCopyPath, Optional.empty(), codeSignIdentitySupplier, codesign, codesignAllocatePath, dryRunCodeSigning ? Optional.of(codeSignOnCopyPath.resolve(CODE_SIGN_DRY_RUN_ARGS_FILE)) : Optional.empty(), codesignFlags)); } stepsBuilder.add( new CodeSignStep( getProjectFilesystem(), context.getSourcePathResolver(), bundleRoot, signingEntitlementsTempPath, codeSignIdentitySupplier, codesign, codesignAllocatePath, dryRunCodeSigning ? Optional.of(bundleRoot.resolve(CODE_SIGN_DRY_RUN_ARGS_FILE)) : Optional.empty(), codesignFlags)); } else { addSwiftStdlibStepIfNeeded( context.getSourcePathResolver(), bundleRoot.resolve(destinations.getFrameworksPath()), Optional.<Supplier<CodeSignIdentity>>empty(), stepsBuilder, false /* is for packaging? */); } // Ensure the bundle directory is archived so we can fetch it later. 
buildableContext.recordArtifact( context.getSourcePathResolver().getRelativePath(getSourcePathToOutput())); return stepsBuilder.build(); } private void verifyResourceConflicts( AppleBundleResources resources, SourcePathResolver resolver) { // Ensure there are no resources that will overwrite each other // TODO: handle ResourceDirsContainingResourceDirs Set<Path> resourcePaths = new HashSet<>(); for (SourcePath path : Iterables.concat(resources.getResourceDirs(), resources.getResourceFiles())) { Path pathInBundle = resolver.getRelativePath(path).getFileName(); if (resourcePaths.contains(pathInBundle)) { throw new HumanReadableException( "Bundle contains multiple resources with path %s", pathInBundle); } else { resourcePaths.add(pathInBundle); } } } private boolean needsPkgInfoFile() { if (extension.equals(AppleBundleExtension.XPC.toFileExtension())) { return false; } return true; } private void appendCopyBinarySteps( ImmutableList.Builder<Step> stepsBuilder, BuildContext context) { Preconditions.checkArgument(hasBinary); final Path binaryOutputPath = context .getSourcePathResolver() .getRelativePath(Preconditions.checkNotNull(binary.get().getSourcePathToOutput())); ImmutableMap.Builder<Path, Path> binariesBuilder = ImmutableMap.builder(); binariesBuilder.put(bundleBinaryPath, binaryOutputPath); for (BuildRule extraBinary : extraBinaries) { Path outputPath = context.getSourcePathResolver().getRelativePath(extraBinary.getSourcePathToOutput()); Path bundlePath = getBundleBinaryPathForBuildRule(extraBinary); binariesBuilder.put(bundlePath, outputPath); } copyBinariesIntoBundle(stepsBuilder, context, binariesBuilder.build()); copyAnotherCopyOfWatchKitStub(stepsBuilder, context, binaryOutputPath); } private Path getBundleBinaryPathForBuildRule(BuildRule buildRule) { BuildTarget unflavoredTarget = buildRule.getBuildTarget().withFlavors(); String binaryName = getBinaryName(unflavoredTarget, Optional.empty()); Path pathRelativeToBundleRoot = 
destinations.getExecutablesPath().resolve(binaryName); return bundleRoot.resolve(pathRelativeToBundleRoot); } /** * @param binariesMap A map from destination to source. Destination is deliberately used as a key * prevent multiple sources overwriting the same destination. */ private void copyBinariesIntoBundle( ImmutableList.Builder<Step> stepsBuilder, BuildContext context, ImmutableMap<Path, Path> binariesMap) { stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), bundleRoot.resolve(this.destinations.getExecutablesPath())))); binariesMap.forEach( (binaryBundlePath, binaryOutputPath) -> { stepsBuilder.add( CopyStep.forFile(getProjectFilesystem(), binaryOutputPath, binaryBundlePath)); }); } private void copyAnotherCopyOfWatchKitStub( ImmutableList.Builder<Step> stepsBuilder, BuildContext context, Path binaryOutputPath) { if ((isLegacyWatchApp() || platform.getName().contains("watch")) && binary.get() instanceof WriteFile) { final Path watchKitStubDir = bundleRoot.resolve("_WatchKitStub"); stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), watchKitStubDir)), CopyStep.forFile( getProjectFilesystem(), binaryOutputPath, watchKitStubDir.resolve("WK"))); } } private void appendCopyDsymStep( ImmutableList.Builder<Step> stepsBuilder, BuildableContext buildableContext, BuildContext buildContext) { if (appleDsym.isPresent()) { stepsBuilder.add( CopyStep.forDirectory( getProjectFilesystem(), buildContext .getSourcePathResolver() .getRelativePath(appleDsym.get().getSourcePathToOutput()), bundleRoot.getParent(), CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS)); appendDsymRenameStepToMatchBundleName(stepsBuilder, buildableContext, buildContext); } } private void appendDsymRenameStepToMatchBundleName( ImmutableList.Builder<Step> stepsBuilder, BuildableContext buildableContext, BuildContext buildContext) { 
Preconditions.checkArgument(hasBinary && appleDsym.isPresent()); // rename dSYM bundle to match bundle name Path dsymPath = buildContext .getSourcePathResolver() .getRelativePath(appleDsym.get().getSourcePathToOutput()); Path dsymSourcePath = bundleRoot.getParent().resolve(dsymPath.getFileName()); Path dsymDestinationPath = bundleRoot .getParent() .resolve(bundleRoot.getFileName() + "." + AppleBundleExtension.DSYM.toFileExtension()); stepsBuilder.add( RmStep.of( BuildCellRelativePath.fromCellRelativePath( buildContext.getBuildCellRootPath(), getProjectFilesystem(), dsymDestinationPath)) .withRecursive(true)); stepsBuilder.add(new MoveStep(getProjectFilesystem(), dsymSourcePath, dsymDestinationPath)); String dwarfFilename = AppleDsym.getDwarfFilenameForDsymTarget(appleDsym.get().getBuildTarget()); // rename DWARF file inside dSYM bundle to match bundle name Path dwarfFolder = dsymDestinationPath.resolve(AppleDsym.DSYM_DWARF_FILE_FOLDER); Path dwarfSourcePath = dwarfFolder.resolve(dwarfFilename); Path dwarfDestinationPath = dwarfFolder.resolve(MorePaths.getNameWithoutExtension(bundleRoot)); stepsBuilder.add(new MoveStep(getProjectFilesystem(), dwarfSourcePath, dwarfDestinationPath)); // record dSYM so we can fetch it from cache buildableContext.recordArtifact(dsymDestinationPath); } private void addStepsToCopyExtensionBundlesDependencies( BuildContext context, ImmutableList.Builder<Step> stepsBuilder, ImmutableList.Builder<Path> codeSignOnCopyPathsBuilder) { for (Map.Entry<SourcePath, String> entry : extensionBundlePaths.entrySet()) { Path srcPath = context.getSourcePathResolver().getAbsolutePath(entry.getKey()); Path destPath = bundleRoot.resolve(entry.getValue()); stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), destPath))); stepsBuilder.add( CopyStep.forDirectory( getProjectFilesystem(), srcPath, destPath, CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS)); if 
(srcPath.toString().endsWith("." + FRAMEWORK_EXTENSION)) { codeSignOnCopyPathsBuilder.add(destPath.resolve(srcPath.getFileName())); } } } public static ImmutableMap<String, String> withDefaults( ImmutableMap<String, String> map, ImmutableMap<String, String> defaults) { ImmutableMap.Builder<String, String> builder = ImmutableMap.<String, String>builder().putAll(map); for (ImmutableMap.Entry<String, String> entry : defaults.entrySet()) { if (!map.containsKey(entry.getKey())) { builder = builder.put(entry.getKey(), entry.getValue()); } } return builder.build(); } private boolean needsLSRequiresIPhoneOSInfoPlistKeyOnMac() { return !extension.equals(AppleBundleExtension.XPC.toFileExtension()); } private ImmutableMap<String, NSObject> getInfoPlistOverrideKeys() { ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder(); if (platform.getType() == ApplePlatformType.MAC) { if (needsLSRequiresIPhoneOSInfoPlistKeyOnMac()) { keys.put("LSRequiresIPhoneOS", new NSNumber(false)); } } else if (!platform.getType().isWatch() && !isLegacyWatchApp()) { keys.put("LSRequiresIPhoneOS", new NSNumber(true)); } return keys.build(); } private boolean needsAppInfoPlistKeysOnMac() { if (extension.equals(AppleBundleExtension.XPC.toFileExtension())) { // XPC bundles on macOS don't require app-specific keys // (which also confuses Finder in displaying the XPC bundles as apps) return false; } return true; } private ImmutableMap<String, NSObject> getInfoPlistAdditionalKeys() { ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder(); switch (platform.getType()) { case MAC: if (needsAppInfoPlistKeysOnMac()) { keys.put("NSHighResolutionCapable", new NSNumber(true)); keys.put("NSSupportsAutomaticGraphicsSwitching", new NSNumber(true)); } keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("MacOSX"))); break; case IOS_DEVICE: keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("iPhoneOS"))); break; case IOS_SIMULATOR: 
keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("iPhoneSimulator"))); break; case WATCH_DEVICE: if (!isLegacyWatchApp()) { keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("WatchOS"))); } break; case WATCH_SIMULATOR: if (!isLegacyWatchApp()) { keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("WatchSimulator"))); } break; case TV_DEVICE: case TV_SIMULATOR: case UNKNOWN: break; } keys.put("DTPlatformName", new NSString(platform.getName())); keys.put("DTPlatformVersion", new NSString(sdkVersion)); keys.put("DTSDKName", new NSString(sdkName + sdkVersion)); keys.put("MinimumOSVersion", new NSString(minOSVersion)); if (platformBuildVersion.isPresent()) { keys.put("DTPlatformBuild", new NSString(platformBuildVersion.get())); keys.put("DTSDKBuild", new NSString(platformBuildVersion.get())); } if (xcodeBuildVersion.isPresent()) { keys.put("DTXcodeBuild", new NSString(xcodeBuildVersion.get())); } if (xcodeVersion.isPresent()) { keys.put("DTXcode", new NSString(xcodeVersion.get())); } return keys.build(); } public void addSwiftStdlibStepIfNeeded( SourcePathResolver resolver, Path destinationPath, Optional<Supplier<CodeSignIdentity>> codeSignIdentitySupplier, ImmutableList.Builder<Step> stepsBuilder, boolean isForPackaging) { // It's apparently safe to run this even on a non-swift bundle (in that case, no libs // are copied over). if (swiftStdlibTool.isPresent()) { ImmutableList.Builder<String> swiftStdlibCommand = ImmutableList.builder(); swiftStdlibCommand.addAll(swiftStdlibTool.get().getCommandPrefix(resolver)); swiftStdlibCommand.add( "--scan-executable", bundleBinaryPath.toString(), "--scan-folder", bundleRoot.resolve(this.destinations.getFrameworksPath()).toString(), "--scan-folder", bundleRoot.resolve(destinations.getPlugInsPath()).toString()); String tempDirPattern = isForPackaging ? 
"__swift_packaging_temp__%s" : "__swift_temp__%s"; stepsBuilder.add( new SwiftStdlibStep( getProjectFilesystem().getRootPath(), BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), tempDirPattern), this.sdkPath, destinationPath, swiftStdlibCommand.build(), codeSignIdentitySupplier)); } } private void addStoryboardProcessingSteps( SourcePathResolver resolver, Path sourcePath, Path destinationPath, ImmutableList.Builder<Step> stepsBuilder) { if (platform.getName().contains("watch") || isLegacyWatchApp()) { LOG.debug( "Compiling storyboard %s to storyboardc %s and linking", sourcePath, destinationPath); Path compiledStoryboardPath = BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s.storyboardc"); stepsBuilder.add( new IbtoolStep( getBuildTarget(), getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleParams, ImmutableList.of("--target-device", "watch", "--compile"), sourcePath, compiledStoryboardPath)); stepsBuilder.add( new IbtoolStep( getBuildTarget(), getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleParams, ImmutableList.of("--target-device", "watch", "--link"), compiledStoryboardPath, destinationPath.getParent())); } else { LOG.debug("Compiling storyboard %s to storyboardc %s", sourcePath, destinationPath); String compiledStoryboardFilename = Files.getNameWithoutExtension(destinationPath.toString()) + ".storyboardc"; Path compiledStoryboardPath = destinationPath.getParent().resolve(compiledStoryboardFilename); stepsBuilder.add( new IbtoolStep( getBuildTarget(), getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleParams, ImmutableList.of("--compile"), sourcePath, compiledStoryboardPath)); } } private void addResourceProcessingSteps( SourcePathResolver resolver, Path sourcePath, Path destinationPath, ImmutableList.Builder<Step> stepsBuilder) { String sourcePathExtension = 
Files.getFileExtension(sourcePath.toString()).toLowerCase(Locale.US); switch (sourcePathExtension) { case "plist": case "stringsdict": LOG.debug("Converting plist %s to binary plist %s", sourcePath, destinationPath); stepsBuilder.add( new PlistProcessStep( getProjectFilesystem(), sourcePath, Optional.empty(), destinationPath, ImmutableMap.of(), ImmutableMap.of(), PlistProcessStep.OutputFormat.BINARY)); break; case "storyboard": addStoryboardProcessingSteps(resolver, sourcePath, destinationPath, stepsBuilder); break; case "xib": String compiledNibFilename = Files.getNameWithoutExtension(destinationPath.toString()) + ".nib"; Path compiledNibPath = destinationPath.getParent().resolve(compiledNibFilename); LOG.debug("Compiling XIB %s to NIB %s", sourcePath, destinationPath); stepsBuilder.add( new IbtoolStep( getBuildTarget(), getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleParams, ImmutableList.of("--compile"), sourcePath, compiledNibPath)); break; default: stepsBuilder.add(CopyStep.forFile(getProjectFilesystem(), sourcePath, destinationPath)); break; } } @Override public boolean isTestedBy(BuildTarget testRule) { if (tests.contains(testRule)) { return true; } if (binary.isPresent()) { BuildRule binaryRule = binary.get(); if (binaryRule instanceof NativeTestable) { return ((NativeTestable) binaryRule).isTestedBy(testRule); } } return false; } @Override public CxxPreprocessorInput getPrivateCxxPreprocessorInput(CxxPlatform cxxPlatform) { if (binary.isPresent()) { BuildRule binaryRule = binary.get(); if (binaryRule instanceof NativeTestable) { return ((NativeTestable) binaryRule).getPrivateCxxPreprocessorInput(cxxPlatform); } } return CxxPreprocessorInput.of(); } private boolean adHocCodeSignIsSufficient() { return ApplePlatform.adHocCodeSignIsSufficient(platform.getName()); } // .framework bundles will be code-signed when they're copied into the containing bundle. 
// Decides whether this bundle must be code-signed at all: only bundles that
// actually carry a binary, on platforms that require signing, and never
// .framework bundles (per the note above, frameworks are signed when they are
// copied into their containing bundle).
private boolean needCodeSign() {
  return binary.isPresent()
      && ApplePlatform.needsCodeSign(platform.getName())
      && !extension.equals(FRAMEWORK_EXTENSION);
}

// NOTE(review): unconditionally unwraps the Optional; callers appear expected
// to check getBinary().isPresent() first — TODO confirm.
@Override
public BuildRule getBinaryBuildRule() {
  return binary.get();
}

/**
 * Returns the build targets this bundle needs at run time.
 *
 * <p>When the wrapped binary exposes Apple debug-symbol deps, those are
 * returned alongside the binary itself so symbols are available when
 * "running" the app bundle; otherwise the stream is empty.
 */
@Override
public Stream<BuildTarget> getRuntimeDeps(SourcePathRuleFinder ruleFinder) {
  // When "running" an app bundle, ensure debug symbols are available.
  if (binary.get() instanceof HasAppleDebugSymbolDeps) {
    List<BuildRule> symbolDeps =
        ((HasAppleDebugSymbolDeps) binary.get())
            .getAppleDebugSymbolDeps()
            .collect(Collectors.toList());
    if (!symbolDeps.isEmpty()) {
      return Stream.concat(Stream.of(binary.get()), symbolDeps.stream())
          .map(BuildRule::getBuildTarget);
    }
  }
  return Stream.empty();
}

// Whether build outputs of this rule may be cached (value injected via the
// 'cacheable' field set elsewhere in this class).
@Override
public boolean isCacheable() {
  return cacheable;
}

// Command line used to execute the bundle's main binary inside the built
// bundle directory.
@Override
public Tool getExecutableCommand() {
  return new CommandTool.Builder()
      .addArg(SourcePathArg.of(PathSourcePath.of(getProjectFilesystem(), bundleBinaryPath)))
      .build();
}
}
package org.wso2.carbon.identity.sso.agent.openid; import org.openid4java.association.AssociationException; import org.openid4java.consumer.ConsumerException; import org.openid4java.consumer.ConsumerManager; import org.openid4java.consumer.VerificationResult; import org.openid4java.discovery.Discovery; import org.openid4java.discovery.DiscoveryException; import org.openid4java.discovery.DiscoveryInformation; import org.openid4java.discovery.Identifier; import org.openid4java.discovery.yadis.YadisException; import org.openid4java.discovery.yadis.YadisResolver; import org.openid4java.message.AuthRequest; import org.openid4java.message.AuthSuccess; import org.openid4java.message.MessageException; import org.openid4java.message.ParameterList; import org.openid4java.message.ax.AxMessage; import org.openid4java.message.ax.FetchRequest; import org.openid4java.message.ax.FetchResponse; import org.openid4java.server.RealmVerifierFactory; import org.openid4java.util.HttpFetcherFactory; import org.wso2.carbon.identity.sso.agent.SSOAgentConstants; import org.wso2.carbon.identity.sso.agent.SSOAgentException; import org.wso2.carbon.identity.sso.agent.bean.LoggedInSessionBean; import org.wso2.carbon.identity.sso.agent.bean.SSOAgentConfig; import javax.net.ssl.SSLContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.List; import java.util.Map; public class OpenIDManager { // Smart OpenID Consumer Manager private static ConsumerManager consumerManager = null; AttributesRequestor attributesRequestor = null; private SSOAgentConfig ssoAgentConfig = null; public OpenIDManager(SSOAgentConfig ssoAgentConfig) throws SSOAgentException { consumerManager = getConsumerManagerInstance(); this.ssoAgentConfig = ssoAgentConfig; } private ConsumerManager getConsumerManagerInstance() throws SSOAgentException { HttpFetcherFactory httpFetcherFactory = null; try { 
httpFetcherFactory = new HttpFetcherFactory(SSLContext.getDefault(), null); } catch (NoSuchAlgorithmException e) { throw new SSOAgentException("Error while getting default SSL Context", e); } return new ConsumerManager( new RealmVerifierFactory(new YadisResolver(httpFetcherFactory)), new Discovery(), httpFetcherFactory); } public String doOpenIDLogin(HttpServletRequest request, HttpServletResponse response) throws SSOAgentException { String claimed_id = ssoAgentConfig.getOpenId().getClaimedId(); try { if (ssoAgentConfig.getOpenId().isDumbModeEnabled()) { // Switch the consumer manager to dumb mode consumerManager.setMaxAssocAttempts(0); } // Discovery on the user supplied ID List discoveries = consumerManager.discover(claimed_id); // Associate with the OP and share a secret DiscoveryInformation discovered = consumerManager.associate(discoveries); // Keeping necessary parameters to verify the AuthResponse LoggedInSessionBean sessionBean = new LoggedInSessionBean(); sessionBean.setOpenId(sessionBean.new OpenID()); sessionBean.getOpenId().setDiscoveryInformation(discovered); // set the discovery information request.getSession().setAttribute(SSOAgentConstants.SESSION_BEAN_NAME, sessionBean); consumerManager.setImmediateAuth(true); AuthRequest authReq = consumerManager.authenticate(discovered, ssoAgentConfig.getOpenId().getReturnToURL()); // Request subject attributes using Attribute Exchange extension specification if AttributeExchange is enabled if (ssoAgentConfig.getOpenId().isAttributeExchangeEnabled() && ssoAgentConfig.getOpenId().getAttributesRequestor() != null) { attributesRequestor = ssoAgentConfig.getOpenId().getAttributesRequestor(); attributesRequestor.init(); String[] requestedAttributes = attributesRequestor.getRequestedAttributes(claimed_id); // Getting required attributes using FetchRequest FetchRequest fetchRequest = FetchRequest.createFetchRequest(); for (String requestedAttribute : requestedAttributes) { fetchRequest.addAttribute(requestedAttribute, 
attributesRequestor.getTypeURI(claimed_id, requestedAttribute), attributesRequestor.isRequired(claimed_id, requestedAttribute), attributesRequestor.getCount(claimed_id, requestedAttribute)); } // Adding the AX extension to the AuthRequest message authReq.addExtension(fetchRequest); } // Returning OP Url return authReq.getDestinationUrl(true); } catch (YadisException e) { if (e.getErrorCode() == 1796) { throw new SSOAgentException(e.getMessage(), e); } throw new SSOAgentException("Error while creating FetchRequest", e); } catch (MessageException e) { throw new SSOAgentException("Error while creating FetchRequest", e); } catch (DiscoveryException e) { throw new SSOAgentException("Error while doing OpenID Discovery", e); } catch (ConsumerException e) { throw new SSOAgentException("Error while doing OpenID Authentication", e); } } public void processOpenIDLoginResponse(HttpServletRequest request, HttpServletResponse response) throws SSOAgentException { try { // Getting all parameters in request including AuthResponse ParameterList authResponseParams = new ParameterList(request.getParameterMap()); // Get previously saved session bean LoggedInSessionBean loggedInSessionBean = (LoggedInSessionBean) request.getSession(false). getAttribute(SSOAgentConstants.SESSION_BEAN_NAME); if (loggedInSessionBean == null) { throw new SSOAgentException("Error while verifying OpenID response. 
" + "Cannot find valid session for user"); } // Previously discovered information DiscoveryInformation discovered = loggedInSessionBean.getOpenId().getDiscoveryInformation(); // Verify return-to, discoveries, nonce & signature // Signature will be verified using the shared secret VerificationResult verificationResult = consumerManager.verify( ssoAgentConfig.getOpenId().getReturnToURL(), authResponseParams, discovered); Identifier verified = verificationResult.getVerifiedId(); // Identifier will be NULL if verification failed if (verified != null) { AuthSuccess authSuccess = (AuthSuccess) verificationResult.getAuthResponse(); loggedInSessionBean.getOpenId().setClaimedId(authSuccess.getIdentity()); // Get requested attributes using AX extension if (authSuccess.hasExtension(AxMessage.OPENID_NS_AX)) { Map<String, List<String>> attributesMap = new HashMap<String, List<String>>(); if (ssoAgentConfig.getOpenId().getAttributesRequestor() != null) { attributesRequestor = ssoAgentConfig.getOpenId().getAttributesRequestor(); String[] attrArray = attributesRequestor.getRequestedAttributes(authSuccess.getIdentity()); FetchResponse fetchResp = (FetchResponse) authSuccess.getExtension(AxMessage.OPENID_NS_AX); for (String attr : attrArray) { List attributeValues = fetchResp.getAttributeValuesByTypeUri(attributesRequestor.getTypeURI(authSuccess.getIdentity(), attr)); if (attributeValues.get(0) instanceof String && ((String) attributeValues.get(0)).split(",").length > 1) { String[] splitString = ((String) attributeValues.get(0)).split(","); for (String part : splitString) { attributeValues.add(part); } } if (attributeValues.get(0) != null) { attributesMap.put(attr, attributeValues); } } } loggedInSessionBean.getOpenId().setSubjectAttributes(attributesMap); } } else { throw new SSOAgentException("OpenID verification failed"); } } catch (AssociationException e) { throw new SSOAgentException("Error while verifying OpenID response", e); } catch (MessageException e) { throw new 
SSOAgentException("Error while verifying OpenID response", e); } catch (DiscoveryException e) { throw new SSOAgentException("Error while verifying OpenID response", e); } } // protected SSLContext loadSSLContext() throws SSOAgentException { // // KeyStore trustStore = null; // try { // // trustStore = SSOAgentConfig.getKeyStore(); // // TrustManagerFactory tmf = TrustManagerFactory // .getInstance(TrustManagerFactory.getDefaultAlgorithm()); // // tmf.init(trustStore); // // SSLContext sslContext = SSLContext.getInstance("TLS"); // sslContext.init(null, tmf.getTrustManagers(), null); // return sslContext; // } catch (NoSuchAlgorithmException e) { // throw new SSOAgentException("Error when reading keystore", e); // } catch (KeyManagementException e) { // throw new SSOAgentException("Error when reading keystore", e); // } catch (KeyStoreException e) { // throw new SSOAgentException("Error when reading keystore", e); // } // } }
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.jboss.netty.channel.socket.nio; import java.nio.channels.Selector; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.channel.socket.SocketChannel; import org.jboss.netty.util.ExternalResourceReleasable; import org.jboss.netty.util.Timer; /** * A {@link ClientSocketChannelFactory} which creates a client-side NIO-based * {@link SocketChannel}. It utilizes the non-blocking I/O mode which was * introduced with NIO to serve many number of concurrent connections * efficiently. * * <h3>How threads work</h3> * <p> * There are two types of threads in a {@link NioClientSocketChannelFactory}; * one is boss thread and the other is worker thread. * * <h4>Boss thread</h4> * <p> * One {@link NioClientSocketChannelFactory} has one boss thread. It makes * a connection attempt on request. Once a connection attempt succeeds, * the boss thread passes the connected {@link Channel} to one of the worker * threads that the {@link NioClientSocketChannelFactory} manages. 
* * <h4>Worker threads</h4> * <p> * One {@link NioClientSocketChannelFactory} can have one or more worker * threads. A worker thread performs non-blocking read and write for one or * more {@link Channel}s in a non-blocking mode. * * <h3>Life cycle of threads and graceful shutdown</h3> * <p> * All threads are acquired from the {@link Executor}s which were specified * when a {@link NioClientSocketChannelFactory} was created. A boss thread is * acquired from the {@code bossExecutor}, and worker threads are acquired from * the {@code workerExecutor}. Therefore, you should make sure the specified * {@link Executor}s are able to lend the sufficient number of threads. * It is the best bet to specify {@linkplain Executors#newCachedThreadPool() a cached thread pool}. * <p> * Both boss and worker threads are acquired lazily, and then released when * there's nothing left to process. All the related resources such as * {@link Selector} are also released when the boss and worker threads are * released. Therefore, to shut down a service gracefully, you should do the * following: * * <ol> * <li>close all channels created by the factory usually using * {@link ChannelGroup#close()}, and</li> * <li>call {@link #releaseExternalResources()}.</li> * </ol> * * Please make sure not to shut down the executor until all channels are * closed. Otherwise, you will end up with a {@link RejectedExecutionException} * and the related resources might not be released properly. * * @apiviz.landmark */ public class NioClientSocketChannelFactory implements ClientSocketChannelFactory { private static final int DEFAULT_BOSS_COUNT = 1; private final BossPool<NioClientBoss> bossPool; private final WorkerPool<NioWorker> workerPool; private final NioClientSocketPipelineSink sink; private boolean releasePools; /** * Creates a new {@link NioClientSocketChannelFactory} which uses {@link Executors#newCachedThreadPool()} * for the worker and boss executors. 
* * See {@link #NioClientSocketChannelFactory(Executor, Executor)} */ public NioClientSocketChannelFactory() { this(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); releasePools = true; } /** * Creates a new instance. Calling this constructor is same with calling * {@link #NioClientSocketChannelFactory(Executor, Executor, int, int)} with * 1 and (2 * the number of available processors in the machine) for * <tt>bossCount</tt> and <tt>workerCount</tt> respectively. The number of * available processors is obtained by {@link Runtime#availableProcessors()}. * * @param bossExecutor * the {@link Executor} which will execute the boss thread * @param workerExecutor * the {@link Executor} which will execute the worker threads */ public NioClientSocketChannelFactory( Executor bossExecutor, Executor workerExecutor) { this(bossExecutor, workerExecutor, DEFAULT_BOSS_COUNT, SelectorUtil.DEFAULT_IO_THREADS); } /** * Creates a new instance. Calling this constructor is same with calling * {@link #NioClientSocketChannelFactory(Executor, Executor, int, int)} with * 1 as <tt>bossCount</tt>. * * @param bossExecutor * the {@link Executor} which will execute the boss thread * @param workerExecutor * the {@link Executor} which will execute the worker threads * @param workerCount * the maximum number of I/O worker threads */ public NioClientSocketChannelFactory( Executor bossExecutor, Executor workerExecutor, int workerCount) { this(bossExecutor, workerExecutor, DEFAULT_BOSS_COUNT, workerCount); } /** * Creates a new instance. 
* * @param bossExecutor * the {@link Executor} which will execute the boss thread * @param workerExecutor * the {@link Executor} which will execute the worker threads * @param bossCount * the maximum number of boss threads * @param workerCount * the maximum number of I/O worker threads */ public NioClientSocketChannelFactory( Executor bossExecutor, Executor workerExecutor, int bossCount, int workerCount) { this(bossExecutor, bossCount, new NioWorkerPool(workerExecutor, workerCount)); } /** * Creates a new instance. * * @param bossExecutor * the {@link Executor} which will execute the boss thread * @param bossCount * the maximum number of boss threads * @param workerPool * the {@link WorkerPool} to use to do the IO */ public NioClientSocketChannelFactory( Executor bossExecutor, int bossCount, WorkerPool<NioWorker> workerPool) { this(new NioClientBossPool(bossExecutor, bossCount), workerPool); } /** * Creates a new instance. * * @param bossExecutor * the {@link Executor} which will execute the boss thread * @param bossCount * the maximum number of boss threads * @param workerPool * the {@link WorkerPool} to use to do the IO * @param timer * the {@link Timer} to use to handle the connection timeouts */ public NioClientSocketChannelFactory( Executor bossExecutor, int bossCount, WorkerPool<NioWorker> workerPool, Timer timer) { this(new NioClientBossPool(bossExecutor, bossCount, timer, null), workerPool); } /** * Creates a new instance. 
* * @param bossPool * the {@link BossPool} to use to handle the connects * @param workerPool * the {@link WorkerPool} to use to do the IO */ public NioClientSocketChannelFactory( BossPool<NioClientBoss> bossPool, WorkerPool<NioWorker> workerPool) { if (bossPool == null) { throw new NullPointerException("bossPool"); } if (workerPool == null) { throw new NullPointerException("workerPool"); } this.bossPool = bossPool; this.workerPool = workerPool; sink = new NioClientSocketPipelineSink(bossPool); } public SocketChannel newChannel(ChannelPipeline pipeline) { return new NioClientSocketChannel(this, pipeline, sink, workerPool.nextWorker()); } public void shutdown() { bossPool.shutdown(); workerPool.shutdown(); if (releasePools) { releasePools(); } } public void releaseExternalResources() { bossPool.shutdown(); workerPool.shutdown(); releasePools(); } private void releasePools() { if (bossPool instanceof ExternalResourceReleasable) { ((ExternalResourceReleasable) bossPool).releaseExternalResources(); } if (workerPool instanceof ExternalResourceReleasable) { ((ExternalResourceReleasable) workerPool).releaseExternalResources(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket; import static org.hamcrest.CoreMatchers.is; import java.util.NoSuchElementException; import org.apache.wicket.markup.IMarkupResourceStreamProvider; import org.apache.wicket.markup.html.WebComponent; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.WebPage; import org.apache.wicket.util.resource.IResourceStream; import org.apache.wicket.util.resource.StringResourceStream; import org.apache.wicket.util.tester.WicketTestCase; import org.junit.Assert; import org.junit.Test; /** * * @author Juergen Donnerstag */ public class MarkupContainerTest extends WicketTestCase { /** * Make sure components are iterated in the order they were added. Required e.g. 
	 * for Repeaters
	 */
	@Test
	public void iteratorOrder()
	{
		MarkupContainer container = new WebMarkupContainer("component");
		// add 10 children whose ids are their insertion index
		for (int i = 0; i < 10; i++)
		{
			container.add(new WebComponent(Integer.toString(i)));
		}
		// iteration must yield them back in exactly that order
		int i = 0;
		for (Component component : container)
		{
			assertEquals(Integer.toString(i++), component.getId());
		}
	}

	/**
	 * Renders MarkupIdTestPage and compares it against the expected markup file.
	 *
	 * @throws Exception
	 */
	@Test
	public void markupId() throws Exception
	{
		executeTest(MarkupIdTestPage.class, "MarkupIdTestPageExpectedResult.html");
	}

	/**
	 * Tests path-based component lookup via get(String), including parent
	 * ("..") segments and invalid paths.
	 */
	@Test
	public void get()
	{
		WebMarkupContainer a = new WebMarkupContainer("a");
		WebMarkupContainer b = new WebMarkupContainer("b");
		WebMarkupContainer c = new WebMarkupContainer("c");
		WebMarkupContainer d = new WebMarkupContainer("d");
		WebMarkupContainer e = new WebMarkupContainer("e");
		WebMarkupContainer f = new WebMarkupContainer("f");

		// component hierarchy under test:
		// ....A
		// ...B....C
		// .......D..E
		// ...........F

		a.add(b);
		a.add(c);
		c.add(d);
		c.add(e);
		e.add(f);

		// basic gets: null/empty path resolves to the container itself
		assertTrue(a.get(null) == a);
		assertTrue(a.get("") == a);
		assertTrue(a.get("b") == b);
		assertTrue(a.get("c") == c);
		assertTrue(a.get("c:d") == d);
		assertTrue(a.get("c:e:f") == f);

		// parent path gets: ".." walks up one level
		assertTrue(b.get("..") == a);
		assertTrue(e.get("..:..") == a);
		assertTrue(d.get("..:..:c:e:f") == f);
		assertTrue(e.get("..:d:..:e:f") == f);
		assertTrue(e.get("..:d:..:..") == a);

		// invalid gets: walking above the root or to unknown ids yields null
		assertNull(a.get(".."));
		assertNull(a.get("..:a"));
		assertNull(b.get("..|.."));
		assertNull(a.get("q"));
	}

	/**
	 * Tests the get(int) method of MarkupContainer.
	 */
	@Test
	public void getIndexed()
	{
		MarkupContainer c = new WebMarkupContainer("parent");
		Component c1 = new WebComponent("c1");
		Component c2 = new WebComponent("c2");
		c.add(c1);
		c.add(c2);
		assertThat(c.get(0), is(c1));
		assertThat(c.get(1), is(c2));
	}

	/**
	 * Tests the get(int) method of MarkupContainer when the index exceeds the number of children.
	 */
	@Test(expected = ArrayIndexOutOfBoundsException.class)
	public void getIndexedArrayIndexOutOfBoundsException()
	{
		MarkupContainer c = new WebMarkupContainer("parent");
		// no children added, so index 0 is out of bounds
		c.get(0);
	}

	/**
	 * Tests the swap method.
	 */
	@Test
	public void swap()
	{
		MarkupContainer c = new WebMarkupContainer("parent");
		Component c1 = new WebComponent("c1");
		Component c2 = new WebComponent("c2");
		Component c3 = new WebComponent("c3");
		c.add(c1);
		c.add(c2);
		c.add(c3);

		// initial order: c1, c2, c3
		assertThat(c.get(0), is(c1));
		assertThat(c.get(1), is(c2));
		assertThat(c.get(2), is(c3));

		// swap the first two children
		c.swap(0, 1);
		assertThat(c.get(0), is(c2));
		assertThat(c.get(1), is(c1));
		assertThat(c.get(2), is(c3));

		// swap first and last
		c.swap(0, 2);
		assertThat(c.get(0), is(c3));
		assertThat(c.get(1), is(c1));
		assertThat(c.get(2), is(c2));
	}

	/**
	 * Adding a container to itself must be rejected.
	 *
	 * https://issues.apache.org/jira/browse/WICKET-4006
	 */
	@Test(expected = IllegalArgumentException.class)
	public void addMyself()
	{
		WebMarkupContainer me = new WebMarkupContainer("a");
		me.add(me);
	}

	/**
	 * A page whose first render fails must be renderable again afterwards.
	 *
	 * https://issues.apache.org/jira/browse/WICKET-5911
	 */
	@Test
	public void rerenderAfterRenderFailure()
	{
		FirstRenderFailsPage page = new FirstRenderFailsPage();
		try
		{
			tester.startPage(page);
		}
		catch (WicketRuntimeException expected)
		{
			// the first render is designed to fail; swallow and re-render
		}

		tester.startPage(page);

		// rendering flags were properly reset, so second rendering works properly
		assertEquals(2, page.beforeRenderCalls);
	}

	/**
	 * onAfterRender must be invoked exactly once per component per render.
	 *
	 * https://issues.apache.org/jira/browse/WICKET-4012
	 */
	@Test
	public void afterRenderJustOnce()
	{
		AfterRenderJustOncePage page = new AfterRenderJustOncePage();
		tester.startPage(page);

		assertEquals(1, page.afterRenderCalls);
	}

	/**
	 * Calling toString() from a component's constructor must not blow up.
	 *
	 * https://issues.apache.org/jira/browse/WICKET-4016
	 */
	@Test
	public void callToStringFromConstructor()
	{
		ToStringComponent page = new ToStringComponent();
	}

	/** Component that calls toString(true) while still inside its constructor. */
	private static class ToStringComponent extends WebMarkupContainer
	{
		private ToStringComponent()
		{
			super("id");
			toString(true);
		}
	}

	/** Page with nested containers that counts onAfterRender calls on the innermost one. */
	private static class AfterRenderJustOncePage extends WebPage
		implements
			IMarkupResourceStreamProvider
	{
		// incremented by the innermost container's onAfterRender override
		private int afterRenderCalls = 0;

		private AfterRenderJustOncePage()
		{

			WebMarkupContainer a1 = new WebMarkupContainer("a1");
			add(a1);

			WebMarkupContainer a2 = new WebMarkupContainer("a2");
			a1.add(a2);

			WebMarkupContainer a3 = new WebMarkupContainer("a3")
			{

				@Override
				protected void onAfterRender()
				{
					super.onAfterRender();
					afterRenderCalls++;
				}

			};
			a2.add(a3);
		}

		@Override
		public IResourceStream getMarkupResourceStream(MarkupContainer container,
			Class<?> containerClass)
		{
			return new StringResourceStream(
				"<html><body><div wicket:id='a1'><div wicket:id='a2'><div wicket:id='a3'></div></div></div></body></html>");
		}
	}

	/** Page whose first onBeforeRender throws; used to verify render flags reset. */
	private static class FirstRenderFailsPage extends WebPage
		implements
			IMarkupResourceStreamProvider
	{
		// true only until the first (failing) render attempt
		private boolean firstRender = true;

		// counts how many times onBeforeRender ran across renders
		private int beforeRenderCalls = 0;

		private FirstRenderFailsPage()
		{
			WebMarkupContainer a1 = new WebMarkupContainer("a1")
			{
				@Override
				protected void onBeforeRender()
				{
					super.onBeforeRender();
					beforeRenderCalls++;
					if (firstRender)
					{
						firstRender = false;
						throw new WicketRuntimeException();
					}
				}
			};
			add(a1);
		}

		@Override
		public IResourceStream getMarkupResourceStream(MarkupContainer container,
			Class<?> containerClass)
		{
			return new StringResourceStream(
				"<html><body><div wicket:id='a1'></div></body></html>");
		}
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.yarn.server.federation.store.records.impl.pb; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterId; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterInfo; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterState; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.protobuf.TextFormat; /** * Protocol buffer based implementation of {@link SubClusterInfo}. 
*/ @Private @Unstable public class SubClusterInfoPBImpl extends SubClusterInfo { private SubClusterInfoProto proto = SubClusterInfoProto.getDefaultInstance(); private SubClusterInfoProto.Builder builder = null; private boolean viaProto = false; private SubClusterId subClusterId = null; public SubClusterInfoPBImpl() { builder = SubClusterInfoProto.newBuilder(); } public SubClusterInfoPBImpl(SubClusterInfoProto proto) { this.proto = proto; viaProto = true; } public SubClusterInfoProto getProto() { mergeLocalToProto(); proto = viaProto ? proto : builder.build(); viaProto = true; return proto; } private void mergeLocalToProto() { if (viaProto) { maybeInitBuilder(); } mergeLocalToBuilder(); proto = builder.build(); viaProto = true; } private void maybeInitBuilder() { if (viaProto || builder == null) { builder = SubClusterInfoProto.newBuilder(proto); } viaProto = false; } private void mergeLocalToBuilder() { if (this.subClusterId != null) { builder.setSubClusterId(convertToProtoFormat(this.subClusterId)); } } @Override public String toString() { return TextFormat.shortDebugString(getProto()); } @Override public SubClusterId getSubClusterId() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; if (this.subClusterId != null) { return this.subClusterId; } if (!p.hasSubClusterId()) { return null; } this.subClusterId = convertFromProtoFormat(p.getSubClusterId()); return this.subClusterId; } @Override public void setSubClusterId(SubClusterId subClusterId) { maybeInitBuilder(); if (subClusterId == null) { builder.clearSubClusterId(); } this.subClusterId = subClusterId; } @Override public String getAMRMServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasAMRMServiceAddress()) ? 
p.getAMRMServiceAddress() : null; } @Override public void setAMRMServiceAddress(String amRMServiceAddress) { maybeInitBuilder(); if (amRMServiceAddress == null) { builder.clearAMRMServiceAddress(); return; } builder.setAMRMServiceAddress(amRMServiceAddress); } @Override public String getClientRMServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasClientRMServiceAddress()) ? p.getClientRMServiceAddress() : null; } @Override public void setClientRMServiceAddress(String clientRMServiceAddress) { maybeInitBuilder(); if (clientRMServiceAddress == null) { builder.clearClientRMServiceAddress(); return; } builder.setClientRMServiceAddress(clientRMServiceAddress); } @Override public String getRMAdminServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasRMAdminServiceAddress()) ? p.getRMAdminServiceAddress() : null; } @Override public void setRMAdminServiceAddress(String rmAdminServiceAddress) { maybeInitBuilder(); if (rmAdminServiceAddress == null) { builder.clearRMAdminServiceAddress(); return; } builder.setRMAdminServiceAddress(rmAdminServiceAddress); } @Override public String getRMWebServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasRMWebServiceAddress()) ? p.getRMWebServiceAddress() : null; } @Override public void setRMWebServiceAddress(String rmWebServiceAddress) { maybeInitBuilder(); if (rmWebServiceAddress == null) { builder.clearRMWebServiceAddress(); return; } builder.setRMWebServiceAddress(rmWebServiceAddress); } @Override public long getLastHeartBeat() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return p.getLastHeartBeat(); } @Override public void setLastHeartBeat(long time) { maybeInitBuilder(); builder.setLastHeartBeat(time); } @Override public SubClusterState getState() { SubClusterInfoProtoOrBuilder p = viaProto ? 
proto : builder; if (!p.hasState()) { return null; } return convertFromProtoFormat(p.getState()); } @Override public void setState(SubClusterState state) { maybeInitBuilder(); if (state == null) { builder.clearState(); return; } builder.setState(convertToProtoFormat(state)); } @Override public long getLastStartTime() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasLastStartTime()) ? p.getLastStartTime() : 0; } @Override public void setLastStartTime(long lastStartTime) { Preconditions.checkNotNull(builder); builder.setLastStartTime(lastStartTime); } @Override public String getCapability() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasCapability()) ? p.getCapability() : null; } @Override public void setCapability(String capability) { maybeInitBuilder(); if (capability == null) { builder.clearCapability(); return; } builder.setCapability(capability); } private SubClusterId convertFromProtoFormat(SubClusterIdProto clusterId) { return new SubClusterIdPBImpl(clusterId); } private SubClusterIdProto convertToProtoFormat(SubClusterId clusterId) { return ((SubClusterIdPBImpl) clusterId).getProto(); } private SubClusterState convertFromProtoFormat(SubClusterStateProto state) { return SubClusterState.valueOf(state.name()); } private SubClusterStateProto convertToProtoFormat(SubClusterState state) { return SubClusterStateProto.valueOf(state.name()); } }
/* * Licensed to STRATIO (C) under one or more contributor license agreements. * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. The STRATIO (C) licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.stratio.crossdata.core.normalizer; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import com.stratio.crossdata.common.exceptions.validation.BadFormatException; import com.stratio.crossdata.core.structures.Join; import org.apache.log4j.Logger; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import com.stratio.crossdata.common.data.CatalogName; import com.stratio.crossdata.common.data.ClusterName; import com.stratio.crossdata.common.data.ColumnName; import com.stratio.crossdata.common.data.ConnectorName; import com.stratio.crossdata.common.data.DataStoreName; import com.stratio.crossdata.common.data.IndexName; import com.stratio.crossdata.common.data.TableName; import com.stratio.crossdata.common.exceptions.ManifestException; import com.stratio.crossdata.common.exceptions.ValidationException; import com.stratio.crossdata.common.metadata.CatalogMetadata; import com.stratio.crossdata.common.metadata.ClusterMetadata; 
import com.stratio.crossdata.common.metadata.ColumnMetadata; import com.stratio.crossdata.common.metadata.ColumnType; import com.stratio.crossdata.common.metadata.ConnectorAttachedMetadata; import com.stratio.crossdata.common.metadata.DataType; import com.stratio.crossdata.common.metadata.IndexMetadata; import com.stratio.crossdata.common.metadata.TableMetadata; import com.stratio.crossdata.common.statements.structures.AbstractRelation; import com.stratio.crossdata.common.statements.structures.AsteriskSelector; import com.stratio.crossdata.common.statements.structures.ColumnSelector; import com.stratio.crossdata.common.statements.structures.IntegerSelector; import com.stratio.crossdata.common.statements.structures.Operator; import com.stratio.crossdata.common.statements.structures.OrderByClause; import com.stratio.crossdata.common.statements.structures.Relation; import com.stratio.crossdata.common.statements.structures.SelectExpression; import com.stratio.crossdata.common.statements.structures.Selector; import com.stratio.crossdata.common.statements.structures.StringSelector; import com.stratio.crossdata.common.utils.Constants; import com.stratio.crossdata.core.MetadataManagerTestHelper; import com.stratio.crossdata.core.metadata.MetadataManager; import com.stratio.crossdata.core.query.BaseQuery; import com.stratio.crossdata.core.query.SelectParsedQuery; import com.stratio.crossdata.core.query.SelectValidatedQuery; import com.stratio.crossdata.core.statements.SelectStatement; import com.stratio.crossdata.core.structures.GroupByClause; public class NormalizerTest { /** * Class logger. 
*/ private static final Logger LOG = Logger.getLogger(MetadataManagerTestHelper.class); @BeforeClass public void setUp() throws ManifestException { MetadataManagerTestHelper.HELPER.initHelper(); MetadataManagerTestHelper.HELPER.createTestEnvironment(); } @AfterClass public void tearDown() throws Exception { MetadataManagerTestHelper.HELPER.closeHelper(); } @Test public void insertData() throws Exception { // DATASTORE MetadataManagerTestHelper.HELPER.insertDataStore("Cassandra", "production"); // CLUSTER ClusterName clusterName = new ClusterName("testing"); DataStoreName dataStoreRef = new DataStoreName("Cassandra"); Map<Selector, Selector> clusterOptions = new HashMap<>(); Map<ConnectorName, ConnectorAttachedMetadata> connectorAttachedRefs = new HashMap<>(); ClusterMetadata clusterMetadata = new ClusterMetadata(clusterName, dataStoreRef, clusterOptions, connectorAttachedRefs); MetadataManager.MANAGER.createCluster(clusterMetadata, false); // CATALOG 1 HashMap<TableName, TableMetadata> tables = new HashMap<>(); TableName tableName = new TableName("demo", "tableClients"); Map<Selector, Selector> options = new HashMap<>(); LinkedHashMap<ColumnName, ColumnMetadata> columns = new LinkedHashMap<>(); ColumnMetadata columnMetadata = new ColumnMetadata( new ColumnName(tableName, "clientId"), new Object[] { }, new ColumnType(DataType.TEXT)); columns.put(new ColumnName(tableName, "clientId"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "colSales"), new Object[] { }, new ColumnType(DataType.INT)); columns.put(new ColumnName(tableName, "colSales"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "gender"), new Object[] { }, new ColumnType(DataType.TEXT)); columns.put(new ColumnName(tableName, "gender"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "colExpenses"), new Object[] { }, new ColumnType(DataType.INT)); columns.put(new ColumnName(tableName, "colExpenses"), 
columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "year"), new Object[] { }, new ColumnType(DataType.INT)); columns.put(new ColumnName(tableName, "year"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "colPlace"), new Object[] { }, new ColumnType(DataType.TEXT)); columns.put(new ColumnName(tableName, "colPlace"), columnMetadata); Map<IndexName, IndexMetadata> indexes = new HashMap<>(); ClusterName clusterRef = new ClusterName("testing"); LinkedList<ColumnName> partitionKey = new LinkedList<>(); partitionKey.add(new ColumnName("demo", "tableClients", "clientId")); LinkedList<ColumnName> clusterKey = new LinkedList<>(); TableMetadata tableMetadata = new TableMetadata( tableName, options, columns, indexes, clusterRef, partitionKey, clusterKey ); tables.put(new TableName("demo", "tableClients"), tableMetadata); CatalogMetadata catalogMetadata = new CatalogMetadata( new CatalogName("demo"), // name new HashMap<Selector, Selector>(), // options tables // tables ); MetadataManager.MANAGER.createCatalog(catalogMetadata, false); assertTrue(MetadataManager.MANAGER.exists(catalogMetadata.getName()), System.lineSeparator() + "Catalog: " + catalogMetadata.getName() + " not found in the Metadata Manager"); // CATALOG 2 tables = new HashMap<>(); tableName = new TableName("myCatalog", "tableCostumers"); options = new HashMap<>(); columns = new LinkedHashMap<>(); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "assistantId"), new Object[] { }, new ColumnType(DataType.TEXT)); columns.put(new ColumnName(tableName, "assistantId"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "age"), new Object[] { }, new ColumnType(DataType.INT)); columns.put(new ColumnName(tableName, "age"), columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "colFee"), new Object[] { }, new ColumnType(DataType.INT)); columns.put(new ColumnName(tableName, "colFee"), 
columnMetadata); columnMetadata = new ColumnMetadata( new ColumnName(tableName, "colCity"), new Object[] { }, new ColumnType(DataType.TEXT)); columns.put(new ColumnName(tableName, "colCity"), columnMetadata); indexes = new HashMap<>(); clusterRef = new ClusterName("myCluster"); partitionKey.clear(); partitionKey.add(new ColumnName("myCatalog", "tableCostumers", "assistantId")); clusterKey = new LinkedList<>(); tableMetadata = new TableMetadata( tableName, options, columns, indexes, clusterRef, partitionKey, clusterKey ); tables.put(new TableName("myCatalog", "tableCostumers"), tableMetadata); catalogMetadata = new CatalogMetadata( new CatalogName("myCatalog"), // name new HashMap<Selector, Selector>(), // options tables // tables ); MetadataManager.MANAGER.createCatalog(catalogMetadata, false); LOG.info("Data inserted in the MetadataManager for the NormalizedTest"); assertTrue(MetadataManager.MANAGER.exists(catalogMetadata.getName()), System.lineSeparator() + "Catalog: " + catalogMetadata.getName() + " not found in the Metadata Manager"); } public void testSelectedParserQuery(SelectParsedQuery selectParsedQuery, String expectedText, String methodName) { Normalizer normalizer = new Normalizer(); SelectValidatedQuery result = null; try { result = normalizer.normalize(selectParsedQuery); } catch (ValidationException e) { fail("Test failed: " + methodName + System.lineSeparator(), e); } assertTrue(result.toString().equalsIgnoreCase(expectedText), "Test failed: " + methodName + System.lineSeparator() + "Result: " + result.toString() + System.lineSeparator() + "Expected: " + expectedText); } @Test public void testNormalizeWhereOrderGroup() throws Exception { insertData(); String methodName = "testNormalizeWhereOrderGroup"; String inputText = "SELECT colSales, colExpenses FROM tableClients " + "WHERE colCity = 'Madrid' " + "GROUP BY colSales, colExpenses " + "ORDER BY age;"; String expectedText = "SELECT demo.tableClients.colSales, demo.tableClients.colExpenses FROM 
demo.tableClients " + "WHERE demo.tableClients.colPlace = 'Madrid' " + "GROUP BY demo.tableClients.colSales, demo.tableClients.colExpenses " + "ORDER BY demo.tableClients.year"; // BASE QUERY BaseQuery baseQuery = new BaseQuery(UUID.randomUUID().toString(), inputText, new CatalogName("demo"),"sessionTest"); // SELECTORS List<Selector> selectorList = new ArrayList<>(); selectorList.add(new ColumnSelector(new ColumnName(null, "colSales"))); selectorList.add(new ColumnSelector(new ColumnName(null, "colExpenses"))); SelectExpression selectExpression = new SelectExpression(selectorList); // SELECT STATEMENT SelectStatement selectStatement = new SelectStatement(selectExpression, new TableName("demo", "tableClients")); // WHERE CLAUSES List<AbstractRelation> where = new ArrayList<>(); where.add(new Relation(new ColumnSelector(new ColumnName(null, "colPlace")), Operator.EQ, new StringSelector("Madrid"))); selectStatement.setWhere(where); // ORDER BY List<Selector> selectorListOrder = new ArrayList<>(); selectorListOrder.add(new ColumnSelector(new ColumnName(null, "year"))); OrderByClause orderBy = new OrderByClause(new ColumnSelector(new ColumnName(null, "year"))); List<OrderByClause> orderByClauses = new ArrayList<>(); orderByClauses.add(orderBy); selectStatement.setOrderByClauses(orderByClauses); // GROUP BY List<Selector> groupBy = new ArrayList<>(); groupBy.add(new ColumnSelector(new ColumnName(null, "colSales"))); groupBy.add(new ColumnSelector(new ColumnName(null, "colExpenses"))); selectStatement.setGroupByClause(new GroupByClause(groupBy)); SelectParsedQuery selectParsedQuery = new SelectParsedQuery(baseQuery, selectStatement); Normalizer normalizer = new Normalizer(); SelectValidatedQuery result = null; try { result = normalizer.normalize(selectParsedQuery); } catch (ValidationException e) { fail("Test failed: " + methodName + System.lineSeparator(), e); } assertTrue(result.toString().equalsIgnoreCase(expectedText), "Test failed: " + methodName + 
System.lineSeparator() + "Result: " + result.toString() + System.lineSeparator() + "Expected: " + expectedText); } @Test public void testNormalizeInnerJoin() throws Exception { insertData(); String methodName = "testNormalizeInnerJoin"; String inputText = "SELECT colSales, colFee FROM tableClients " + "INNER JOIN tableCostumers ON assistantId = clientId " + "WHERE colCity = 'Madrid' " + "GROUP BY colSales, colFee " + "ORDER BY age;"; String expectedText = "SELECT demo.tableClients.colSales, myCatalog.tableCostumers.colFee FROM demo.tableClients " + "INNER JOIN myCatalog.tableCostumers ON myCatalog.tableCostumers.assistantId = demo.tableClients.clientId " + "WHERE myCatalog.tableCostumers.colCity = 'Madrid' " + "GROUP BY demo.tableClients.colSales, myCatalog.tableCostumers.colFee " + "ORDER BY myCatalog.tableCostumers.age"; // BASE QUERY BaseQuery baseQuery = new BaseQuery(UUID.randomUUID().toString(), inputText, new CatalogName("demo"),"sessionTest"); // SELECTORS List<Selector> selectorList = new ArrayList<>(); selectorList.add(new ColumnSelector(new ColumnName(null, "colSales"))); selectorList.add(new ColumnSelector(new ColumnName(null, "colFee"))); SelectExpression selectExpression = new SelectExpression(selectorList); // SELECT STATEMENT SelectStatement selectStatement = new SelectStatement(selectExpression, new TableName("demo", "tableClients")); List<AbstractRelation> joinRelations = new ArrayList<>(); Relation relation = new Relation( new ColumnSelector(new ColumnName(null, "assistantId")), Operator.EQ, new ColumnSelector(new ColumnName(null, "clientId"))); joinRelations.add(relation); List<TableName> tables = new ArrayList<>(); tables.add(new TableName("demo", "tableClients")); tables.add(new TableName("myCatalog", "tableCostumers")); Join join = new Join(tables, joinRelations); selectStatement.addJoin(join); // WHERE CLAUSES List<AbstractRelation> where = new ArrayList<>(); where.add(new Relation(new ColumnSelector(new ColumnName(null, "colCity")), 
Operator.EQ, new StringSelector("Madrid"))); selectStatement.setWhere(where); // ORDER BY List<Selector> selectorListOrder = new ArrayList<>(); selectorListOrder.add(new ColumnSelector(new ColumnName(null, "age"))); OrderByClause orderBy = new OrderByClause(new ColumnSelector(new ColumnName(null, "age"))); List<OrderByClause> orderByClauses = new ArrayList<>(); orderByClauses.add(orderBy); selectStatement.setOrderByClauses(orderByClauses); // GROUP BY List<Selector> groupBy = new ArrayList<>(); groupBy.add(new ColumnSelector(new ColumnName(null, "colSales"))); groupBy.add(new ColumnSelector(new ColumnName(null, "colFee"))); selectStatement.setGroupByClause(new GroupByClause(groupBy)); SelectParsedQuery selectParsedQuery = new SelectParsedQuery(baseQuery, selectStatement); Normalizer normalizer = new Normalizer(); SelectValidatedQuery result = null; try { result = normalizer.normalize(selectParsedQuery); } catch (ValidationException e) { fail("Test failed: " + methodName + System.lineSeparator(), e); } assertTrue(result.toString().equalsIgnoreCase(expectedText), "Test failed: " + methodName + System.lineSeparator() + "Result: " + result.toString() + System.lineSeparator() + "Expected: " + expectedText); } @SuppressWarnings("PMD.JUnitTestShouldIncludeAssert") @Test(expectedExceptions = BadFormatException.class) public void testNormalizeWrongInnerJoin() throws Exception { insertData(); String inputText = "SELECT colSales FROM tableClients " + "INNER JOIN tableClients ON assistantId = clientId "; // BASE QUERY BaseQuery baseQuery = new BaseQuery(UUID.randomUUID().toString(), inputText, new CatalogName("demo"),"sessionTest"); // SELECTORS List<Selector> selectorList = new ArrayList<>(); selectorList.add(new ColumnSelector(new ColumnName(null, "colSales"))); SelectExpression selectExpression = new SelectExpression(selectorList); // SELECT STATEMENT SelectStatement selectStatement = new SelectStatement(selectExpression, new TableName("demo", "tableClients")); 
List<AbstractRelation> joinRelations = new ArrayList<>(); Relation relation = new Relation( new ColumnSelector(new ColumnName(new TableName("myCatalog","tableCostumers"), "assistantId")), Operator.EQ, new ColumnSelector(new ColumnName(null, "clientId"))); joinRelations.add(relation); List<TableName> tables = new ArrayList<>(); tables.add(new TableName("demo", "tableClients")); Join join = new Join(tables, joinRelations); selectStatement.addJoin(join); SelectParsedQuery selectParsedQuery = new SelectParsedQuery(baseQuery, selectStatement); //VALIDATE THE QUERY Normalizer normalizer = new Normalizer(); normalizer.normalize(selectParsedQuery); } @SuppressWarnings("PMD.JUnitTestShouldIncludeAssert") @Test(expectedExceptions = ValidationException.class) public void testNormalizeWrongBasicSelect() throws Exception { insertData(); String inputText = "SELECT myCatalog.tableCostumers.colFee FROM demo.tableClients"; // BASE QUERY BaseQuery baseQuery = new BaseQuery(UUID.randomUUID().toString(), inputText, new CatalogName("demo"),"sessionTest"); // SELECTORS List<Selector> selectorList = new ArrayList<>(); selectorList.add(new ColumnSelector(new ColumnName(null, "colSales"))); selectorList.add(new ColumnSelector(new ColumnName(new TableName("myCatalog","tableCostumers"), "colFee"))); SelectExpression selectExpression = new SelectExpression(selectorList); // SELECT STATEMENT SelectStatement selectStatement = new SelectStatement(selectExpression, new TableName("demo", "tableClients")); SelectParsedQuery selectParsedQuery = new SelectParsedQuery(baseQuery, selectStatement); //VALIDATE THE QUERY Normalizer normalizer = new Normalizer(); normalizer.normalize(selectParsedQuery); } @Test public void testNormalizeSubquery() throws Exception { insertData(); String methodName = "testNormalizeSubquery"; String inputText = "SELECT * FROM " + "( SELECT colsales, 1 FROM tableClients ) AS t"; String virtualTableQN = Constants.VIRTUAL_NAME +".t"; String expectedText = "SELECT 
"+virtualTableQN+".colSales, "+virtualTableQN+".1 FROM ( SELECT demo.tableClients.colsales, 1 FROM demo.tableClients ) AS t"; // BASE QUERY BaseQuery baseQuery = new BaseQuery(UUID.randomUUID().toString(), inputText, new CatalogName("Constants.VIRTUAL_NAME"),"sessionTest"); // SELECTORS List<Selector> selectorList = new ArrayList<>(); selectorList.add(new ColumnSelector(new ColumnName(null, "colSales"))); selectorList.add(new IntegerSelector(1)); SelectExpression selectExpression = new SelectExpression(selectorList); // SELECT STATEMENT SelectStatement subqueryStatement = new SelectStatement(selectExpression, new TableName("demo", "tableClients")); //SELECT List<Selector> selectorList2 = new ArrayList<>(); selectorList2.add(new AsteriskSelector(new TableName(Constants.VIRTUAL_NAME,"t"))); SelectExpression selectExpression2 = new SelectExpression(selectorList2); SelectStatement selectStatement = new SelectStatement(selectExpression2,new TableName(Constants.VIRTUAL_NAME,"t")); selectStatement.setSubquery(subqueryStatement,"t"); SelectParsedQuery selectParsedQuery = new SelectParsedQuery(baseQuery, selectStatement); Normalizer normalizer = new Normalizer(); SelectValidatedQuery result = null; try { result = normalizer.normalize(selectParsedQuery); } catch (ValidationException e) { fail("Test failed: " + methodName + System.lineSeparator(), e); } assertTrue(result.toString().equalsIgnoreCase(expectedText), "Test failed: " + methodName + System.lineSeparator() + "Result: " + result.toString() + System.lineSeparator() + "Expected: " + expectedText); } }
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;

/**
 * Tests DocValues integration into IndexWriter.
 *
 * <p>The recurring theme: a doc-values field name is bound to exactly one
 * {@link DocValuesType}; adding the same field twice in one document, or with a
 * different DV type in a later document/segment/index, must raise
 * {@link IllegalArgumentException} and leave previously-indexed documents intact.
 */
@SuppressCodecs("Lucene3x")
public class TestDocValuesIndexing extends LuceneTestCase {
  /*
   * - add test for multi segment case with deletes
   * - add multithreaded tests / integrate into stress indexing?
   */

  // DV fields must survive addIndexes(IndexReader...) followed by a forceMerge.
  public void testAddIndexes() throws IOException {
    Directory d1 = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d1);
    Document doc = new Document();
    doc.add(newStringField("id", "1", Field.Store.YES));
    doc.add(new NumericDocValuesField("dv", 1));
    w.addDocument(doc);
    IndexReader r1 = w.getReader();
    w.close();

    Directory d2 = newDirectory();
    w = new RandomIndexWriter(random(), d2);
    doc = new Document();
    doc.add(newStringField("id", "2", Field.Store.YES));
    doc.add(new NumericDocValuesField("dv", 2));
    w.addDocument(doc);
    IndexReader r2 = w.getReader();
    w.close();

    // Merge both single-doc indexes into a third one via reader-based addIndexes.
    Directory d3 = newDirectory();
    w = new RandomIndexWriter(random(), d3);
    w.addIndexes(SlowCompositeReaderWrapper.wrap(r1), SlowCompositeReaderWrapper.wrap(r2));
    r1.close();
    d1.close();
    r2.close();
    d2.close();

    w.forceMerge(1);
    DirectoryReader r3 = w.getReader();
    w.close();
    AtomicReader sr = getOnlySegmentReader(r3);
    assertEquals(2, sr.numDocs());
    NumericDocValues docValues = sr.getNumericDocValues("dv");
    assertNotNull(docValues);
    r3.close();
    d3.close();
  }

  // Adding the very same DV Field instance twice to one document must fail;
  // the first (single-field) document remains searchable.
  public void testMultiValuedDocValuesField() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    Field f = new NumericDocValuesField("field", 17);
    doc.add(f);

    // add the doc
    w.addDocument(doc);

    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(f);
    try {
      w.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }

    DirectoryReader r = w.getReader();
    w.close();
    assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
    r.close();
    d.close();
  }

  // Same field name with NUMERIC then BINARY DV in one document must fail.
  public void testDifferentTypedDocValuesField() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("field", 17));
    w.addDocument(doc);

    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(new BinaryDocValuesField("field", new BytesRef("blah")));
    try {
      w.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }

    DirectoryReader r = w.getReader();
    w.close();
    assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
    r.close();
    d.close();
  }

  // Same field name with NUMERIC then SORTED DV in one document must fail.
  public void testDifferentTypedDocValuesField2() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("field", 17));
    w.addDocument(doc);

    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(new SortedDocValuesField("field", new BytesRef("hello")));
    try {
      w.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    DirectoryReader r = w.getReader();
    assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0));
    r.close();
    w.close();
    d.close();
  }

  // LUCENE-3870
  // Sorted DV values of 32764 bytes straddle an internal page boundary; both
  // docs must round-trip exactly. NOTE: the docs share one mutable byte[] via
  // BytesRef b, so bytes[0] is flipped back and forth around each assertEquals.
  public void testLengthPrefixAcrossTwoPages() throws Exception {
    Directory d = newDirectory();
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    Document doc = new Document();
    byte[] bytes = new byte[32764];
    BytesRef b = new BytesRef();
    b.bytes = bytes;
    b.length = bytes.length;
    doc.add(new SortedDocValuesField("field", b));
    w.addDocument(doc);
    bytes[0] = 1;
    w.addDocument(doc);
    w.forceMerge(1);
    DirectoryReader r = w.getReader();
    BinaryDocValues s = FieldCache.DEFAULT.getTerms(getOnlySegmentReader(r), "field", false);

    BytesRef bytes1 = s.get(0);
    assertEquals(bytes.length, bytes1.length);
    bytes[0] = 0;
    assertEquals(b, bytes1);

    bytes1 = s.get(1);
    assertEquals(bytes.length, bytes1.length);
    bytes[0] = 1;
    assertEquals(b, bytes1);
    r.close();
    w.close();
    d.close();
  }

  // DV values are readable per-doc but must NOT appear as stored fields.
  public void testDocValuesUnstored() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwconfig = newIndexWriterConfig(new MockAnalyzer(random()));
    iwconfig.setMergePolicy(newLogMergePolicy());
    IndexWriter writer = new IndexWriter(dir, iwconfig);
    for (int i = 0; i < 50; i++) {
      Document doc = new Document();
      doc.add(new NumericDocValuesField("dv", i));
      doc.add(new TextField("docId", "" + i, Field.Store.YES));
      writer.addDocument(doc);
    }
    DirectoryReader r = writer.getReader();
    AtomicReader slow = SlowCompositeReaderWrapper.wrap(r);
    FieldInfos fi = slow.getFieldInfos();
    FieldInfo dvInfo = fi.fieldInfo("dv");
    assertTrue(dvInfo.hasDocValues());
    NumericDocValues dv = slow.getNumericDocValues("dv");
    for (int i = 0; i < 50; i++) {
      assertEquals(i, dv.get(i));
      Document d = slow.document(i);
      // cannot use d.get("dv") due to another bug!
      assertNull(d.getField("dv"));
      assertEquals(Integer.toString(i), d.get("docId"));
    }
    slow.close();
    writer.close();
    dir.close();
  }

  // Same field in one document as different types:
  public void testMixedTypesSameDocument() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    w.addDocument(new Document());

    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    try {
      w.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    // only the first (empty) document made it in
    IndexReader ir = w.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    w.close();
    dir.close();
  }

  // Two documents with same field as different types:
  public void testMixedTypesDifferentDocuments() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);

    doc = new Document();
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    try {
      w.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    IndexReader ir = w.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    w.close();
    dir.close();
  }

  // Two SortedDocValuesFields with the same name in one document must fail.
  public void testAddSortedTwice() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());

    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
    iwriter.addDocument(doc);

    doc.add(new SortedDocValuesField("dv", new BytesRef("bar!")));
    try {
      iwriter.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException expected) {
      // expected
      if (VERBOSE) {
        System.out.println("hit exc:");
        expected.printStackTrace(System.out);
      }
    }
    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
  }

  // Two BinaryDocValuesFields with the same name in one document must fail.
  public void testAddBinaryTwice() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());

    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
    iwriter.addDocument(doc);

    doc.add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
    try {
      iwriter.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException expected) {
      // expected
    }

    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
  }

  // Two NumericDocValuesFields with the same name in one document must fail.
  public void testAddNumericTwice() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());

    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 1));
    iwriter.addDocument(doc);

    doc.add(new NumericDocValuesField("dv", 2));
    try {
      iwriter.addDocument(doc);
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException expected) {
      // expected
    }

    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
  }

  // An over-length (100000-byte) SORTED value must be rejected per-document.
  public void testTooLargeSortedBytes() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());

    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("just fine")));
    iwriter.addDocument(doc);

    doc = new Document();
    byte bytes[] = new byte[100000];
    BytesRef b = new BytesRef(bytes);
    random().nextBytes(bytes);
    doc.add(new SortedDocValuesField("dv", b));
    try {
      iwriter.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException expected) {
      // expected
    }
    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
  }

  // Same as above for SORTED_SET values (skipped if the codec lacks SORTED_SET).
  public void testTooLargeTermSortedSetBytes() throws IOException {
    assumeTrue("codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
    Analyzer analyzer = new MockAnalyzer(random());

    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new SortedSetDocValuesField("dv", new BytesRef("just fine")));
    iwriter.addDocument(doc);

    doc = new Document();
    byte bytes[] = new byte[100000];
    BytesRef b = new BytesRef(bytes);
    random().nextBytes(bytes);
    doc.add(new SortedSetDocValuesField("dv", b));
    try {
      iwriter.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException expected) {
      // expected
    }
    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
  }

  // Two documents across segments
  public void testMixedTypesDifferentSegments() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);
    w.commit();

    doc = new Document();
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    try {
      w.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    w.close();
    dir.close();
  }

  // Add inconsistent document after deleteAll
  // (deleteAll resets the field/DV-type bindings, so the second add succeeds)
  public void testMixedTypesAfterDeleteAll() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);
    w.deleteAll();

    doc = new Document();
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    w.addDocument(doc);
    w.close();
    dir.close();
  }

  // Add inconsistent document after reopening IW w/ create
  public void testMixedTypesAfterReopenCreate() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);
    w.close();

    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    w = new IndexWriter(dir, iwc);
    doc = new Document();
    w.addDocument(doc);
    w.close();
    dir.close();
  }

  // Reopen in append mode: the old DV-type binding still applies, so a
  // conflicting type in the new session must fail.
  public void testMixedTypesAfterReopenAppend1() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);
    w.close();

    w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    doc = new Document();
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    try {
      w.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    w.close();
    dir.close();
  }

  // Append-mode conflict where the field is first (re)added as a plain
  // indexed StringField before the conflicting DV field.
  public void testMixedTypesAfterReopenAppend2() throws IOException {
    assumeTrue("codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
    w.addDocument(doc);
    w.close();

    doc = new Document();
    w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    doc.add(new StringField("foo", "bar", Field.Store.NO));
    doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
    try {
      // NOTE: this case follows a different code path inside
      // DefaultIndexingChain/FieldInfos, because the field (foo)
      // is first added without DocValues:
      w.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    w.forceMerge(1);
    w.close();
    dir.close();
  }

  // Variant of testMixedTypesAfterReopenAppend2 that also adds an extra
  // empty document before merging.
  public void testMixedTypesAfterReopenAppend3() throws IOException {
    assumeTrue("codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
    w.addDocument(doc);
    w.close();

    doc = new Document();
    w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    doc.add(new StringField("foo", "bar", Field.Store.NO));
    doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
    try {
      // NOTE: this case follows a different code path inside
      // DefaultIndexingChain/FieldInfos, because the field (foo)
      // is first added without DocValues:
      w.addDocument(doc);
      fail("did not get expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }

    // Also add another document so there is a segment to write here:
    w.addDocument(new Document());
    w.forceMerge(1);
    w.close();
    dir.close();
  }

  // Two documents with same field as different types, added
  // from separate threads:
  // (exactly one DV type wins; the other two threads must see IAE)
  public void testMixedTypesDifferentThreads() throws Exception {
    Directory dir = newDirectory();
    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));

    final CountDownLatch startingGun = new CountDownLatch(1);
    final AtomicBoolean hitExc = new AtomicBoolean();
    Thread[] threads = new Thread[3];
    for(int i=0;i<3;i++) {
      Field field;
      if (i == 0) {
        field = new SortedDocValuesField("foo", new BytesRef("hello"));
      } else if (i == 1) {
        field = new NumericDocValuesField("foo", 0);
      } else {
        field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
      }
      final Document doc = new Document();
      doc.add(field);

      threads[i] = new Thread() {
          @Override
          public void run() {
            try {
              // all three adds race after the latch opens
              startingGun.await();
              w.addDocument(doc);
            } catch (IllegalArgumentException iae) {
              // expected
              hitExc.set(true);
            } catch (Exception e) {
              throw new RuntimeException(e);
            }
          }
        };
      threads[i].start();
    }

    startingGun.countDown();

    for(Thread t : threads) {
      t.join();
    }
    assertTrue(hitExc.get());
    w.close();
    dir.close();
  }

  // Adding documents via addIndexes
  // (both Directory- and IndexReader-based addIndexes must reject a DV-type conflict)
  public void testMixedTypesViaAddIndexes() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);

    // Make 2nd index w/ inconsistent field
    Directory dir2 = newDirectory();
    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
    doc = new Document();
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    w2.addDocument(doc);
    w2.close();

    try {
      w.addIndexes(new Directory[] {dir2});
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }

    IndexReader r = DirectoryReader.open(dir2);
    try {
      w.addIndexes(new IndexReader[] {r});
      fail("didn't hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }

    r.close();
    dir2.close();
    w.close();
    dir.close();
  }

  // NUMERIC then SORTED on the same field name, within one writer session.
  public void testIllegalTypeChange() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    try {
      writer.addDocument(doc);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    IndexReader ir = writer.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    writer.close();
    dir.close();
  }

  // Type conflict across a writer close/reopen boundary must still fail.
  public void testIllegalTypeChangeAcrossSegments() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    try {
      writer.addDocument(doc);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    writer.close();
    dir.close();
  }

  // deleteAll in a fresh writer session clears the binding: type change OK.
  public void testTypeChangeAfterCloseAndDeleteAll() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir, conf);
    writer.deleteAll();
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    writer.close();
    dir.close();
  }

  // deleteAll within the same writer session clears the binding: type change OK.
  public void testTypeChangeAfterDeleteAll() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.deleteAll();
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    writer.close();
    dir.close();
  }

  // commit followed by deleteAll also clears the binding: type change OK.
  public void testTypeChangeAfterCommitAndDeleteAll() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.commit();
    writer.deleteAll();
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    writer.close();
    dir.close();
  }

  // Reopening with OpenMode.CREATE wipes the index, so a new DV type is OK.
  public void testTypeChangeAfterOpenCreate() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    writer = new IndexWriter(dir, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    writer.close();
    dir.close();
  }

  // addIndexes(Directory) bringing in a conflicting DV type must fail.
  public void testTypeChangeViaAddIndexes() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir2, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    try {
      writer.addIndexes(dir);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    writer.close();
    dir.close();
    dir2.close();
  }

  // addIndexes(IndexReader) bringing in a conflicting DV type must fail.
  public void testTypeChangeViaAddIndexesIR() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir2, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer.addDocument(doc);
    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
    try {
      writer.addIndexes(readers);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    readers[0].close();
    writer.close();
    dir.close();
    dir2.close();
  }

  // A type imported via addIndexes(Directory) binds the field; a later
  // conflicting addDocument must fail.
  public void testTypeChangeViaAddIndexes2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir2, conf);
    writer.addIndexes(dir);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    try {
      writer.addDocument(doc);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    writer.close();
    dir2.close();
    dir.close();
  }

  // Same as testTypeChangeViaAddIndexes2, but importing via IndexReader.
  public void testTypeChangeViaAddIndexesIR2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();

    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    writer = new IndexWriter(dir2, conf);
    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
    writer.addIndexes(readers);
    readers[0].close();
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    try {
      writer.addDocument(doc);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    writer.close();
    dir2.close();
    dir.close();
  }

  // FieldCache.getDocsWithField must report the DV field present on both docs,
  // including the one that also indexes "dv" as a text field.
  public void testDocsWithField() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);

    doc = new Document();
    doc.add(new TextField("dv", "some text", Field.Store.NO));
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);

    DirectoryReader r = writer.getReader();
    writer.close();

    AtomicReader subR = r.leaves().get(0).reader();
    assertEquals(2, subR.numDocs());

    Bits bits = FieldCache.DEFAULT.getDocsWithField(subR, "dv");
    assertTrue(bits.get(0));
    assertTrue(bits.get(1));
    r.close();
    dir.close();
  }

  public void testSameFieldNameForPostingAndDocValue() throws Exception {
    // LUCENE-5192: FieldInfos.Builder neglected to update
    // globalFieldNumbers.docValuesType map if the field existed, resulting in
    // potentially adding the same field with different DV types.
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);

    Document doc = new Document();
    doc.add(new StringField("f", "mock-value", Store.NO));
    doc.add(new NumericDocValuesField("f", 5));
    writer.addDocument(doc);
    writer.commit();

    doc = new Document();
    doc.add(new BinaryDocValuesField("f", new BytesRef("mock")));
    try {
      writer.addDocument(doc);
      fail("should not have succeeded to add a field with different DV type than what already exists");
    } catch (IllegalArgumentException e) {
      writer.rollback();
    }

    dir.close();
  }

  // An exception thrown from the token stream while indexing a doc must leave
  // the writer usable for subsequent documents.
  public void testExcIndexingDocBeforeDocValues() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    IndexWriter w = new IndexWriter(dir, iwc);
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.setDocValueType(DocValuesType.SORTED);
    ft.freeze();
    Field field = new Field("test", "value", ft);
    // token stream deliberately blows up on the first token
    field.setTokenStream(new TokenStream() {
        @Override
        public boolean incrementToken() {
          throw new RuntimeException("no");
        }
      });
    doc.add(field);
    try {
      w.addDocument(doc);
      fail("did not hit exception");
    } catch (RuntimeException re) {
      // expected
    }
    w.addDocument(new Document());
    w.close();
    dir.close();
  }
}
package main.java.com.vnc.oeo.mediaprojectiontest1;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.hardware.display.DisplayManager;
import android.media.ImageReader;
import android.os.Build;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;

import org.apache.http.conn.util.InetAddressUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;

/**
 * Static helper methods for the VNC media-projection prototype: colour
 * packing/unpacking, {@link ImageReader} creation, bitmap serialization,
 * network-address lookup and bitmap persistence.
 * <p>
 * Created by oeo on 31/07/2015.
 */
public class VNCUtility {

    public static final String TAG = VNCUtility.class.getName();

    /**
     * Packs 8-bit red/green/blue components into a single ARGB int with
     * full (0xFF) alpha. Components outside 0..255 are masked, not clamped.
     *
     * @param Red   red component (expected 0..255)
     * @param Green green component (expected 0..255)
     * @param Blue  blue component (expected 0..255)
     * @return ARGB colour value with alpha forced to 0xFF
     */
    public static int getIntFromColor(int Red, int Green, int Blue) {
        Red = (Red << 16) & 0x00FF0000;   // shift red into bits 16-23, mask out everything else
        Green = (Green << 8) & 0x0000FF00; // shift green into bits 8-15, mask out everything else
        Blue = Blue & 0x000000FF;          // mask out anything not blue
        return 0xFF000000 | Red | Green | Blue; // 0xFF000000 for 100% alpha; OR everything together
    }

    /**
     * Creates an {@link ImageReader} sized to the activity's default display,
     * producing up to two in-flight RGB_565 images.
     *
     * @param c activity used to resolve the default display size
     * @return a new {@link ImageReader} matching the display dimensions
     */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    public static ImageReader createImageReader(Activity c) {
        Display display = c.getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        // maxImages = 2: one image being encoded while the next is acquired
        return ImageReader.newInstance(size.x, size.y, PixelFormat.RGB_565, 2);
    }

    /**
     * Renders a packed ARGB pixel value as a "#AARRGGBB" hex string.
     *
     * @param pixel packed ARGB colour
     * @return hex representation, e.g. {@code #FF00FF00}
     */
    public static String getColourForInt(int pixel) {
        int a = Color.alpha(pixel);
        int r = Color.red(pixel);
        int g = Color.green(pixel);
        int b = Color.blue(pixel);
        return String.format("#%02X%02X%02X%02X", a, r, g, b);
    }

    /**
     * Logs a human-readable name for an {@link ImageFormat}/{@link PixelFormat}
     * constant. Intended for debugging {@link ImageReader} configurations.
     * <p>
     * NOTE(review): some constants share numeric values across the two enums
     * (e.g. ImageFormat.RGB_565 == PixelFormat.RGB_565 == 4 and
     * ImageFormat.UNKNOWN == PixelFormat.UNKNOWN == 0), so the later
     * "PIXEL_RGB_565"/"PIXEL_UNKNOWN" branches are unreachable; kept for
     * parity with the original behaviour.
     *
     * @param format a format constant from {@link ImageFormat} or {@link PixelFormat}
     */
    public static void printReaderFormat(int format) {
        if (format == ImageFormat.JPEG) {
            Log.e(TAG, "JPEG");
        } else if (format == ImageFormat.NV16) {
            Log.e(TAG, "NV16");
        } else if (format == ImageFormat.NV21) {
            Log.e(TAG, "NV21");
        } else if (format == ImageFormat.RAW10) {
            Log.e(TAG, "RAW10");
        } else if (format == ImageFormat.RAW_SENSOR) {
            Log.e(TAG, "RAW_SENSOR");
        } else if (format == ImageFormat.RGB_565) {
            Log.e(TAG, "RGB_565");
        } else if (format == ImageFormat.YUV_420_888) {
            Log.e(TAG, "YUV_420_888");
        } else if (format == ImageFormat.YUY2) {
            Log.e(TAG, "YUY2");
        } else if (format == ImageFormat.YV12) {
            Log.e(TAG, "YV12");
        } else if (format == ImageFormat.UNKNOWN) {
            Log.e(TAG, "UNKNOWN");
        } else if (format == PixelFormat.RGB_888) {
            Log.e(TAG, "RGB_888");
        } else if (format == PixelFormat.RGB_565) {
            Log.e(TAG, "PIXEL_RGB_565");
        } else if (format == PixelFormat.RGBA_8888) {
            Log.e(TAG, "RGBA_8888");
        } else if (format == PixelFormat.RGBX_8888) {
            Log.e(TAG, "RGBX_8888");
        } else if (format == PixelFormat.TRANSLUCENT) {
            Log.e(TAG, "TRANSLUCENT");
        } else if (format == PixelFormat.TRANSPARENT) {
            Log.e(TAG, "TRANSPARENT");
        } else if (format == PixelFormat.UNKNOWN) {
            Log.e(TAG, "PIXEL_UNKNOWN");
        } else {
            Log.e(TAG, "THE FORMAT IS GIBBERISH");
        }
    }

    /**
     * Copies a bitmap's raw pixel data into a new byte array.
     *
     * @param b source bitmap
     * @return byte array containing the bitmap's pixel data
     */
    public static byte[] bitmapToArray(Bitmap b) {
        int bytes = b.getByteCount();
        ByteBuffer buffer = ByteBuffer.allocate(bytes); // create a new buffer
        b.copyPixelsToBuffer(buffer);                   // move the pixel data into the buffer
        return buffer.array();                          // underlying array containing the data
    }

    /**
     * Downloads and decodes a bitmap from an HTTP(S) URL. Blocking; must not
     * be called on the main thread.
     *
     * @param src URL of the image
     * @return the decoded bitmap, or {@code null} on any I/O failure
     */
    public static Bitmap getBitmapFromURL(String src) {
        HttpURLConnection connection = null;
        InputStream input = null;
        try {
            URL url = new URL(src);
            connection = (HttpURLConnection) url.openConnection();
            connection.setDoInput(true);
            connection.connect();
            input = connection.getInputStream();
            return BitmapFactory.decodeStream(input);
        } catch (IOException e) {
            // Log with the exception object itself: e.getMessage() may be null,
            // and Log.e(tag, null) would throw.
            Log.e(TAG, "Could not load bitmap from URL", e);
            return null;
        } finally {
            // Always release the stream and the connection, success or failure.
            if (input != null) {
                try {
                    input.close();
                } catch (IOException ignore) {
                    // best effort cleanup
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
    }

    /**
     * Get IP address from first non-localhost interface.
     *
     * @param useIPv4 true=return ipv4, false=return ipv6
     * @return address or empty string
     */
    public static String getIPAddress(boolean useIPv4) {
        try {
            List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
            for (NetworkInterface intf : interfaces) {
                List<InetAddress> addrs = Collections.list(intf.getInetAddresses());
                for (InetAddress addr : addrs) {
                    if (!addr.isLoopbackAddress()) {
                        String sAddr = addr.getHostAddress().toUpperCase();
                        boolean isIPv4 = InetAddressUtils.isIPv4Address(sAddr);
                        if (useIPv4) {
                            if (isIPv4) {
                                return sAddr;
                            }
                        } else {
                            if (!isIPv4) {
                                // strip the "%<zone>" suffix from IPv6 addresses
                                int delim = sAddr.indexOf('%');
                                return delim < 0 ? sAddr : sAddr.substring(0, delim);
                            }
                        }
                    }
                }
            }
        } catch (Exception ignored) {
            // deliberate best-effort: callers treat "" as "no address available"
        }
        return "";
    }

    /**
     * Saves a bitmap as PNG into {@code path/filename}.
     *
     * @param bitmap   bitmap to persist
     * @param filename target file name
     * @param path     target directory (must exist)
     * @param recycle  when true, {@link Bitmap#recycle()} is called regardless of outcome
     * @return the written file on success, {@code null} on failure
     */
    public static File saveBitmapToFile(Bitmap bitmap, String filename, String path, boolean recycle) {
        FileOutputStream out = null;
        try {
            File f = new File(path, filename);
            // FileOutputStream creates the file if it does not exist
            out = new FileOutputStream(f);
            if (bitmap.compress(Bitmap.CompressFormat.PNG, 100, out)) {
                return f;
            }
        } catch (Exception e) {
            Log.e(TAG, "Could not save bitmap", e);
        } finally {
            // Guard against out being null when FileOutputStream construction failed
            // (the original unconditionally called out.close() and swallowed the NPE).
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignore) {
                    // best effort cleanup
                }
            }
            if (recycle) {
                bitmap.recycle();
            }
        }
        return null;
    }
}
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */
package org.apache.http.impl.nio.reactor;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ClosedSelectorException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Collections;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.apache.http.annotation.ThreadSafe;
import org.apache.http.nio.reactor.IOReactor;
import org.apache.http.nio.reactor.IOReactorException;
import org.apache.http.nio.reactor.IOReactorStatus;
import org.apache.http.nio.reactor.IOSession;
import org.apache.http.util.Args;
import org.apache.http.util.Asserts;

/**
 * Generic implementation of {@link IOReactor} that can used as a subclass
 * for more specialized I/O reactors. It is based on a single {@link Selector}
 * instance.
 * <p>
 * Channels, closed sessions and pending interest-op changes are handed to the
 * reactor thread through concurrent queues and drained from inside the select
 * loop; status transitions are guarded by {@code statusMutex}.
 *
 * @since 4.0
 */
@ThreadSafe // public methods only
public abstract class AbstractIOReactor implements IOReactor {

    // Written by shutdown methods (possibly from other threads), read by the
    // select loop; volatile so the loop observes transitions promptly.
    private volatile IOReactorStatus status;

    // Guards status transitions and serves as the wait/notify monitor for
    // awaitShutdown().
    private final Object statusMutex;
    private final long selectTimeout;
    private final boolean interestOpsQueueing;
    private final Selector selector;
    // All live sessions; synchronized set because it is touched from both the
    // reactor thread and sessions closing themselves.
    private final Set<IOSession> sessions;
    // Pending interestOps() changes, applied on the reactor thread only
    // (used when interestOpsQueueing is enabled).
    private final Queue<InterestOpEntry> interestOpsQueue;
    // Sessions closed asynchronously, reaped by the select loop.
    private final Queue<IOSession> closedSessions;
    // Channels added via addChannel(), registered by the select loop.
    private final Queue<ChannelEntry> newChannels;

    /**
     * Creates new AbstractIOReactor instance with interest-op queueing disabled.
     *
     * @param selectTimeout the select timeout.
     * @throws IOReactorException in case if a non-recoverable I/O error.
     */
    public AbstractIOReactor(final long selectTimeout) throws IOReactorException {
        this(selectTimeout, false);
    }

    /**
     * Creates new AbstractIOReactor instance.
     *
     * @param selectTimeout the select timeout.
     * @param interestOpsQueueing Ops queueing flag.
     * @throws IOReactorException in case if a non-recoverable I/O error.
     *
     * @since 4.1
     */
    public AbstractIOReactor(final long selectTimeout, final boolean interestOpsQueueing) throws IOReactorException {
        super();
        Args.positive(selectTimeout, "Select timeout");
        this.selectTimeout = selectTimeout;
        this.interestOpsQueueing = interestOpsQueueing;
        this.sessions = Collections.synchronizedSet(new HashSet<IOSession>());
        this.interestOpsQueue = new ConcurrentLinkedQueue<InterestOpEntry>();
        this.closedSessions = new ConcurrentLinkedQueue<IOSession>();
        this.newChannels = new ConcurrentLinkedQueue<ChannelEntry>();
        try {
            this.selector = Selector.open();
        } catch (final IOException ex) {
            throw new IOReactorException("Failure opening selector", ex);
        }
        this.statusMutex = new Object();
        this.status = IOReactorStatus.INACTIVE;
    }

    /**
     * Triggered when the key signals {@link SelectionKey#OP_ACCEPT} readiness.
     *
     * @param key the selection key.
     */
    protected abstract void acceptable(SelectionKey key);

    /**
     * Triggered when the key signals {@link SelectionKey#OP_CONNECT} readiness.
     *
     * @param key the selection key.
     */
    protected abstract void connectable(SelectionKey key);

    /**
     * Triggered when the key signals {@link SelectionKey#OP_READ} readiness.
     *
     * @param key the selection key.
     */
    protected abstract void readable(SelectionKey key);

    /**
     * Triggered when the key signals {@link SelectionKey#OP_WRITE} readiness.
     *
     * @param key the selection key.
     */
    protected abstract void writable(SelectionKey key);

    /**
     * Triggered to validate keys currently registered with the selector.
     * Called after each I/O select loop; super-classes can use it to run
     * validity checks on active sessions (e.g. timeouts).
     *
     * @param keys all selection keys registered with the selector.
     */
    protected abstract void validate(Set<SelectionKey> keys);

    /**
     * Triggered when new session has been created. No-op by default.
     *
     * @param key the selection key.
     * @param session new I/O session.
     */
    protected void sessionCreated(final SelectionKey key, final IOSession session) {
    }

    /**
     * Triggered when a session has been closed. No-op by default.
     *
     * @param session closed I/O session.
     */
    protected void sessionClosed(final IOSession session) {
    }

    /**
     * Triggered when a session has timed out. No-op by default.
     *
     * @param session timed out I/O session.
     */
    protected void sessionTimedOut(final IOSession session) {
    }

    /**
     * Obtains {@link IOSession} instance associated with the given selection
     * key (stored as the key's attachment).
     *
     * @param key the selection key.
     * @return I/O session.
     */
    protected IOSession getSession(final SelectionKey key) {
        return (IOSession) key.attachment();
    }

    public IOReactorStatus getStatus() {
        return this.status;
    }

    /**
     * Returns <code>true</code> if interest Ops queueing is enabled, <code>false</code> otherwise.
     *
     * @since 4.1
     */
    public boolean getInterestOpsQueueing() {
        return this.interestOpsQueueing;
    }

    /**
     * Adds new channel entry. The channel will be asynchronously registered
     * with the selector on the reactor thread; wakeup() forces the select
     * loop to notice the new entry immediately.
     *
     * @param channelEntry the channel entry.
     */
    public void addChannel(final ChannelEntry channelEntry) {
        Args.notNull(channelEntry, "Channel entry");
        this.newChannels.add(channelEntry);
        this.selector.wakeup();
    }

    /**
     * Activates the I/O reactor: enters the infinite I/O select loop on
     * the {@link Selector} instance associated with this I/O reactor and
     * dispatches readiness events to the abstract callback methods.
     * Remains blocked until the I/O reactor is shut down or the execution
     * thread is interrupted. Always ends in a hard shutdown and notifies
     * threads blocked in {@link #awaitShutdown(long)}.
     *
     * @see #acceptable(SelectionKey)
     * @see #connectable(SelectionKey)
     * @see #readable(SelectionKey)
     * @see #writable(SelectionKey)
     * @see #timeoutCheck(SelectionKey, long)
     * @see #validate(Set)
     * @see #sessionCreated(SelectionKey, IOSession)
     * @see #sessionClosed(IOSession)
     *
     * @throws InterruptedIOException if the dispatch thread is interrupted.
     * @throws IOReactorException in case if a non-recoverable I/O error.
     */
    protected void execute() throws InterruptedIOException, IOReactorException {
        this.status = IOReactorStatus.ACTIVE;
        try {
            for (;;) {
                final int readyCount;
                try {
                    readyCount = this.selector.select(this.selectTimeout);
                } catch (final InterruptedIOException ex) {
                    throw ex;
                } catch (final IOException ex) {
                    throw new IOReactorException("Unexpected selector failure", ex);
                }

                if (this.status == IOReactorStatus.SHUT_DOWN) {
                    // Hard shut down. Exit select loop immediately
                    break;
                }

                if (this.status == IOReactorStatus.SHUTTING_DOWN) {
                    // Graceful shutdown in process
                    // Try to close things out nicely
                    closeSessions();
                    closeNewChannels();
                }

                // Process selected I/O events
                if (readyCount > 0) {
                    processEvents(this.selector.selectedKeys());
                }

                // Validate active channels
                validate(this.selector.keys());

                // Process closed sessions
                processClosedSessions();

                // If active process new channels
                if (this.status == IOReactorStatus.ACTIVE) {
                    processNewChannels();
                }

                // Exit select loop if graceful shutdown has been completed
                // (status past ACTIVE and no sessions left to drain)
                if (this.status.compareTo(IOReactorStatus.ACTIVE) > 0
                        && this.sessions.isEmpty()) {
                    break;
                }

                if (this.interestOpsQueueing) {
                    // process all pending interestOps() operations
                    processPendingInterestOps();
                }
            }
        } catch (final ClosedSelectorException ignore) {
            // selector closed concurrently during shutdown; fall through to cleanup
        } finally {
            hardShutdown();
            // Wake any thread blocked in awaitShutdown()
            synchronized (this.statusMutex) {
                this.statusMutex.notifyAll();
            }
        }
    }

    // Dispatches every selected key, then clears the selected-key set so the
    // next select() call starts fresh.
    private void processEvents(final Set<SelectionKey> selectedKeys) {
        for (final SelectionKey key : selectedKeys) {
            processEvent(key);
        }
        selectedKeys.clear();
    }

    /**
     * Processes new event on the given selection key, delegating to the
     * readiness callbacks. A key cancelled mid-dispatch queues its session
     * for closed-session processing and detaches it from the key.
     *
     * @param key the selection key that triggered an event.
     */
    protected void processEvent(final SelectionKey key) {
        final IOSessionImpl session = (IOSessionImpl) key.attachment();
        try {
            if (key.isAcceptable()) {
                acceptable(key);
            }
            if (key.isConnectable()) {
                connectable(key);
            }
            if (key.isReadable()) {
                // record the access time used by timeoutCheck()
                session.resetLastRead();
                readable(key);
            }
            if (key.isWritable()) {
                session.resetLastWrite();
                writable(key);
            }
        } catch (final CancelledKeyException ex) {
            queueClosedSession(session);
            key.attach(null);
        }
    }

    /**
     * Queues the given I/O session to be processed asynchronously as closed.
     *
     * @param session the closed I/O session.
     */
    protected void queueClosedSession(final IOSession session) {
        if (session != null) {
            this.closedSessions.add(session);
        }
    }

    // Drains newChannels: registers each channel with the selector, wraps it
    // in an IOSessionImpl and notifies the pending session request (if any).
    private void processNewChannels() throws IOReactorException {
        ChannelEntry entry;
        while ((entry = this.newChannels.poll()) != null) {

            final SocketChannel channel;
            final SelectionKey key;
            try {
                channel = entry.getChannel();
                channel.configureBlocking(false);
                key = channel.register(this.selector, SelectionKey.OP_READ);
            } catch (final ClosedChannelException ex) {
                final SessionRequestImpl sessionRequest = entry.getSessionRequest();
                if (sessionRequest != null) {
                    sessionRequest.failed(ex);
                }
                // NOTE(review): returns rather than continues, so remaining
                // queued channels wait for the next loop iteration
                return;
            } catch (final IOException ex) {
                throw new IOReactorException("Failure registering channel " +
                        "with the selector", ex);
            }

            final SessionClosedCallback sessionClosedCallback = new SessionClosedCallback() {

                public void sessionClosed(final IOSession session) {
                    queueClosedSession(session);
                }

            };

            InterestOpsCallback interestOpsCallback = null;
            if (this.interestOpsQueueing) {
                // route interestOps() changes through the reactor-thread queue
                interestOpsCallback = new InterestOpsCallback() {

                    public void addInterestOps(final InterestOpEntry entry) {
                        queueInterestOps(entry);
                    }

                };
            }

            final IOSession session;
            try {
                session = new IOSessionImpl(key, interestOpsCallback, sessionClosedCallback);
                int timeout = 0;
                try {
                    timeout = channel.socket().getSoTimeout();
                } catch (final IOException ex) {
                    // Very unlikely to happen and is not fatal
                    // as the protocol layer is expected to overwrite
                    // this value anyways
                }
                session.setAttribute(IOSession.ATTACHMENT_KEY, entry.getAttachment());
                session.setSocketTimeout(timeout);
            } catch (final CancelledKeyException ex) {
                // key cancelled before the session could be set up; skip entry
                continue;
            }
            try {
                this.sessions.add(session);
                final SessionRequestImpl sessionRequest = entry.getSessionRequest();
                if (sessionRequest != null) {
                    sessionRequest.completed(session);
                }
                key.attach(session);
                sessionCreated(key, session);
            } catch (final CancelledKeyException ex) {
                queueClosedSession(session);
                key.attach(null);
            }
        }
    }

    // Drains closedSessions and fires sessionClosed() for each session that
    // was still tracked; duplicates queued twice are ignored via remove().
    private void processClosedSessions() {
        IOSession session;
        while ((session = this.closedSessions.poll()) != null) {
            if (this.sessions.remove(session)) {
                try {
                    sessionClosed(session);
                } catch (final CancelledKeyException ex) {
                    // ignore and move on
                }
            }
        }
    }

    // Applies queued interestOps() changes on the reactor thread; only
    // meaningful when interest-op queueing is enabled.
    private void processPendingInterestOps() {
        // validity check
        if (!this.interestOpsQueueing) {
            return;
        }
        InterestOpEntry entry;
        while ((entry = this.interestOpsQueue.poll()) != null) {
            // obtain the operation's details
            final SelectionKey key = entry.getSelectionKey();
            final int eventMask = entry.getEventMask();
            if (key.isValid()) {
                key.interestOps(eventMask);
            }
        }
    }

    // Enqueues an interestOps() change to be applied by the select loop.
    // Returns false for a null entry; asserts that queueing is enabled.
    private boolean queueInterestOps(final InterestOpEntry entry) {
        // validity checks
        Asserts.check(this.interestOpsQueueing, "Interest ops queueing not enabled");
        if (entry == null) {
            return false;
        }
        // add this operation to the interestOps() queue
        this.interestOpsQueue.add(entry);
        return true;
    }

    /**
     * Triggered to verify whether the I/O session associated with the
     * given selection key has not timed out. Fires {@link #sessionTimedOut}
     * when the last access time plus the socket timeout lies in the past.
     *
     * @param key the selection key.
     * @param now current time as long value.
     */
    protected void timeoutCheck(final SelectionKey key, final long now) {
        final IOSessionImpl session = (IOSessionImpl) key.attachment();
        if (session != null) {
            final int timeout = session.getSocketTimeout();
            if (timeout > 0) {
                if (session.getLastAccessTime() + timeout < now) {
                    sessionTimedOut(session);
                }
            }
        }
    }

    /**
     * Closes out all I/O sessions maintained by this I/O reactor.
     * Iteration holds the set's monitor because sessions is a synchronized set.
     */
    protected void closeSessions() {
        synchronized (this.sessions) {
            for (final IOSession session : this.sessions) {
                session.close();
            }
        }
    }

    /**
     * Closes out all new channels pending registration with the selector of
     * this I/O reactor, cancelling their session requests.
     * @throws IOReactorException - not thrown currently
     */
    protected void closeNewChannels() throws IOReactorException {
        ChannelEntry entry;
        while ((entry = this.newChannels.poll()) != null) {
            final SessionRequestImpl sessionRequest = entry.getSessionRequest();
            if (sessionRequest != null) {
                sessionRequest.cancel();
            }
            final SocketChannel channel = entry.getChannel();
            try {
                channel.close();
            } catch (final IOException ignore) {
            }
        }
    }

    /**
     * Closes out all active channels registered with the selector of
     * this I/O reactor, then closes the selector itself.
     * @throws IOReactorException - not thrown currently
     */
    protected void closeActiveChannels() throws IOReactorException {
        try {
            final Set<SelectionKey> keys = this.selector.keys();
            for (final SelectionKey key : keys) {
                final IOSession session = getSession(key);
                if (session != null) {
                    session.close();
                }
            }
            this.selector.close();
        } catch (final IOException ignore) {
        }
    }

    /**
     * Attempts graceful shutdown of this I/O reactor: flips the status to
     * SHUTTING_DOWN (only from ACTIVE) and wakes the select loop so it can
     * start draining sessions.
     */
    public void gracefulShutdown() {
        synchronized (this.statusMutex) {
            if (this.status != IOReactorStatus.ACTIVE) {
                // Already shutting down
                return;
            }
            this.status = IOReactorStatus.SHUTTING_DOWN;
        }
        this.selector.wakeup();
    }

    /**
     * Attempts force-shutdown of this I/O reactor: marks it SHUT_DOWN and
     * immediately closes pending channels, active channels and the selector.
     */
    public void hardShutdown() throws IOReactorException {
        synchronized (this.statusMutex) {
            if (this.status == IOReactorStatus.SHUT_DOWN) {
                // Already shut down
                return;
            }
            this.status = IOReactorStatus.SHUT_DOWN;
        }
        closeNewChannels();
        closeActiveChannels();
        processClosedSessions();
    }

    /**
     * Blocks for the given period of time in milliseconds awaiting
     * the completion of the reactor shutdown. A timeout of {@code 0}
     * waits indefinitely (Object.wait(0) semantics).
     *
     * @param timeout the maximum wait time.
     * @throws InterruptedException if interrupted.
     */
    public void awaitShutdown(final long timeout) throws InterruptedException {
        synchronized (this.statusMutex) {
            final long deadline = System.currentTimeMillis() + timeout;
            long remaining = timeout;
            while (this.status != IOReactorStatus.SHUT_DOWN) {
                this.statusMutex.wait(remaining);
                if (timeout > 0) {
                    remaining = deadline - System.currentTimeMillis();
                    if (remaining <= 0) {
                        break;
                    }
                }
            }
        }
    }

    public void shutdown(final long gracePeriod) throws IOReactorException {
        if (this.status != IOReactorStatus.INACTIVE) {
            // try the graceful path first, then force if the grace period lapses
            gracefulShutdown();
            try {
                awaitShutdown(gracePeriod);
            } catch (final InterruptedException ignore) {
            }
        }
        if (this.status != IOReactorStatus.SHUT_DOWN) {
            hardShutdown();
        }
    }

    public void shutdown() throws IOReactorException {
        // default grace period: 1000 ms
        shutdown(1000);
    }

}
package io.agrest.it;

import io.agrest.Ag;
import io.agrest.DataResponse;
import io.agrest.constraints.Constraint;
import io.agrest.it.fixture.JerseyTestOnDerby;
import io.agrest.it.fixture.cayenne.E12;
import io.agrest.it.fixture.cayenne.E12E13;
import io.agrest.it.fixture.cayenne.E17;
import io.agrest.it.fixture.cayenne.E18;
import io.agrest.it.fixture.cayenne.E2;
import io.agrest.it.fixture.cayenne.E3;
import org.apache.cayenne.map.DataMap;
import org.apache.cayenne.map.ObjAttribute;
import org.apache.cayenne.map.ObjEntity;
import org.junit.Test;

import javax.ws.rs.GET;
import javax.ws.rs.MatrixParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Configuration;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.FeatureContext;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import java.util.HashMap;
import java.util.Map;

import static org.junit.Assert.assertEquals;

/**
 * Integration tests for GETting entities through parent relationships,
 * covering simple and compound-ID parents, constrained responses and
 * invalid relationship names.
 */
public class GET_Related_IT extends JerseyTestOnDerby {

    @Override
    protected void doAddResources(FeatureContext context) {
        context.register(Resource.class);
    }

    @Test
    public void testGet_ToMany_Constrained() {

        // seed e3s under more than one e2 so we can verify that relationship
        // queries are filtered to the requested parent only
        insert("e2", "id, name", "1, 'xxx'");
        insert("e2", "id, name", "2, 'yyy'");
        insert("e3", "id, e2_id, name", "7, 2, 'zzz'");
        insert("e3", "id, e2_id, name", "8, 1, 'yyy'");
        insert("e3", "id, e2_id, name", "9, 1, 'zzz'");

        Response response = target("/e2/constraints/1/e3s").request().get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"id\":8},{\"id\":9}],\"total\":2}", response.readEntity(String.class));
    }

    @Test
    public void testGet_ToMany_CompoundId() {

        insert("e17", "id1, id2, name", "1, 1, 'aaa'");
        insert("e17", "id1, id2, name", "2, 2, 'bbb'");
        insert("e18", "id, e17_id1, e17_id2, name", "1, 1, 1, 'xxx'");
        insert("e18", "id, e17_id1, e17_id2, name", "2, 1, 1, 'yyy'");
        insert("e18", "id, e17_id1, e17_id2, name", "3, 2, 2, 'zzz'");

        Response response = target("/e17/e18s")
                .matrixParam("parentId1", 1)
                .matrixParam("parentId2", 1)
                .request()
                .get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"id\":1,\"name\":\"xxx\"},{\"id\":2,\"name\":\"yyy\"}],\"total\":2}",
                response.readEntity(String.class));
    }

    @Test
    public void testGet_ValidRel_ToOne_CompoundId() {

        insert("e17", "id1, id2, name", "1, 1, 'aaa'");
        insert("e17", "id1, id2, name", "2, 2, 'bbb'");
        insert("e18", "id, e17_id1, e17_id2, name", "1, 1, 1, 'xxx'");
        insert("e18", "id, e17_id1, e17_id2, name", "2, 1, 1, 'yyy'");
        insert("e18", "id, e17_id1, e17_id2, name", "3, 2, 2, 'zzz'");

        Response response = target("/e18/1").queryParam("include", E18.E17.getName()).request().get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals(
                "{\"data\":[{\"id\":1,"
                        + "\"e17\":{\"id\":{\"id1\":1,\"id2\":1},\"id1\":1,\"id2\":1,\"name\":\"aaa\"},"
                        + "\"name\":\"xxx\"}],\"total\":1}",
                response.readEntity(String.class));
    }

    @Test
    public void testGet_CompoundId_UnmappedPk() {

        // temporarily detach one PK attribute from the ObjEntity to simulate
        // a partially mapped compound key
        DataMap dataMap = DB.getCayenneStack().getChannel().getEntityResolver().getDataMap("datamap");
        ObjEntity e17Entity = dataMap.getObjEntity("E17");
        ObjAttribute removedId2 = e17Entity.getAttribute("id2");
        e17Entity.removeAttribute("id2");

        insert("e17", "id1, id2, name", "1, 1, 'aaa'");
        insert("e17", "id1, id2, name", "2, 2, 'bbb'");
        insert("e18", "id, e17_id1, e17_id2, name", "1, 1, 1, 'xxx'");
        insert("e18", "id, e17_id1, e17_id2, name", "2, 1, 1, 'yyy'");
        insert("e18", "id, e17_id1, e17_id2, name", "3, 2, 2, 'zzz'");

        Response response = target("/e18/1").queryParam("include", E18.E17.getName()).request().get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"id\":1,\""
                + "e17\":{\"id\":{\"id1\":1,\"id2\":1},\"id1\":1,\"name\":\"aaa\"},"
                + "\"name\":\"xxx\"}],\"total\":1}", response.readEntity(String.class));

        // restore initial state
        e17Entity.addAttribute(removedId2);
    }

    @Test
    public void testGet_ValidRel_ToMany() {

        // seed e3s under more than one e2 so we can verify that relationship
        // queries are filtered to the requested parent only
        insert("e2", "id, name", "1, 'xxx'");
        insert("e2", "id, name", "2, 'yyy'");
        insert("e3", "id, e2_id, name", "7, 2, 'zzz'");
        insert("e3", "id, e2_id, name", "8, 1, 'yyy'");
        insert("e3", "id, e2_id, name", "9, 1, 'zzz'");

        Response response = target("/e2/1/e3s").queryParam("include", "id").request().get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"id\":8},{\"id\":9}],\"total\":2}", response.readEntity(String.class));
    }

    @Test
    public void testGet_ValidRel_ToOne() {

        // seed e3s under more than one e2 so we can verify that relationship
        // queries are filtered to the requested parent only
        insert("e2", "id, name", "1, 'xxx'");
        insert("e2", "id, name", "2, 'yyy'");
        insert("e3", "id, e2_id, name", "7, 2, 'zzz'");
        insert("e3", "id, e2_id, name", "8, 1, 'yyy'");
        insert("e3", "id, e2_id, name", "9, 1, 'zzz'");

        Response response = target("/e3/7/e2").queryParam("include", "id").request().get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"id\":2}],\"total\":1}", response.readEntity(String.class));
    }

    @Test
    public void testGet_InvalidRel() {
        Response response = target("/e2/1/dummyrel").request().get();

        assertEquals(Status.BAD_REQUEST.getStatusCode(), response.getStatus());
        assertEquals("{\"success\":false,\"message\":\"Invalid relationship: 'dummyrel'\"}",
                response.readEntity(String.class));
    }

    @Test
    public void testGET_ToManyJoin() {

        insert("e12", "id", "11");
        insert("e12", "id", "12");
        insert("e13", "id", "14");
        insert("e13", "id", "15");
        insert("e13", "id", "16");
        insert("e12_e13", "e12_id, e13_id", "11, 14");
        insert("e12_e13", "e12_id, e13_id", "12, 16");

        // excluding ID - can't render multi-column IDs yet
        Response response = target("/e12/12/e1213")
                .queryParam("exclude", "id")
                .queryParam("include", "e12")
                .queryParam("include", "e13")
                .request()
                .get();

        assertEquals(Status.OK.getStatusCode(), response.getStatus());
        assertEquals("{\"data\":[{\"e12\":{\"id\":12},\"e13\":{\"id\":16}}],\"total\":1}",
                response.readEntity(String.class));
    }

    /** Test-only JAX-RS endpoints exercising Agrest parent/child selection. */
    @Path("")
    public static class Resource {

        @Context
        private Configuration config;

        @GET
        @Path("e2/{id}/dummyrel")
        public DataResponse<E3> getE2_Dummyrel(@PathParam("id") int id, @Context UriInfo uriInfo) {
            return Ag.select(E3.class, config).parent(E2.class, id, "dummyrel").uri(uriInfo).get();
        }

        @GET
        @Path("e2/{id}/e3s")
        public DataResponse<E3> getE2_E3s(@PathParam("id") int id, @Context UriInfo uriInfo) {
            return Ag.select(E3.class, config).parent(E2.class, id, "e3s").uri(uriInfo).get();
        }

        @GET
        @Path("e2/constraints/{id}/e3s")
        public DataResponse<E3> getE2_E3s_Constrained(@PathParam("id") int id, @Context UriInfo uriInfo) {
            return Ag.select(E3.class, config).parent(E2.class, id, "e3s").uri(uriInfo)
                    .constraint(Constraint.idOnly(E3.class)).get();
        }

        @GET
        @Path("e3/{id}/e2")
        public DataResponse<E2> getE2OfE3(@PathParam("id") int id, @Context UriInfo uriInfo) {
            return Ag.select(E2.class, config).parent(E3.class, id, E3.E2).uri(uriInfo).get();
        }

        @GET
        @Path("e12/{id}/e1213")
        public DataResponse<E12E13> get_Joins_NoId(@PathParam("id") int id, @Context UriInfo info) {
            return Ag.select(E12E13.class, config).toManyParent(E12.class, id, E12.E1213).uri(info).get();
        }

        @GET
        @Path("e18/{id}")
        public DataResponse<E18> getById(@Context UriInfo uriInfo, @PathParam("id") Integer id) {
            return Ag.select(E18.class, config).uri(uriInfo).byId(id).getOne();
        }

        @GET
        @Path("e17/e18s")
        public DataResponse<E18> getChildren(
                @Context UriInfo uriInfo,
                @MatrixParam("parentId1") Integer parentId1,
                @MatrixParam("parentId2") Integer parentId2) {

            Map<String, Object> parentIds = new HashMap<>();
            parentIds.put(E17.ID1_PK_COLUMN, parentId1);
            parentIds.put(E17.ID2_PK_COLUMN, parentId2);
            return Ag.select(E18.class, config).parent(E17.class, parentIds, E17.E18S.getName()).uri(uriInfo).get();
        }
    }
}
package com.thingtrack.konekti.view.module.bundle.internal;

import java.net.URL;

import org.osgi.service.obr.Capability;
import org.osgi.service.obr.Repository;
import org.osgi.service.obr.Requirement;
import org.osgi.service.obr.Resource;

/**
 * Mutable value object describing a bundle resource published in an OSGi
 * Bundle Repository (OBR): identity, origin repository, requirements,
 * capabilities and descriptive metadata.
 * <p>
 * NOTE(review): "requeriments" is a misspelling of "requirements", but the
 * accessor names are part of the public API and are kept as-is.
 */
public class KonektiObrResource {

    private String id;
    private String symbolicName;
    private String version;
    private String presentationName;
    private Repository repository;
    private URL url;
    private Requirement[] requeriments;
    private Capability[] capabilities;
    private String[] categories;
    private String licenseUrl;
    private String description;
    private String documentation;
    private String copyright;
    private String sourceUrl;
    private Long size;
    private String[] keys;
    private Resource resource;

    /** Creates an empty resource descriptor; populate via setters. */
    public KonektiObrResource() {
    }

    /** Creates a fully populated resource descriptor. */
    public KonektiObrResource(String id, String symbolicName, String version,
            String presentationName, Repository repository, URL url,
            Requirement[] requeriments, Capability[] capabilities,
            String[] categories, String licenseUrl, String description,
            String documentation, String copyright, String sourceUrl,
            Long size, String[] keys, Resource resource) {
        super();
        this.id = id;
        this.symbolicName = symbolicName;
        this.version = version;
        this.presentationName = presentationName;
        this.repository = repository;
        this.url = url;
        this.requeriments = requeriments;
        this.capabilities = capabilities;
        this.categories = categories;
        this.licenseUrl = licenseUrl;
        this.description = description;
        this.documentation = documentation;
        this.copyright = copyright;
        this.sourceUrl = sourceUrl;
        this.size = size;
        this.keys = keys;
        this.resource = resource;
    }

    /** @return the resource identifier */
    public String getId() {
        return id;
    }

    /** @param id the resource identifier to set */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the bundle symbolic name */
    public String getSymbolicName() {
        return symbolicName;
    }

    /** @param symbolicName the bundle symbolic name to set */
    public void setSymbolicName(String symbolicName) {
        this.symbolicName = symbolicName;
    }

    /** @return the bundle version string */
    public String getVersion() {
        return version;
    }

    /** @param version the bundle version string to set */
    public void setVersion(String version) {
        this.version = version;
    }

    /** @return the human-readable presentation name */
    public String getPresentationName() {
        return presentationName;
    }

    /** @param presentationName the human-readable presentation name to set */
    public void setPresentationName(String presentationName) {
        this.presentationName = presentationName;
    }

    /** @return the OBR repository this resource came from */
    public Repository getRepository() {
        return repository;
    }

    /** @param repository the OBR repository to set */
    public void setRepository(Repository repository) {
        this.repository = repository;
    }

    /** @return the bundle download URL */
    public URL getUrl() {
        return url;
    }

    /** @param url the bundle download URL to set */
    public void setUrl(URL url) {
        this.url = url;
    }

    /** @return the resource's requirements (name retained for API compatibility) */
    public Requirement[] getRequeriments() {
        return requeriments;
    }

    /** @param requeriments the resource's requirements to set */
    public void setRequeriments(Requirement[] requeriments) {
        this.requeriments = requeriments;
    }

    /** @return the capabilities this resource provides */
    public Capability[] getCapabilities() {
        return capabilities;
    }

    /** @param capabilities the capabilities to set */
    public void setCapabilities(Capability[] capabilities) {
        this.capabilities = capabilities;
    }

    /** @return the categories the resource belongs to */
    public String[] getCategories() {
        return categories;
    }

    /** @param categories the categories to set */
    public void setCategories(String[] categories) {
        this.categories = categories;
    }

    /** @return the license URL */
    public String getLicenseUrl() {
        return licenseUrl;
    }

    /** @param licenseUrl the license URL to set */
    public void setLicenseUrl(String licenseUrl) {
        this.licenseUrl = licenseUrl;
    }

    /** @return the free-form description */
    public String getDescription() {
        return description;
    }

    /** @param description the free-form description to set */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the documentation URL or text */
    public String getDocumentation() {
        return documentation;
    }

    /** @param documentation the documentation URL or text to set */
    public void setDocumentation(String documentation) {
        this.documentation = documentation;
    }

    /** @return the copyright notice */
    public String getCopyright() {
        return copyright;
    }

    /** @param copyright the copyright notice to set */
    public void setCopyright(String copyright) {
        this.copyright = copyright;
    }

    /** @return the source archive URL */
    public String getSourceUrl() {
        return sourceUrl;
    }

    /** @param sourceUrl the source archive URL to set */
    public void setSourceUrl(String sourceUrl) {
        this.sourceUrl = sourceUrl;
    }

    /** @return the resource size (presumably bytes - TODO confirm against producer) */
    public Long getSize() {
        return size;
    }

    /** @param size the resource size to set */
    public void setSize(Long size) {
        this.size = size;
    }

    /** @return the resource keys */
    public String[] getKeys() {
        return keys;
    }

    /** @param keys the resource keys to set */
    public void setKeys(String[] keys) {
        this.keys = keys;
    }

    /** @return the underlying OBR {@link Resource} */
    public Resource getResource() {
        return resource;
    }

    /** @param resource the underlying OBR {@link Resource} to set */
    public void setResource(Resource resource) {
        this.resource = resource;
    }
}
// // typica - A client library for Amazon Web Services // Copyright (C) 2007 Xerox Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.xerox.amazonws.common; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.IOException; import java.io.StringWriter; import java.net.MalformedURLException; import java.net.SocketException; import java.net.URISyntaxException; import java.net.URL; import java.text.Collator; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import javax.xml.bind.JAXBException; import javax.xml.bind.UnmarshalException; import org.xml.sax.SAXException; import org.apache.http.message.BasicHeader; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpConnectionParams; import org.apache.http.params.HttpParams; import org.apache.http.util.EntityUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.HttpException; import org.apache.http.ParseException; import org.apache.http.client.HttpClient; import 
org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.params.AllClientPNames; import org.apache.http.conn.params.ConnRoutePNames; import org.apache.http.conn.params.ConnPerRouteBean; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.SingleClientConnManager; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.message.BasicNameValuePair; import org.apache.http.auth.Credentials; import org.apache.http.auth.NTCredentials; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.auth.AuthScope; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.google.common.collect.MapMaker; import com.xerox.amazonws.typica.jaxb.Response; import com.xerox.amazonws.typica.sqs2.jaxb.Error; import com.xerox.amazonws.typica.sqs2.jaxb.ErrorResponse; /** * This class provides an interface with the Amazon SQS service. It provides high level * methods for listing and creating message queues. * * @author D. 
Kavanagh * @author developer@dotech.com */
public class AWSQueryConnection extends AWSConnection {

	private static final Log log = LogFactory.getLog(AWSQueryConnection.class);

	// User-Agent header value; the typica version number is appended in the static initializer below.
	private static String userAgent = "typica/";
	// this is the number of automatic retries
	private int maxRetries = 5;
	// lazily built by configureHttpClient(); setters null it out so the next request rebuilds it
	private HttpClient hc = null;
	private int maxConnections = 100;
	private String proxyHost = null;
	private int proxyPort;
	private String proxyUser;
	private String proxyPassword;
	private String proxyDomain;	// for ntlm authentication
	private int connectionManagerTimeout = 0;
	private int soTimeout = 0;
	private int connectionTimeout = 0;
	private TimeZone serverTimeZone = TimeZone.getTimeZone("GMT");
	// recently-used request signatures; makeRequest() waits until a signature is unique
	// (entries expire after 5 minutes — see constructor)
	private Map<String, String> usedSignatures;

	// Append "<version> (<arch>; <os>)" to the User-Agent, reading the version from
	// version.properties on the classpath; any failure leaves version as "?".
	static {
		String version = "?";
		try {
			Properties props = new Properties();
			InputStream verStream = ClassLoader.getSystemResourceAsStream("version.properties");
			try {
				props.load(verStream);
			} finally {
				verStream.close();
			}
			version = props.getProperty("version");
		} catch (Exception ex) { }
		userAgent = userAgent + version + " ("+ System.getProperty("os.arch") + "; " +
							System.getProperty("os.name") + ")";
	}

	/**
	 * Initializes the queue service with your AWS login information.
	 *
	 * @param awsAccessId The your user key into AWS
	 * @param awsSecretKey The secret string used to generate signatures for authentication.
	 * @param isSecure True if the data should be encrypted on the wire on the way to or from SQS.
	 * @param server Which host to connect to.
	 * @param port Which port to use.
	 */
	public AWSQueryConnection(String awsAccessId, String awsSecretKey,
							boolean isSecure, String server, int port) {
		super(awsAccessId, awsSecretKey, isSecure, server, port);
		usedSignatures = new MapMaker().expiration(5, TimeUnit.MINUTES).makeMap();
	}

	/**
	 * This method returns the number of connections that can be open at once.
	 *
	 * @return the number of connections
	 */
	public int getMaxConnections() {
		return maxConnections;
	}

	/**
	 * This method sets the number of connections that can be open at once.
	 * Forces the HttpClient to be rebuilt on the next request.
	 *
	 * @param connections the number of connections
	 */
	public void setMaxConnections(int connections) {
		maxConnections = connections;
		hc = null;
	}

	/**
	 * This method returns the number of times to retry when a recoverable error occurs.
	 *
	 * @return the number of times to retry on recoverable error
	 */
	public int getMaxRetries() {
		return maxRetries;
	}

	/**
	 * This method sets the number of times to retry when a recoverable error occurs.
	 *
	 * @param retries the number of times to retry on recoverable error
	 */
	public void setMaxRetries(int retries) {
		maxRetries = retries;
	}

	/**
	 * This method sets the proxy host and port.
	 * Forces the HttpClient to be rebuilt on the next request.
	 *
	 * @param host the proxy host
	 * @param port the proxy port
	 */
	public void setProxyValues(String host, int port) {
		this.proxyHost = host;
		this.proxyPort = port;
		hc = null;
	}

	/**
	 * This method sets the proxy host, port, user and password (for authenticating proxies).
	 * Forces the HttpClient to be rebuilt on the next request.
	 *
	 * @param host the proxy host
	 * @param port the proxy port
	 * @param user the proxy user
	 * @param password the proxy password
	 */
	public void setProxyValues(String host, int port, String user, String password) {
		this.proxyHost = host;
		this.proxyPort = port;
		this.proxyUser = user;
		this.proxyPassword = password;
		hc = null;
	}

	/**
	 * This method sets the proxy host, port, user, password and domain (for NTLM authentication).
	 * Forces the HttpClient to be rebuilt on the next request.
	 *
	 * @param host the proxy host
	 * @param port the proxy port
	 * @param user the proxy user
	 * @param password the proxy password
	 * @param domain the proxy domain
	 */
	public void setProxyValues(String host, int port, String user, String password, String domain) {
		this.proxyHost = host;
		this.proxyPort = port;
		this.proxyUser = user;
		this.proxyPassword = password;
		this.proxyDomain = domain;
		hc = null;
	}

	/**
	 * This method indicates the system properties should be used for proxy settings. These
	 * properties are http.proxyHost, http.proxyPort, http.proxyUser and http.proxyPassword
	 * (plus http.proxyDomain for NTLM). A blank http.proxyHost is treated as no proxy;
	 * a missing/unparseable http.proxyPort falls back to getPort().
	 */
	public void useSystemProxy() {
		this.proxyHost = System.getProperty("http.proxyHost");
		if (this.proxyHost != null && this.proxyHost.trim().equals("")) {
			proxyHost = null;
		}
		this.proxyPort = getPort();
		try {
			this.proxyPort = Integer.parseInt(System.getProperty("http.proxyPort"));
		} catch (NumberFormatException ex) { /* use default */ }
		this.proxyUser = System.getProperty("http.proxyUser");
		this.proxyPassword = System.getProperty("http.proxyPassword");
		this.proxyDomain = System.getProperty("http.proxyDomain");
		hc = null;
	}

	/**
	 * @see org.apache.http.params.HttpClientParams.getConnectionManagerTimeout()
	 * @return connection manager timeout in milliseconds
	 */
	public int getConnectionManagerTimeout() {
		return connectionManagerTimeout;
	}

	/**
	 * @see org.apache.http.params.HttpClientParams.getConnectionManagerTimeout()
	 * @param timeout connection manager timeout in milliseconds
	 */
	public void setConnectionManagerTimeout(int timeout) {
		connectionManagerTimeout = timeout;
		hc = null;
	}

	/**
	 * @see org.apache.http.params.HttpConnectionParams.getSoTimeout()
	 * @see org.apache.http.params.HttpMethodParams.getSoTimeout()
	 * @return socket timeout in milliseconds
	 */
	public int getSoTimeout() {
		return soTimeout;
	}

	/**
	 * @see org.apache.http.params.HttpConnectionParams.getSoTimeout()
	 * @see org.apache.http.params.HttpMethodParams.getSoTimeout()
	 * @param timeout socket timeout in milliseconds
	 */
	public void setSoTimeout(int timeout) {
		soTimeout = timeout;
		hc = null;
	}

	/**
	 * @see org.apache.http.params.HttpConnectionParams.getConnectionTimeout()
	 * @return connection timeout in milliseconds
	 */
	public int getConnectionTimeout() {
		return connectionTimeout;
	}

	/**
	 * @see org.apache.http.params.HttpConnectionParams.getConnectionTimeout()
	 * @param timeout connection timeout in milliseconds
	 */
	public void setConnectionTimeout(int timeout) {
		connectionTimeout = timeout;
		hc = null;
	}

	/**
	 * This method returns the map of headers for this connection.
	 * NOTE(review): the {@code headers} field is not declared in this class —
	 * presumably inherited from AWSConnection; verify there.
	 *
	 * @return map of headers (modifiable)
	 */
	public Map<String, List<String>> getHeaders() {
		return headers;
	}

	/**
	 * Returns timezone used when creating requests. This is helpful when talking to servers
	 * running in different timezones. Specifically when typica talks with a private Eucalyptus
	 * cluster.
	 *
	 * @return server timezone setting
	 */
	public TimeZone getServerTimeZone() {
		return serverTimeZone;
	}

	/**
	 * Allows setting non-standard server timezone. (see getter comments)
	 *
	 * @param serverTimeZone new timezone of server
	 */
	public void setServerTimeZone(TimeZone serverTimeZone) {
		this.serverTimeZone = serverTimeZone;
	}

	// Lazily configures and caches the HttpClient; any setter that nulls hc forces a rebuild here.
	protected HttpClient getHttpClient() {
		if (hc == null) {
			configureHttpClient();
		}
		return hc;
	}

	// Allows injecting a pre-configured client (e.g. for testing).
	public void setHttpClient(HttpClient hc) {
		this.hc = hc;
	}

	/**
	 * Make a http request and process the response. This method also performs automatic retries.
	 * Flow: build+sign the query params (waiting until the signature is unique within the
	 * 5-minute usedSignatures window), send as POST form body or GET query string, then
	 * parse the response; 5xx and SocketExceptions are retried with exponential backoff,
	 * 4xx raises the parsed AWSException, 3xx raises HttpException.
	 *
	 * @param method The HTTP method to use (GET, POST, DELETE, etc)
	 * @param action the name of the action for this query request
	 * @param params map of request params
	 * @param respType the class that represents the desired/expected return type
	 * @return the response body unmarshalled into respType (null if respType is null)
	 * @throws HttpException on redirect responses or when retries are exhausted
	 * @throws AWSException when the service returns a 4xx client error
	 */
	public <T> T makeRequest(HttpRequestBase method, String action, Map<String, String> params, Class<T> respType)
		throws HttpException, IOException, JAXBException, AWSException, SAXException {
		// add auth params, and protocol specific headers
		Map<String, String> qParams = populateParams(action, params);
		// sort params by key
		ArrayList<String> keys = sortKeys(qParams);
		String signatureString = buildSignatureString(method, qParams, keys);
		//System.err.println("String to sign :"+resource.toString());
		// calculate signature
		String unencoded = encode(getSecretAccessKey(), signatureString.toString(), false);
		String encoded = urlencode(unencoded);
		// wait until we have a signature that hasn't already been used
		// (sleeping 1s advances the Timestamp param, which changes the signature)
		while (usedSignatures.containsKey(encoded)) {
			try { Thread.sleep(1000); } catch (InterruptedException e) { }
			setTimestamp(qParams);
			signatureString = buildSignatureString(method, qParams, keys);
			unencoded = encode(getSecretAccessKey(), signatureString.toString(), false);
			encoded = urlencode(unencoded);
		}
		usedSignatures.put(encoded, encoded);
		//System.err.println("sig = "+encoded);
		// build param string, encoding values and adding request signature
		StringBuilder resource = new StringBuilder();
		if (method.getMethod().equals("POST")) {
			// POST: parameters go in a form-encoded entity body
			ArrayList<BasicNameValuePair> postParams = new ArrayList<BasicNameValuePair>();
			for (String key : keys) {
				postParams.add(new BasicNameValuePair(key, qParams.get(key)));
			}
			postParams.add(new BasicNameValuePair("Signature", unencoded));
			UrlEncodedFormEntity entity = new UrlEncodedFormEntity(postParams, "UTF-8");
			method.setHeader(new BasicHeader("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8"));
			((HttpPost)method).setEntity(entity);
		}
		else {
			// non-POST: parameters go in the query string
			for (String key : keys) {
				resource.append("&");
				resource.append(key);
				resource.append("=");
				resource.append(urlencode(qParams.get(key)));
			}
			resource.setCharAt(0, '?');	// set first param delimeter
			resource.append("&Signature=");
			resource.append(encoded);
		}
		// finally, build request object
		URL url = makeURL(resource.toString());
		try {
			method.setURI(new java.net.URI(url.toString()));
		} catch (URISyntaxException ex) {
			throw new RuntimeException(ex);
		}
		method.setHeader(new BasicHeader("User-Agent", userAgent));
		if (getSignatureVersion() == 0) {
			method.setHeader(new BasicHeader("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8"));
		}
		Object response = null;
		boolean done = false;
		int retries = 0;
		boolean doRetry = false;
		AWSException error = null;
		HttpResponse httpResponse = null;
		do {
			int responseCode = 600;	// default to high value, so we don't think it is valid
			try {
				httpResponse = getHttpClient().execute(method);
				responseCode = httpResponse.getStatusLine().getStatusCode();
			} catch (SocketException ex) {
				// these can generally be retried. Treat it like a 500 error
				doRetry = true;
				error = new AWSException(ex.getMessage(), ex);
			}
			// 100's are these are handled by httpclient
			if (responseCode < 300) {
				// 200's : parse normal response into requested object
				if (respType != null) {
					InputStream iStr = httpResponse.getEntity().getContent();
					response = JAXBuddy.deserializeXMLStream(respType, iStr);
				}
				done = true;
			}
			else if (responseCode < 400) {
				// 300's : what to do?
				throw new HttpException("redirect error : "+responseCode);
			}
			else if (responseCode < 500) {
				// 400's : parse client error message
				String body = getString(httpResponse.getEntity());
				throw createException(body, "Client error : ");
			}
			else if (responseCode < 600) {
				// 500's : retry...
				doRetry = true;
				String body = getString(httpResponse.getEntity());
				error = createException(body, "");
			}
			if (doRetry) {
				retries++;
				if (retries > maxRetries) {
					throw new HttpException("Number of retries exceeded : "+action, error);
				}
				doRetry = false;
				// randomized exponential backoff: up to 4^(retries-1) * 100 ms
				try { Thread.sleep((long)(Math.random() * (Math.pow(4, (retries-1))*100L))); }
				catch (InterruptedException ex) {}
			}
		} while (!done);
		return (T)response;
	}

	/**
	 * Builds the canonical string-to-sign for the configured signature version:
	 * v0 concatenates Action+Timestamp, v2 uses the AWS "METHOD\nhost\npath\nsorted-params"
	 * form, and any other version concatenates sorted key/value pairs (v1 style).
	 */
	private String buildSignatureString(HttpRequestBase method, Map<String, String> qParams, ArrayList<String> keys)
			throws MalformedURLException {
		StringBuilder resource = new StringBuilder();
		if (getSignatureVersion() == 0) {
			// ensure Action, Timestamp come first!
			resource.append(qParams.get("Action"));
			resource.append(qParams.get("Timestamp"));
		}
		else if (getSignatureVersion() == 2) {
			resource.append(method.getMethod());
			resource.append("\n");
			resource.append(getServer().toLowerCase());
			resource.append("\n/");
			String reqURL = makeURL("").toString();
			// see if there is something after the host:port/ in the URL
			if (reqURL.lastIndexOf('/') < (reqURL.length()-1)) {
				// if so, put that here in the string to sign
				// make sure we slice and dice at the right '/'
				int idx = reqURL.lastIndexOf(':');
				resource.append(reqURL.substring(reqURL.indexOf('/', idx)+1));
			}
			resource.append("\n");
			boolean first = true;
			for (String key : keys) {
				if (!first) {
					resource.append("&");
				}
				else { first = false; }
				resource.append(key);
				resource.append("=");
				resource.append(urlencode(qParams.get(key)));
				// System.err.println("encoded params "+key+" :"+(urlencode(qParams.get(key))));
			}
		}
		else {
			for (String key : keys) {
				resource.append(key);
				resource.append(qParams.get(key));
			}
		}
		return resource.toString();
	}

	// Sorts parameter keys: byte-order (natural) for signature v2, else a case-insensitive
	// PRIMARY-strength collation as required by the older signing scheme.
	private ArrayList<String> sortKeys(Map<String, String> qParams) {
		ArrayList<String> keys = new ArrayList<String>(qParams.keySet());
		if (getSignatureVersion() == 2) {
			Collections.sort(keys);
		}
		else {
			Collator stringCollator = Collator.getInstance();
			stringCollator.setStrength(Collator.PRIMARY);
			Collections.sort(keys, stringCollator);
		}
		return keys;
	}

	// Copies the caller's params (may be null) and adds the common AWS query params:
	// Action, AWSAccessKeyId, SignatureVersion, Timestamp, and (v2 only) SignatureMethod,
	// plus any connection-level headers. NOTE(review): when a header key has multiple
	// values, later values overwrite earlier ones in qParams.
	private Map<String, String> populateParams(String action, Map<String, String> params) {
		Map<String, String> qParams;
		if (params != null) {
			qParams = new HashMap<String, String>(params);
		}
		else {
			qParams = new HashMap<String, String>();
		}
		qParams.put("Action", action);
		qParams.put("AWSAccessKeyId", getAwsAccessKeyId());
		qParams.put("SignatureVersion", ""+getSignatureVersion());
		setTimestamp(qParams);
		if (getSignatureVersion() == 2) {
			qParams.put("SignatureMethod", getAlgorithm());
		}
		if (headers != null) {
			for (Iterator<String> i = headers.keySet().iterator(); i.hasNext(); ) {
				String key = i.next();
				for (Iterator<String> j = headers.get(key).iterator(); j.hasNext(); ) {
					qParams.put(key, j.next());
				}
			}
		}
		return qParams;
	}

	// Stamps the request with the current time formatted in the configured server timezone.
	private void setTimestamp(Map<String, String> qParams) {
		qParams.put("Timestamp", httpDate(serverTimeZone));
	}

	// Builds the cached HttpClient from the current timeout/connection/proxy settings.
	private void configureHttpClient() {
		HttpParams params = new BasicHttpParams();
		HttpConnectionParams.setConnectionTimeout(params, connectionTimeout);
		HttpConnectionParams.setSoTimeout(params, soTimeout);
		params.setParameter(AllClientPNames.MAX_TOTAL_CONNECTIONS, new Integer(maxConnections));
		params.setParameter(AllClientPNames.VIRTUAL_HOST, getServer());
		params.setParameter(AllClientPNames.MAX_CONNECTIONS_PER_ROUTE, new ConnPerRouteBean(maxConnections));
		SchemeRegistry registry = new SchemeRegistry();
		registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
		registry.register(new Scheme("https", SSLSocketFactory.getSocketFactory(), 443));
		// 8773 is the default Eucalyptus service port
		registry.register(new Scheme("https", SSLSocketFactory.getSocketFactory(), 8773));
		ThreadSafeClientConnManager connMgr = new ThreadSafeClientConnManager(params, registry);
		//SingleClientConnManager connMgr = new SingleClientConnManager(params, registry);
		hc = new TypicaHttpClient(connMgr, params);
		//hc = new DefaultHttpClient(connMgr, params);
		if (proxyHost != null) {
			DefaultHttpClient defaultHC = (DefaultHttpClient) hc;
			log.info("Proxy Host set to "+proxyHost+":"+proxyPort);
			HttpHost proxy = new HttpHost(proxyHost, proxyPort);
			defaultHC.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
			if (proxyUser != null && !proxyUser.trim().equals("")) {
				AuthScope scope = new AuthScope(proxyHost, proxyPort);
				Credentials creds = null;
				if (proxyDomain != null) {
					// NTLM authentication when a domain was supplied
					creds = new NTCredentials(proxyUser, proxyPassword, proxyHost, proxyDomain);
				}
				else {
					creds = new UsernamePasswordCredentials(proxyUser, proxyPassword);
				}
				defaultHC.getCredentialsProvider().setCredentials(scope, creds);
			}
		}
	}

	// Reads an entity body fully into a String; null entity yields null.
	protected String getString(HttpEntity entity) {
		if (entity == null) {
			return null;
		}
		else {
			try {
				return EntityUtils.toString(entity);
			} catch (Exception e) {
				throw new RuntimeException(e);
			}
		}
	}

	// Best-effort release of an entity's underlying resources; failures are deliberately ignored.
	protected void close(HttpEntity entity) {
		if (entity != null) {
			try {
				entity.consumeContent();
			} catch (Exception ignore) {
				// ignored
			}
		}
	}

	// Best-effort close of a stream; failures are deliberately ignored.
	protected void close(InputStream istream) {
		if (istream != null) {
			try {
				istream.close();
			} catch (Exception ignored) {
				// ignored
			}
		}
	}

	/**
	 * This method creates a detail packed exception to pass up.
	 * The body is tried against each known ErrorResponse schema in turn (SQS2, DevpayLS,
	 * Monitoring, ELB, AutoScaling, SNS — identical shapes in different namespaces);
	 * bodies without an &lt;ErrorResponse&gt; element are handled as Eucalyptus SOAP faults
	 * or the legacy &lt;Response&gt; format. Unparseable bodies yield a generic message.
	 */
	private AWSException createException(String errorResponse, String msgPrefix)
			throws IOException, JAXBException, SAXException {
		String errorMsg;
		String requestId;
		List<AWSError> errors = null;
		// NOTE(review): getBytes() uses the platform default charset here — confirm UTF-8 assumption
		ByteArrayInputStream bais = new ByteArrayInputStream(errorResponse.getBytes());
		if (errorResponse.indexOf("<ErrorResponse") > -1) {
			try {
				// this comes from the SQS2 schema, and is the standard new response
				ErrorResponse resp = JAXBuddy.deserializeXMLStream(ErrorResponse.class, bais);
				List<Error> errs = resp.getErrors();
				errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
				requestId = resp.getRequestId();
				errors = new ArrayList<AWSError>();
				for (Error e : errs) {
					errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
				}
			} catch (UnmarshalException ex) {
				try {
					// this comes from the DevpayLS schema, duplicated because of the different namespace
					bais = new ByteArrayInputStream(errorResponse.getBytes());
					com.xerox.amazonws.typica.jaxb.ErrorResponse resp =
							JAXBuddy.deserializeXMLStream(com.xerox.amazonws.typica.jaxb.ErrorResponse.class, bais);
					List<com.xerox.amazonws.typica.jaxb.Error> errs = resp.getErrors();
					errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
					requestId = resp.getRequestID();
					errors = new ArrayList<AWSError>();
					for (com.xerox.amazonws.typica.jaxb.Error e : errs) {
						errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
					}
				} catch (UnmarshalException ex2) {
					try {
						// this comes from the Monitoring schema, duplicated because of the different namespace
						bais = new ByteArrayInputStream(errorResponse.getBytes());
						com.xerox.amazonws.typica.monitor.jaxb.ErrorResponse resp =
								JAXBuddy.deserializeXMLStream(com.xerox.amazonws.typica.monitor.jaxb.ErrorResponse.class, bais);
						List<com.xerox.amazonws.typica.monitor.jaxb.Error> errs = resp.getErrors();
						errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
						requestId = resp.getRequestId();
						errors = new ArrayList<AWSError>();
						for (com.xerox.amazonws.typica.monitor.jaxb.Error e : errs) {
							errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
						}
					} catch (UnmarshalException ex3) {
						try {
							// this comes from the ELB schema, duplicated because of the different namespace
							bais = new ByteArrayInputStream(errorResponse.getBytes());
							com.xerox.amazonws.typica.loadbalance.jaxb.ErrorResponse resp =
									JAXBuddy.deserializeXMLStream(com.xerox.amazonws.typica.loadbalance.jaxb.ErrorResponse.class, bais);
							List<com.xerox.amazonws.typica.loadbalance.jaxb.Error> errs = resp.getErrors();
							errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
							requestId = resp.getRequestId();
							errors = new ArrayList<AWSError>();
							for (com.xerox.amazonws.typica.loadbalance.jaxb.Error e : errs) {
								errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
							}
						} catch (UnmarshalException ex4) {
							try {
								// this comes from the scaling schema, duplicated because of the different namespace
								bais = new ByteArrayInputStream(errorResponse.getBytes());
								com.xerox.amazonws.typica.autoscale.jaxb.ErrorResponse resp =
										JAXBuddy.deserializeXMLStream(com.xerox.amazonws.typica.autoscale.jaxb.ErrorResponse.class, bais);
								List<com.xerox.amazonws.typica.autoscale.jaxb.Error> errs = resp.getErrors();
								errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
								requestId = resp.getRequestId();
								errors = new ArrayList<AWSError>();
								for (com.xerox.amazonws.typica.autoscale.jaxb.Error e : errs) {
									errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
								}
							} catch (UnmarshalException ex5) {
								try {
									// this comes from the notification schema, duplicated because of the different namespace
									bais = new ByteArrayInputStream(errorResponse.getBytes());
									com.xerox.amazonws.typica.sns.jaxb.ErrorResponse resp =
											JAXBuddy.deserializeXMLStream(com.xerox.amazonws.typica.sns.jaxb.ErrorResponse.class, bais);
									List<com.xerox.amazonws.typica.sns.jaxb.Error> errs = resp.getErrors();
									errorMsg = "("+errs.get(0).getCode()+") "+errs.get(0).getMessage();
									requestId = resp.getRequestId();
									errors = new ArrayList<AWSError>();
									for (com.xerox.amazonws.typica.sns.jaxb.Error e : errs) {
										errors.add(new AWSError(AWSError.ErrorType.getTypeFromString(e.getType()), e.getCode(), e.getMessage()));
									}
								} catch (UnmarshalException ex6) {
									// no known schema matched; fall back to a generic message
									errorMsg = "Couldn't parse error response!";
									requestId = "???";
									log.error(errorMsg, ex6);
									log.info("response = "+errorResponse);
								}
							}
						}
					}
				}
			}
		}
		else {
			// this clause to parse Eucalyptus errors, until they get with the program!
			if (errorResponse.indexOf("<soapenv:Reason") > -1) {
				int idx = errorResponse.indexOf("Text xml:lang=\"en-US\">");
				errorMsg = errorResponse.substring(idx+22);	// this number tied to string in line above
				int idx2 = errorMsg.indexOf("<");
				errorMsg = errorMsg.substring(0, idx2);
				requestId = "NA";
				errors = new ArrayList<AWSError>();
				errors.add(new AWSError(AWSError.ErrorType.SENDER, "unknown", errorMsg));
			}
			else {
				try {
					// legacy <Response> error format
					Response resp = JAXBuddy.deserializeXMLStream(Response.class, bais);
					String errorCode = resp.getErrors().getError().getCode();
					errorMsg = resp.getErrors().getError().getMessage();
					requestId = resp.getRequestID();
					if (errorCode != null && !errorCode.trim().equals("")) {
						errors = new ArrayList<AWSError>();
						errors.add(new AWSError(AWSError.ErrorType.SENDER, errorCode, errorMsg));
					}
				} catch (SAXException ex) {
					errorMsg = "Couldn't parse error response!";
					requestId = "???";
					log.error(errorMsg, ex);
					log.info("response = "+errorResponse);
				} catch (UnmarshalException ex2) {
					errorMsg = "Couldn't parse error response!";
					requestId = "???";
					log.error(errorMsg, ex2);
					log.info("response = "+errorResponse);
				}
			}
		}
		return new AWSException(msgPrefix + errorMsg, requestId, errors);
	}

	/**
	 * Generate an rfc822 date for use in the Date HTTP header.
	 * (Despite the name/comment, the format produced is ISO-8601 "yyyy-MM-dd'T'HH:mm:ss'Z'".)
	 */
	private static String httpDate(TimeZone serverTimeZone) {
		//final String DateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'";
		final String DateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'";
		SimpleDateFormat format = new SimpleDateFormat( DateFormat, Locale.US );
		format.setTimeZone(serverTimeZone);
		return format.format( new Date() );
	}

	// Formats the given calendar in the same ISO-8601 pattern, using the configured server timezone.
	protected String httpDate(Calendar date) {
		final String DateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'";
		SimpleDateFormat format = new SimpleDateFormat(DateFormat, Locale.US);
		format.setTimeZone(serverTimeZone);
		return format.format(date.getTime());
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;

/**
 * Metadata field mapper for the {@code _all} field: a catch-all field that
 * aggregates the values of every other field so a query can match "anywhere
 * in the document". Content is gathered during parsing into
 * {@code context.allEntries()} and indexed after the rest of the document
 * (see {@link #postParse}). The field is always tokenized and can never have
 * doc values.
 */
public class AllFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_all";

    public static final String CONTENT_TYPE = "_all";

    /** Default settings for the {@code _all} field type. */
    public static class Defaults {
        public static final String NAME = AllFieldMapper.NAME;
        public static final String INDEX_NAME = AllFieldMapper.NAME;
        // UNSET so merges can tell "explicitly configured" apart from "left at default".
        public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED;
        // Gap inserted between values of different source fields so phrase
        // queries do not match across field boundaries.
        public static final int POSITION_INCREMENT_GAP = 100;

        public static final MappedFieldType FIELD_TYPE = new AllFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
            FIELD_TYPE.setTokenized(true);
            FIELD_TYPE.setName(NAME);
            FIELD_TYPE.freeze();
        }
    }

    /** Builder that configures and constructs an {@link AllFieldMapper}. */
    public static class Builder extends MetadataFieldMapper.Builder<Builder, AllFieldMapper> {

        private EnabledAttributeMapper enabled = Defaults.ENABLED;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            builder = this;
        }

        public Builder enabled(EnabledAttributeMapper enabled) {
            this.enabled = enabled;
            return this;
        }

        @Override
        public AllFieldMapper build(BuilderContext context) {
            // In case the mapping overrides these.
            // TODO: this should be an exception! It doesn't make sense to not index this field.
            if (fieldType.indexOptions() == IndexOptions.NONE) {
                fieldType.setIndexOptions(Defaults.FIELD_TYPE.indexOptions());
            } else {
                // Wrap the analyzers so the position-increment gap separates
                // tokens contributed by different source fields.
                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), Defaults.POSITION_INCREMENT_GAP));
                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), Defaults.POSITION_INCREMENT_GAP));
                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), Defaults.POSITION_INCREMENT_GAP));
            }
            fieldType.setTokenized(true);

            return new AllFieldMapper(fieldType, enabled, context.indexSettings());
        }
    }

    /** Parses the {@code _all} section of a mapping into a {@link Builder}. */
    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node,
                                                 ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
            builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
            builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
            builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());

            // parseField below will happily parse the doc_values setting, but it is then never passed to
            // the AllFieldMapper ctor in the builder since it is not valid. Here we validate that
            // the doc values settings (old and new style) are rejected explicitly.
            Object docValues = node.get("doc_values");
            if (docValues != null && lenientNodeBooleanValue(docValues)) {
                throw new MapperParsingException("Field [" + name + "] is always tokenized and cannot have doc values");
            }
            // Legacy (convoluted) way of requesting doc values: fielddata format.
            Object fielddata = node.get("fielddata");
            if (fielddata != null) {
                Map<String, Object> fielddataMap = nodeMapValue(fielddata, "fielddata");
                Object format = fielddataMap.get("format");
                if ("doc_values".equals(format)) {
                    throw new MapperParsingException("Field [" + name + "] is always tokenized and cannot have doc values");
                }
            }
            parseTextField(builder, builder.name, node, parserContext);
            // Consume the settings specific to _all; iterator.remove() marks them as handled.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
                    iterator.remove();
                }
            }
            return builder;
        }

        @Override
        public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
            return new AllFieldMapper(indexSettings, fieldType);
        }
    }

    /** Field type for {@code _all}; term queries are rewritten to {@link AllTermQuery}. */
    static final class AllFieldType extends StringFieldType {

        public AllFieldType() {
        }

        protected AllFieldType(AllFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new AllFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query queryStringTermQuery(Term term) {
            // AllTermQuery takes the per-entry boost encoded in the payloads into account.
            return new AllTermQuery(term);
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            return queryStringTermQuery(new Term(name(), indexedValueForSearch(value)));
        }
    }

    // Whether _all indexing is enabled; UNSET_ENABLED means "default" (see doMerge).
    private EnabledAttributeMapper enabledState;

    private AllFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.ENABLED, indexSettings);
    }

    private AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, Settings indexSettings) {
        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
        this.enabledState = enabled;
    }

    /** Returns true if values are being aggregated into and indexed under {@code _all}. */
    public boolean enabled() {
        return this.enabledState.enabled;
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
        // Indexing happens here, after the whole document has been parsed and
        // all field values have been collected into context.allEntries().
        super.parse(context);
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException {
        // we parse in post parse
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        if (!enabledState.enabled) {
            return;
        }
        // One AllField per collected entry, carrying the source field's boost.
        for (AllEntries.Entry entry : context.allEntries().entries()) {
            fields.add(new AllField(fieldType().name(), entry.value(), entry.boost(), fieldType()));
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        if (!includeDefaults) {
            // Simulate the generation to make sure we don't add unnecessary content if all is default:
            // if everything is at its default, skip emitting the "_all" object entirely
            // (generating twice is acceptable).
            BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
            XContentBuilder b =  new XContentBuilder(builder.contentType().xContent(), bytesStreamOutput);
            b.startObject().flush();
            long pos = bytesStreamOutput.position();
            innerToXContent(b, false);
            b.flush();
            // If no field moved the stream position, nothing non-default was written.
            if (pos == bytesStreamOutput.position()) {
                return builder;
            }
        }
        builder.startObject(CONTENT_TYPE);
        innerToXContent(builder, includeDefaults);
        builder.endObject();
        return builder;
    }

    // Emits only the settings that differ from the defaults (or everything when
    // includeDefaults is set).
    private void innerToXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
        if (includeDefaults || enabledState != Defaults.ENABLED) {
            builder.field("enabled", enabledState.enabled);
        }
        if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
            builder.field("store", fieldType().stored());
        }
        if (includeDefaults || fieldType().storeTermVectors() != Defaults.FIELD_TYPE.storeTermVectors()) {
            builder.field("store_term_vectors", fieldType().storeTermVectors());
        }
        if (includeDefaults || fieldType().storeTermVectorOffsets() != Defaults.FIELD_TYPE.storeTermVectorOffsets()) {
            builder.field("store_term_vector_offsets", fieldType().storeTermVectorOffsets());
        }
        if (includeDefaults || fieldType().storeTermVectorPositions() != Defaults.FIELD_TYPE.storeTermVectorPositions()) {
            builder.field("store_term_vector_positions", fieldType().storeTermVectorPositions());
        }
        if (includeDefaults || fieldType().storeTermVectorPayloads() != Defaults.FIELD_TYPE.storeTermVectorPayloads()) {
            builder.field("store_term_vector_payloads", fieldType().storeTermVectorPayloads());
        }
        if (includeDefaults || fieldType().omitNorms() != Defaults.FIELD_TYPE.omitNorms()) {
            // Serialized as "norms" (the positive form), hence the negation.
            builder.field("norms", !fieldType().omitNorms());
        }
        doXContentAnalyzers(builder, includeDefaults);

        if (fieldType().similarity() != null) {
            builder.field("similarity", fieldType().similarity().name());
        } else if (includeDefaults) {
            builder.field("similarity", SimilarityService.DEFAULT_SIMILARITY);
        }
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        // Flipping enabled on/off after the fact is rejected; an UNSET incoming
        // value means "not explicitly configured" and is allowed to pass through.
        if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
            throw new IllegalArgumentException("mapper [" + fieldType().name() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
        }
        super.doMerge(mergeWith, updateAllTypes);
    }

    @Override
    public boolean isGenerated() {
        // The field's content is derived from other fields, never supplied directly.
        return true;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;

import com.google.common.annotations.VisibleForTesting;

/**
 * Service that loads the fair scheduler allocation (fair-scheduler.xml) file
 * and periodically re-reads it, notifying a registered {@link Listener}
 * whenever a reload succeeds.
 */
@Public
@Unstable
public class AllocationFileLoaderService extends AbstractService {

  public static final Log LOG = LogFactory.getLog(
      AllocationFileLoaderService.class.getName());

  /** Time to wait between checks of the allocation file */
  public static final long ALLOC_RELOAD_INTERVAL_MS = 10 * 1000;

  /**
   * Time to wait after the allocation has been modified before reloading it
   * (this is done to prevent loading a file that hasn't been fully written).
   */
  public static final long ALLOC_RELOAD_WAIT_MS = 5 * 1000;

  public static final long THREAD_JOIN_TIMEOUT_MS = 1000;

  private final Clock clock;

  // Last time we successfully reloaded queues
  private long lastSuccessfulReload;
  // Whether the most recent reload attempt failed (used to log each failure
  // only once instead of on every poll).
  private boolean lastReloadAttemptFailed = false;

  // Path to XML file containing allocations.
  private File allocFile;

  // Callback invoked with the parsed configuration after a successful reload.
  private Listener reloadListener;

  @VisibleForTesting
  long reloadIntervalMs = ALLOC_RELOAD_INTERVAL_MS;

  private Thread reloadThread;
  private volatile boolean running = true;

  public AllocationFileLoaderService() {
    this(new SystemClock());
  }

  /**
   * @param clock clock used to timestamp reloads; injectable for tests.
   */
  public AllocationFileLoaderService(Clock clock) {
    super(AllocationFileLoaderService.class.getName());
    this.clock = clock;
  }

  @Override
  public void serviceInit(Configuration conf) throws Exception {
    this.allocFile = getAllocationFile(conf);
    if (allocFile != null) {
      // Poll the file's modification time; reload only once the file has been
      // stable for ALLOC_RELOAD_WAIT_MS so we never parse a half-written file.
      reloadThread = new Thread() {
        @Override
        public void run() {
          while (running) {
            long time = clock.getTime();
            long lastModified = allocFile.lastModified();
            if (lastModified > lastSuccessfulReload &&
                time > lastModified + ALLOC_RELOAD_WAIT_MS) {
              try {
                reloadAllocations();
              } catch (Exception ex) {
                if (!lastReloadAttemptFailed) {
                  LOG.error("Failed to reload fair scheduler config file - " +
                      "will use existing allocations.", ex);
                }
                lastReloadAttemptFailed = true;
              }
            } else if (lastModified == 0L) {
              // lastModified() returns 0 when the file is missing or unreadable.
              if (!lastReloadAttemptFailed) {
                LOG.warn("Failed to reload fair scheduler config file because" +
                    " last modified returned 0. File exists: "
                    + allocFile.exists());
              }
              lastReloadAttemptFailed = true;
            }
            try {
              Thread.sleep(reloadIntervalMs);
            } catch (InterruptedException ex) {
              LOG.info(
                  "Interrupted while waiting to reload alloc configuration");
            }
          }
        }
      };
      reloadThread.setName("AllocationFileReloader");
      reloadThread.setDaemon(true);
    }
    super.serviceInit(conf);
  }

  @Override
  public void serviceStart() throws Exception {
    if (reloadThread != null) {
      reloadThread.start();
    }
    super.serviceStart();
  }

  @Override
  public void serviceStop() throws Exception {
    running = false;
    if (reloadThread != null) {
      reloadThread.interrupt();
      try {
        reloadThread.join(THREAD_JOIN_TIMEOUT_MS);
      } catch (InterruptedException e) {
        LOG.warn("reloadThread fails to join.");
      }
    }
    super.serviceStop();
  }

  /**
   * Path to XML file containing allocations. If the
   * path is relative, it is searched for in the
   * classpath, but loaded like a regular File.
   */
  public File getAllocationFile(Configuration conf) {
    String allocFilePath = conf.get(FairSchedulerConfiguration.ALLOCATION_FILE,
        FairSchedulerConfiguration.DEFAULT_ALLOCATION_FILE);
    File allocFile = new File(allocFilePath);
    if (!allocFile.isAbsolute()) {
      URL url = Thread.currentThread().getContextClassLoader()
          .getResource(allocFilePath);
      if (url == null) {
        LOG.warn(allocFilePath + " not found on the classpath.");
        allocFile = null;
      } else if (!url.getProtocol().equalsIgnoreCase("file")) {
        throw new RuntimeException("Allocation file " + url
            + " found on the classpath is not on the local filesystem.");
      } else {
        allocFile = new File(url.getPath());
      }
    }
    return allocFile;
  }

  public synchronized void setReloadListener(Listener reloadListener) {
    this.reloadListener = reloadListener;
  }

  /**
   * Updates the allocation list from the allocation config file. This file is
   * expected to be in the XML format specified in the design doc.
   *
   * @throws IOException if the config file cannot be read.
   * @throws AllocationConfigurationException if allocations are invalid.
   * @throws ParserConfigurationException if XML parser is misconfigured.
   * @throws SAXException if config file is malformed.
   */
  public synchronized void reloadAllocations() throws IOException,
      ParserConfigurationException, SAXException,
      AllocationConfigurationException {
    if (allocFile == null) {
      return;
    }
    LOG.info("Loading allocation file " + allocFile);
    // Create some temporary hashmaps to hold the new allocs, and we only save
    // them in our fields if we have parsed the entire allocs file successfully.
    Map<String, Resource> minQueueResources = new HashMap<String, Resource>();
    Map<String, Resource> maxQueueResources = new HashMap<String, Resource>();
    Map<String, Integer> queueMaxApps = new HashMap<String, Integer>();
    Map<String, Integer> userMaxApps = new HashMap<String, Integer>();
    Map<String, Float> queueMaxAMShares = new HashMap<String, Float>();
    Map<String, ResourceWeights> queueWeights =
        new HashMap<String, ResourceWeights>();
    Map<String, SchedulingPolicy> queuePolicies =
        new HashMap<String, SchedulingPolicy>();
    Map<String, Long> minSharePreemptionTimeouts = new HashMap<String, Long>();
    Map<String, Long> fairSharePreemptionTimeouts = new HashMap<String, Long>();
    Map<String, Float> fairSharePreemptionThresholds =
        new HashMap<String, Float>();
    Map<String, Map<QueueACL, AccessControlList>> queueAcls =
        new HashMap<String, Map<QueueACL, AccessControlList>>();
    int userMaxAppsDefault = Integer.MAX_VALUE;
    int queueMaxAppsDefault = Integer.MAX_VALUE;
    float queueMaxAMShareDefault = 0.5f;
    long defaultFairSharePreemptionTimeout = Long.MAX_VALUE;
    long defaultMinSharePreemptionTimeout = Long.MAX_VALUE;
    float defaultFairSharePreemptionThreshold = 0.5f;
    SchedulingPolicy defaultSchedPolicy = SchedulingPolicy.DEFAULT_POLICY;

    QueuePlacementPolicy newPlacementPolicy = null;

    // Remember all queue names so we can display them on web UI, etc.
    // configuredQueues is segregated based on whether it is a leaf queue
    // or a parent queue. This information is used for creating queues
    // and also for making queue placement decisions(QueuePlacementRule.java).
    Map<FSQueueType, Set<String>> configuredQueues =
        new HashMap<FSQueueType, Set<String>>();
    for (FSQueueType queueType : FSQueueType.values()) {
      configuredQueues.put(queueType, new HashSet<String>());
    }

    // Read and parse the allocations file.
    DocumentBuilderFactory docBuilderFactory =
        DocumentBuilderFactory.newInstance();
    docBuilderFactory.setIgnoringComments(true);
    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
    Document doc = builder.parse(allocFile);
    Element root = doc.getDocumentElement();
    if (!"allocations".equals(root.getTagName()))
      throw new AllocationConfigurationException("Bad fair scheduler config " +
          "file: top-level element not <allocations>");
    NodeList elements = root.getChildNodes();
    List<Element> queueElements = new ArrayList<Element>();
    Element placementPolicyElement = null;
    for (int i = 0; i < elements.getLength(); i++) {
      Node node = elements.item(i);
      if (node instanceof Element) {
        Element element = (Element)node;
        if ("queue".equals(element.getTagName()) ||
            "pool".equals(element.getTagName())) {
          queueElements.add(element);
        } else if ("user".equals(element.getTagName())) {
          String userName = element.getAttribute("name");
          NodeList fields = element.getChildNodes();
          for (int j = 0; j < fields.getLength(); j++) {
            Node fieldNode = fields.item(j);
            if (!(fieldNode instanceof Element))
              continue;
            Element field = (Element) fieldNode;
            if ("maxRunningApps".equals(field.getTagName())) {
              String text = ((Text)field.getFirstChild()).getData().trim();
              int val = Integer.parseInt(text);
              userMaxApps.put(userName, val);
            }
          }
        } else if ("userMaxAppsDefault".equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          int val = Integer.parseInt(text);
          userMaxAppsDefault = val;
        } else if ("defaultFairSharePreemptionTimeout"
            .equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          long val = Long.parseLong(text) * 1000L;
          defaultFairSharePreemptionTimeout = val;
        } else if ("fairSharePreemptionTimeout".equals(element.getTagName())) {
          // Deprecated alias; honored only when the new-style default has not
          // already been set.
          if (defaultFairSharePreemptionTimeout == Long.MAX_VALUE) {
            String text = ((Text)element.getFirstChild()).getData().trim();
            long val = Long.parseLong(text) * 1000L;
            defaultFairSharePreemptionTimeout = val;
          }
        } else if ("defaultMinSharePreemptionTimeout"
            .equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          long val = Long.parseLong(text) * 1000L;
          defaultMinSharePreemptionTimeout = val;
        } else if ("defaultFairSharePreemptionThreshold"
            .equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          float val = Float.parseFloat(text);
          // Clamp to the valid [0, 1] range.
          val = Math.max(Math.min(val, 1.0f), 0.0f);
          defaultFairSharePreemptionThreshold = val;
        } else if ("queueMaxAppsDefault".equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          int val = Integer.parseInt(text);
          queueMaxAppsDefault = val;
        } else if ("queueMaxAMShareDefault".equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          float val = Float.parseFloat(text);
          val = Math.min(val, 1.0f);
          queueMaxAMShareDefault = val;
        } else if ("defaultQueueSchedulingPolicy".equals(element.getTagName())
            || "defaultQueueSchedulingMode".equals(element.getTagName())) {
          String text = ((Text)element.getFirstChild()).getData().trim();
          defaultSchedPolicy = SchedulingPolicy.parse(text);
        } else if ("queuePlacementPolicy".equals(element.getTagName())) {
          placementPolicyElement = element;
        } else {
          LOG.warn("Bad element in allocations file: " + element.getTagName());
        }
      }
    }

    // Load queue elements. A root queue can either be included or omitted. If
    // it's included, all other queues must be inside it.
    for (Element element : queueElements) {
      String parent = "root";
      if (element.getAttribute("name").equalsIgnoreCase("root")) {
        if (queueElements.size() > 1) {
          throw new AllocationConfigurationException("If configuring root queue,"
              + " no other queues can be placed alongside it.");
        }
        parent = null;
      }
      loadQueue(parent, element, minQueueResources, maxQueueResources,
          queueMaxApps, userMaxApps, queueMaxAMShares, queueWeights,
          queuePolicies, minSharePreemptionTimeouts,
          fairSharePreemptionTimeouts, fairSharePreemptionThresholds, queueAcls,
          configuredQueues);
    }

    // Load placement policy and pass it configured queues
    Configuration conf = getConfig();
    if (placementPolicyElement != null) {
      newPlacementPolicy = QueuePlacementPolicy.fromXml(placementPolicyElement,
          configuredQueues, conf);
    } else {
      newPlacementPolicy = QueuePlacementPolicy.fromConfiguration(conf,
          configuredQueues);
    }

    // Set the min/fair share preemption timeout for the root queue
    if (!minSharePreemptionTimeouts.containsKey(QueueManager.ROOT_QUEUE)){
      minSharePreemptionTimeouts.put(QueueManager.ROOT_QUEUE,
          defaultMinSharePreemptionTimeout);
    }
    if (!fairSharePreemptionTimeouts.containsKey(QueueManager.ROOT_QUEUE)) {
      fairSharePreemptionTimeouts.put(QueueManager.ROOT_QUEUE,
          defaultFairSharePreemptionTimeout);
    }

    // Set the fair share preemption threshold for the root queue
    if (!fairSharePreemptionThresholds.containsKey(QueueManager.ROOT_QUEUE)) {
      fairSharePreemptionThresholds.put(QueueManager.ROOT_QUEUE,
          defaultFairSharePreemptionThreshold);
    }

    AllocationConfiguration info = new AllocationConfiguration(minQueueResources,
        maxQueueResources, queueMaxApps, userMaxApps, queueWeights,
        queueMaxAMShares, userMaxAppsDefault, queueMaxAppsDefault,
        queueMaxAMShareDefault, queuePolicies, defaultSchedPolicy,
        minSharePreemptionTimeouts, fairSharePreemptionTimeouts,
        fairSharePreemptionThresholds, queueAcls, newPlacementPolicy,
        configuredQueues);

    lastSuccessfulReload = clock.getTime();
    lastReloadAttemptFailed = false;

    // NOTE(review): assumes setReloadListener() has been called before the
    // first reload; otherwise this throws NPE — confirm with callers.
    reloadListener.onReload(info);
  }

  /**
   * Loads a queue from a queue element in the configuration file, recursing
   * into nested queue elements and recording per-queue settings into the
   * supplied maps (keyed by the fully-qualified queue name).
   */
  private void loadQueue(String parentName, Element element,
      Map<String, Resource> minQueueResources,
      Map<String, Resource> maxQueueResources,
      Map<String, Integer> queueMaxApps,
      Map<String, Integer> userMaxApps,
      Map<String, Float> queueMaxAMShares,
      Map<String, ResourceWeights> queueWeights,
      Map<String, SchedulingPolicy> queuePolicies,
      Map<String, Long> minSharePreemptionTimeouts,
      Map<String, Long> fairSharePreemptionTimeouts,
      Map<String, Float> fairSharePreemptionThresholds,
      Map<String, Map<QueueACL, AccessControlList>> queueAcls,
      Map<FSQueueType, Set<String>> configuredQueues)
      throws AllocationConfigurationException {
    String queueName = element.getAttribute("name");
    if (parentName != null) {
      queueName = parentName + "." + queueName;
    }
    Map<QueueACL, AccessControlList> acls =
        new HashMap<QueueACL, AccessControlList>();
    NodeList fields = element.getChildNodes();
    boolean isLeaf = true;

    for (int j = 0; j < fields.getLength(); j++) {
      Node fieldNode = fields.item(j);
      if (!(fieldNode instanceof Element))
        continue;
      Element field = (Element) fieldNode;
      if ("minResources".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        Resource val =
            FairSchedulerConfiguration.parseResourceConfigValue(text);
        minQueueResources.put(queueName, val);
      } else if ("maxResources".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        Resource val =
            FairSchedulerConfiguration.parseResourceConfigValue(text);
        maxQueueResources.put(queueName, val);
      } else if ("maxRunningApps".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        int val = Integer.parseInt(text);
        queueMaxApps.put(queueName, val);
      } else if ("maxAMShare".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        float val = Float.parseFloat(text);
        val = Math.min(val, 1.0f);
        queueMaxAMShares.put(queueName, val);
      } else if ("weight".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        double val = Double.parseDouble(text);
        queueWeights.put(queueName, new ResourceWeights((float)val));
      } else if ("minSharePreemptionTimeout".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        long val = Long.parseLong(text) * 1000L;
        minSharePreemptionTimeouts.put(queueName, val);
      } else if ("fairSharePreemptionTimeout".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        long val = Long.parseLong(text) * 1000L;
        fairSharePreemptionTimeouts.put(queueName, val);
      } else if ("fairSharePreemptionThreshold".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        float val = Float.parseFloat(text);
        val = Math.max(Math.min(val, 1.0f), 0.0f);
        fairSharePreemptionThresholds.put(queueName, val);
      } else if ("schedulingPolicy".equals(field.getTagName())
          || "schedulingMode".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData().trim();
        SchedulingPolicy policy = SchedulingPolicy.parse(text);
        queuePolicies.put(queueName, policy);
      } else if ("aclSubmitApps".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData();
        acls.put(QueueACL.SUBMIT_APPLICATIONS, new AccessControlList(text));
      } else if ("aclAdministerApps".equals(field.getTagName())) {
        String text = ((Text)field.getFirstChild()).getData();
        acls.put(QueueACL.ADMINISTER_QUEUE, new AccessControlList(text));
      } else if ("queue".equals(field.getTagName()) ||
          "pool".equals(field.getTagName())) {
        // FIX: was "queue".endsWith(field.getTagName()), which wrongly matched
        // any suffix of "queue" ("ueue", "e", "") as a nested queue element.
        loadQueue(queueName, field, minQueueResources, maxQueueResources,
            queueMaxApps, userMaxApps, queueMaxAMShares, queueWeights,
            queuePolicies, minSharePreemptionTimeouts,
            fairSharePreemptionTimeouts, fairSharePreemptionThresholds,
            queueAcls, configuredQueues);
        configuredQueues.get(FSQueueType.PARENT).add(queueName);
        isLeaf = false;
      }
    }
    if (isLeaf) {
      // if a leaf in the alloc file is marked as type='parent'
      // then store it under 'parent'
      if ("parent".equals(element.getAttribute("type"))) {
        configuredQueues.get(FSQueueType.PARENT).add(queueName);
      } else {
        configuredQueues.get(FSQueueType.LEAF).add(queueName);
      }
    }
    queueAcls.put(queueName, acls);
    // Sanity check: warn (but do not fail) when the configured max cannot even
    // hold the configured min.
    if (maxQueueResources.containsKey(queueName) &&
        minQueueResources.containsKey(queueName)
        && !Resources.fitsIn(minQueueResources.get(queueName),
            maxQueueResources.get(queueName))) {
      LOG.warn(
          String.format(
              "Queue %s has max resources %s less than min resources %s",
          queueName, maxQueueResources.get(queueName),
          minQueueResources.get(queueName)));
    }
  }

  /** Callback notified with the freshly parsed allocation configuration. */
  public interface Listener {
    public void onReload(AllocationConfiguration info);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.lib.join; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import javax.validation.constraints.NotNull; import org.apache.hadoop.classification.InterfaceStability; import com.datatorrent.api.AutoMetric; import com.datatorrent.api.Context; import com.datatorrent.api.DefaultInputPort; import com.datatorrent.api.DefaultOutputPort; import com.datatorrent.api.Operator; import com.datatorrent.api.annotation.InputPortFieldAnnotation; import com.datatorrent.common.util.BaseOperator; /** * <p> * This is the base implementation of join operator. Operator receives tuples from two streams, * applies the join operation based on constraint and emit the joined value. * Subclasses should provide implementation to createOutputTuple,copyValue, getKeyValue, getTime methods. * * <b>Properties:</b><br> * <b>expiryTime</b>: Expiry time for stored tuples<br> * <b>includeFieldStr</b>: List of comma separated fields to be added to the output tuple. * Ex: Field1,Field2;Field3,Field4<br> * <b>keyFields</b>: List of comma separated key field for both the streams. Ex: Field1,Field2<br> * <b>timeFields</b>: List of comma separated time field for both the streams. 
 * Ex: Field1,Field2<br>
 * <b>bucketSpanInMillis</b>: Span of each bucket in milliseconds.<br>
 * <b>strategy</b>: Type of join operation. Default type is inner join<br>
 * <br>
 * <b> Example: </b> <br>
 * Left input port receives customer details and right input port receives Order details.
 * Schema for the Customer be in the form of
 * Schema for the Order be in the form of
 * Now, Join the tuples of Customer and Order streams where Customer.ID = Order.CID and the constraint is
 * matched tuples must have timestamp within 5 minutes.
 * Here, key Fields = ID, CID and Time Fields = RTime, OTime, expiryTime = 5 minutes </b> <br>
 *
 * @displayName Abstract Join Operator
 * @tags join
 *
 * @since 3.4.0
 */
@InterfaceStability.Unstable
public abstract class AbstractJoinOperator<T> extends BaseOperator implements Operator.CheckpointListener
{
  // Number of tuples joined in the last window, normalized to per-second; published as an auto-metric.
  @AutoMetric
  private long tuplesJoinedPerSec;
  // Length of one application window in seconds; derived in setup() from the DAG attributes.
  private double windowTimeSec;
  // Running count of joined tuples emitted in the current window; reset in beginWindow().
  protected int tuplesCount;

  // Emits one List<T> batch per successful join of an incoming tuple.
  public final transient DefaultOutputPort<List<T>> outputPort = new DefaultOutputPort<>();

  // Strategy of Join operation, by default the option is inner join
  protected JoinStrategy strategy = JoinStrategy.INNER_JOIN;

  // This represents whether the processing tuple is from left port or not.
  // NOTE(review): shared mutable flag toggled by both input ports; safe only because an
  // operator's ports are dispatched from a single thread — TODO confirm against engine docs.
  protected boolean isLeft;

  // Left input port: tags tuples as "left" before handing off to the common path.
  @InputPortFieldAnnotation
  public transient DefaultInputPort<T> input1 = new DefaultInputPort<T>()
  {
    @Override
    public void process(T tuple)
    {
      isLeft = true;
      processTuple(tuple);
    }
  };

  // Right input port: tags tuples as "right" before handing off to the common path.
  @InputPortFieldAnnotation
  public transient DefaultInputPort<T> input2 = new DefaultInputPort<T>()
  {
    @Override
    public void process(T tuple)
    {
      isLeft = false;
      processTuple(tuple);
    }
  };

  // Stores for each of the input port
  @NotNull
  protected StoreContext leftStore;
  @NotNull
  protected StoreContext rightStore;
  // Raw configuration strings, parsed into the two StoreContexts during setup() (see populateFields()).
  // Format: includeFieldStr = "leftF1,leftF2;rightF1,rightF2", keyFieldStr = "leftKey,rightKey",
  // timeFieldStr = "leftTime,rightTime" (optional).
  private String includeFieldStr;
  private String keyFieldStr;
  private String timeFieldStr;

  /**
   * Operator lifecycle hook: propagates the outer-join flag to each store,
   * initializes both stores, parses the configuration strings, and computes
   * the application-window length used for the per-second metric.
   */
  @Override
  public void setup(Context.OperatorContext context)
  {
    // Checks whether the strategy is outer join and set it to store.
    // Left store keeps unmatched rows for LEFT/FULL outer; right store for RIGHT/FULL outer.
    boolean isOuter = strategy.equals(JoinStrategy.LEFT_OUTER_JOIN) || strategy.equals(JoinStrategy.OUTER_JOIN);
    leftStore.getStore().isOuterJoin(isOuter);
    isOuter = strategy.equals(JoinStrategy.RIGHT_OUTER_JOIN) || strategy.equals(JoinStrategy.OUTER_JOIN);
    rightStore.getStore().isOuterJoin(isOuter);
    // Setup the stores
    leftStore.getStore().setup(context);
    rightStore.getStore().setup(context);
    populateFields();
    // window seconds = (windows per app window) * (ms per streaming window) / 1000
    windowTimeSec = (context.getValue(Context.OperatorContext.APPLICATION_WINDOW_COUNT) *
      context.getValue(Context.DAGContext.STREAMING_WINDOW_SIZE_MILLIS) * 1.0) / 1000.0;
  }

  /**
   * Create the event with the given tuple. If it successfully inserted it into the store
   * then it does the join operation
   *
   * @param tuple Tuple to process
   */
  protected void processTuple(T tuple)
  {
    // Tuple lands in its own side's store; the join probes the opposite store.
    JoinStore store = isLeft ? leftStore.getStore() : rightStore.getStore();
    TimeEvent t = createEvent(tuple);
    if (store.put(t)) {
      join(t, isLeft);
    }
  }

  // Parses the include/key/time configuration strings into the two stores.
  private void populateFields()
  {
    populateIncludeFields();
    populateKeyFields();
    if (timeFieldStr != null) {
      populateTimeFields();
    }
  }

  /**
   * Populate the fields from the includeFieldStr.
   * Expected format: "left1,left2;right1,right2" — one comma list per port, ';'-separated.
   */
  private void populateIncludeFields()
  {
    String[] portFields = includeFieldStr.split(";");
    // NOTE(review): plain Java assert — only active with -ea; a malformed includeFieldStr
    // would otherwise pass silently. TODO confirm whether a hard validation was intended.
    assert (portFields.length == 2);
    leftStore.setIncludeFields(portFields[0].split(","));
    rightStore.setIncludeFields(portFields[1].split(","));
  }

  /**
   * Get the tuples from another store based on join constraint and key
   *
   * @param tuple input (null means "flush unmatched rows" for an outer join)
   * @param isLeft whether the given tuple is from first port or not (shadows the field on purpose,
   *               so the outer-join flush in endWindow() can pick a side explicitly)
   */
  private void join(TimeEvent tuple, boolean isLeft)
  {
    // Get the valid tuples from the store based on key
    // If the tuple is null means the join type is outer and return unmatched tuples from store.
    ArrayList<TimeEvent> value;
    // Probe the OPPOSITE store from the tuple's side.
    JoinStore store = isLeft ? rightStore.getStore() : leftStore.getStore();
    if (tuple != null) {
      value = (ArrayList<TimeEvent>)store.getValidTuples(tuple);
    } else {
      value = (ArrayList<TimeEvent>)store.getUnMatchedTuples();
    }

    // Join the input tuple with the joined tuples
    if (value != null) {
      List<T> result = new ArrayList<>();
      for (TimeEvent joinedValue : value) {
        T output = createOutputTuple();
        Object tupleValue = null;
        if (tuple != null) {
          tupleValue = tuple.getValue();
        }
        // Copy the probing side's fields (may be null for an outer-join flush) and the matched side's fields.
        copyValue(output, tupleValue, isLeft);
        copyValue(output, joinedValue.getValue(), !isLeft);
        result.add(output);
        // Mark both sides matched so outer joins don't re-emit them as unmatched.
        joinedValue.setMatch(true);
      }
      if (tuple != null) {
        tuple.setMatch(true);
      }
      if (result.size() != 0) {
        outputPort.emit(result);
        tuplesCount += result.size();
      }
    }
  }

  // Emit the unmatched tuples, if the strategy is outer join
  @Override
  public void endWindow()
  {
    // join(null, false) flushes the LEFT store's unmatched rows; join(null, true) the RIGHT store's.
    if (strategy.equals(JoinStrategy.LEFT_OUTER_JOIN) || strategy.equals(JoinStrategy.OUTER_JOIN)) {
      join(null, false);
    }
    if (strategy.equals(JoinStrategy.RIGHT_OUTER_JOIN) || strategy.equals(JoinStrategy.OUTER_JOIN)) {
      join(null, true);
    }
    leftStore.getStore().endWindow();
    rightStore.getStore().endWindow();
    tuplesJoinedPerSec = (long)(tuplesCount / windowTimeSec);
  }

  @Override
  public void beginWindow(long windowId)
  {
    super.beginWindow(windowId);
    // Reset per-window metrics.
    tuplesJoinedPerSec = 0;
    tuplesCount = 0;
  }

  // Checkpoint callbacks are simply forwarded to both stores.
  @Override
  public void checkpointed(long windowId)
  {
    leftStore.getStore().checkpointed(windowId);
    rightStore.getStore().checkpointed(windowId);
  }

  @Override
  public void committed(long windowId)
  {
    leftStore.getStore().committed(windowId);
    rightStore.getStore().committed(windowId);
  }

  /**
   * Convert the given tuple to event
   *
   * @param tuple Given tuple to convert into event
   * @return event keyed by the side's key field; timestamped from the configured time
   *         field when one exists, otherwise by arrival time (wall clock)
   */
  protected TimeEvent createEvent(Object tuple)
  {
    String key = leftStore.getKeys();
    String timeField = leftStore.getTimeFields();
    if (!isLeft) {
      key = rightStore.getKeys();
      timeField = rightStore.getTimeFields();
    }
    if (timeField != null) {
      return new TimeEventImpl(getKeyValue(key, tuple), (Long)getTime(timeField, tuple), tuple);
    } else {
      return new TimeEventImpl(getKeyValue(key, tuple), Calendar.getInstance().getTimeInMillis(), tuple);
    }
  }

  // keyFieldStr format: "leftKey,rightKey".
  private void populateKeyFields()
  {
    leftStore.setKeys(keyFieldStr.split(",")[0]);
    rightStore.setKeys(keyFieldStr.split(",")[1]);
  }

  public JoinStrategy getStrategy()
  {
    return strategy;
  }

  public void setStrategy(JoinStrategy strategy)
  {
    this.strategy = strategy;
  }

  public void setLeftStore(@NotNull JoinStore lStore)
  {
    leftStore = new StoreContext(lStore);
  }

  public void setRightStore(@NotNull JoinStore rStore)
  {
    rightStore = new StoreContext(rStore);
  }

  public void setKeyFields(String keyFieldStr)
  {
    this.keyFieldStr = keyFieldStr;
  }

  public void setTimeFieldStr(String timeFieldStr)
  {
    this.timeFieldStr = timeFieldStr;
  }

  public void setIncludeFields(String includeFieldStr)
  {
    this.includeFieldStr = includeFieldStr;
  }

  public StoreContext getLeftStore()
  {
    return leftStore;
  }

  public StoreContext getRightStore()
  {
    return rightStore;
  }

  public String getIncludeFieldStr()
  {
    return includeFieldStr;
  }

  public String getKeyFieldStr()
  {
    return keyFieldStr;
  }

  public String getTimeFieldStr()
  {
    return timeFieldStr;
  }

  /**
   * Specify the comma separated time fields for both streams ("leftTime,rightTime").
   */
  private void populateTimeFields()
  {
    leftStore.setTimeFields(timeFieldStr.split(",")[0]);
    rightStore.setTimeFields(timeFieldStr.split(",")[1]);
  }

  // String-based overload for property-file configuration; case-insensitive enum name.
  public void setStrategy(String policy)
  {
    this.strategy = JoinStrategy.valueOf(policy.toUpperCase());
  }

  /**
   * Create the output object
   *
   * @return output tuple
   */
  protected abstract T createOutputTuple();

  /**
   * Get the values from extractTuple and set these values to the output
   *
   * @param output output tuple
   * @param extractTuple Extract the values from this tuple (may be null during an outer-join flush)
   * @param isLeft Whether the extracted tuple belongs to left stream or not
   */
  protected abstract void copyValue(T output, Object extractTuple, boolean isLeft);

  /**
   * Get the value of the key field from the given tuple
   *
   * @param keyField Value of the field to extract from given tuple
   * @param tuple Given tuple
   * @return the value of field from given tuple
   */
  protected abstract Object getKeyValue(String keyField, Object tuple);

  /**
   * Get the value of the time field from the given tuple
   *
   * @param field Time field
   * @param tuple given tuple
   * @return the value of time field from given tuple (createEvent casts it to Long)
   */
  protected abstract Object getTime(String field, Object tuple);

  // Supported join semantics; INNER_JOIN is the default (see the 'strategy' field).
  public static enum JoinStrategy
  {
    INNER_JOIN,
    LEFT_OUTER_JOIN,
    RIGHT_OUTER_JOIN,
    OUTER_JOIN
  }

  /**
   * Pairs a JoinStore with the parsed per-side configuration (key field, time field,
   * include fields).
   * NOTE(review): timeFields/includeFields/keys are transient, so they are not
   * checkpointed — they are rebuilt from the config strings in setup() after
   * recovery. TODO confirm setup() is re-invoked on restore.
   */
  public static class StoreContext
  {
    private transient String timeFields;
    private transient String[] includeFields;
    private transient String keys;
    private JoinStore store;

    public StoreContext(JoinStore store)
    {
      this.store = store;
    }

    public String getTimeFields()
    {
      return timeFields;
    }

    public void setTimeFields(String timeFields)
    {
      this.timeFields = timeFields;
    }

    public String[] getIncludeFields()
    {
      return includeFields;
    }

    public void setIncludeFields(String[] includeFields)
    {
      this.includeFields = includeFields;
    }

    public String getKeys()
    {
      return keys;
    }

    public void setKeys(String keys)
    {
      this.keys = keys;
    }

    public JoinStore getStore()
    {
      return store;
    }

    public void setStore(JoinStore store)
    {
      this.store = store;
    }
  }
}
package com.smartdevicelink.proxy.rpc;

import java.util.Hashtable;

import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCResponse;
import com.smartdevicelink.util.DebugTool;

/**
 * Subscribe Vehicle Data Response is sent, when SubscribeVehicleData has been called.
 *
 * Every vehicle-data item is stored in the inherited {@code parameters} table under its
 * KEY_* constant; all accessors delegate to a single pair of private helpers so the
 * store/convert/log behavior is identical for every item.
 *
 * @since SmartDeviceLink 2.0
 */
public class SubscribeVehicleDataResponse extends RPCResponse {
	public static final String KEY_SPEED = "speed";
	public static final String KEY_RPM = "rpm";
	public static final String KEY_FUEL_LEVEL = "fuelLevel";
	public static final String KEY_EXTERNAL_TEMPERATURE = "externalTemperature";
	public static final String KEY_PRNDL = "prndl";
	public static final String KEY_TIRE_PRESSURE = "tirePressure";
	public static final String KEY_ENGINE_TORQUE = "engineTorque";
	public static final String KEY_ODOMETER = "odometer";
	public static final String KEY_GPS = "gps";
	public static final String KEY_FUEL_LEVEL_STATE = "fuelLevel_State";
	public static final String KEY_INSTANT_FUEL_CONSUMPTION = "instantFuelConsumption";
	public static final String KEY_BELT_STATUS = "beltStatus";
	public static final String KEY_BODY_INFORMATION = "bodyInformation";
	public static final String KEY_DEVICE_STATUS = "deviceStatus";
	public static final String KEY_DRIVER_BRAKING = "driverBraking";
	public static final String KEY_WIPER_STATUS = "wiperStatus";
	public static final String KEY_HEAD_LAMP_STATUS = "headLampStatus";
	public static final String KEY_ACC_PEDAL_POSITION = "accPedalPosition";
	public static final String KEY_STEERING_WHEEL_ANGLE = "steeringWheelAngle";
	public static final String KEY_E_CALL_INFO = "eCallInfo";
	public static final String KEY_AIRBAG_STATUS = "airbagStatus";
	public static final String KEY_EMERGENCY_EVENT = "emergencyEvent";
	public static final String KEY_CLUSTER_MODE_STATUS = "clusterModeStatus";
	public static final String KEY_MY_KEY = "myKey";

	/**
	 * Constructs a new SubscribeVehicleDataResponse object
	 */
	public SubscribeVehicleDataResponse() {
		super(FunctionID.SUBSCRIBE_VEHICLE_DATA.toString());
	}

	/**
	 * Constructs a new SubscribeVehicleDataResponse object indicated by the Hashtable
	 * parameter
	 *
	 * @param hash The Hashtable to use
	 */
	public SubscribeVehicleDataResponse(Hashtable<String, Object> hash) {
		super(hash);
	}

	/**
	 * Stores {@code value} under {@code key}, or removes the entry when {@code value}
	 * is null. This is the single store/remove rule shared by every setter.
	 *
	 * @param key parameter key (one of the KEY_* constants)
	 * @param value result to store, or null to clear the parameter
	 */
	private void setVehicleDataResult(String key, VehicleDataResult value) {
		if (value != null) {
			parameters.put(key, value);
		} else {
			parameters.remove(key);
		}
	}

	/**
	 * Reads the value stored under {@code key}. A raw Hashtable (as delivered when the
	 * response is built from a deserialized message) is converted to a VehicleDataResult;
	 * conversion failures are logged and reported as null.
	 *
	 * @param key parameter key (one of the KEY_* constants)
	 * @return the stored/converted VehicleDataResult, or null if absent or unparseable
	 */
	@SuppressWarnings("unchecked")
	private VehicleDataResult getVehicleDataResult(String key) {
		Object obj = parameters.get(key);
		if (obj instanceof VehicleDataResult) {
			return (VehicleDataResult) obj;
		} else if (obj instanceof Hashtable) {
			try {
				return new VehicleDataResult((Hashtable<String, Object>) obj);
			} catch (Exception e) {
				DebugTool.logError("Failed to parse " + getClass().getSimpleName() + "." + key, e);
			}
		}
		return null;
	}

	/**
	 * Sets gps
	 * @param gps
	 */
	public void setGps(VehicleDataResult gps) {
		setVehicleDataResult(KEY_GPS, gps);
	}

	/**
	 * Gets gps
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getGps() {
		return getVehicleDataResult(KEY_GPS);
	}

	/**
	 * Sets speed
	 * @param speed
	 */
	public void setSpeed(VehicleDataResult speed) {
		setVehicleDataResult(KEY_SPEED, speed);
	}

	/**
	 * Gets speed
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getSpeed() {
		return getVehicleDataResult(KEY_SPEED);
	}

	/**
	 * Sets rpm
	 * @param rpm
	 */
	public void setRpm(VehicleDataResult rpm) {
		setVehicleDataResult(KEY_RPM, rpm);
	}

	/**
	 * Gets rpm
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getRpm() {
		return getVehicleDataResult(KEY_RPM);
	}

	/**
	 * Sets Fuel Level
	 * @param fuelLevel
	 */
	public void setFuelLevel(VehicleDataResult fuelLevel) {
		setVehicleDataResult(KEY_FUEL_LEVEL, fuelLevel);
	}

	/**
	 * Gets Fuel Level
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getFuelLevel() {
		return getVehicleDataResult(KEY_FUEL_LEVEL);
	}

	/**
	 * Sets Fuel Level State
	 * @param fuelLevel_State
	 * @deprecated use {@link #setFuelLevelState(VehicleDataResult)}
	 */
	@Deprecated
	public void setFuelLevel_State(VehicleDataResult fuelLevel_State) {
		setFuelLevelState(fuelLevel_State);
	}

	/**
	 * Gets Fuel Level State
	 * @return VehicleDataResult
	 * @deprecated use {@link #getFuelLevelState()}
	 */
	@Deprecated
	public VehicleDataResult getFuelLevel_State() {
		return getFuelLevelState();
	}

	/**
	 * Sets Fuel Level State
	 * @param fuelLevelState
	 */
	public void setFuelLevelState(VehicleDataResult fuelLevelState) {
		setVehicleDataResult(KEY_FUEL_LEVEL_STATE, fuelLevelState);
	}

	/**
	 * Gets Fuel Level State
	 * @return VehicleDataResult
	 */
	// Fix: this getter previously lacked the try/catch guard every other getter has, so a
	// malformed Hashtable payload would throw out of it instead of logging and returning null.
	public VehicleDataResult getFuelLevelState() {
		return getVehicleDataResult(KEY_FUEL_LEVEL_STATE);
	}

	/**
	 * Sets Instant Fuel Consumption
	 * @param instantFuelConsumption
	 */
	public void setInstantFuelConsumption(VehicleDataResult instantFuelConsumption) {
		setVehicleDataResult(KEY_INSTANT_FUEL_CONSUMPTION, instantFuelConsumption);
	}

	/**
	 * Gets Instant Fuel Consumption
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getInstantFuelConsumption() {
		return getVehicleDataResult(KEY_INSTANT_FUEL_CONSUMPTION);
	}

	/**
	 * Sets External Temperature
	 * @param externalTemperature
	 */
	public void setExternalTemperature(VehicleDataResult externalTemperature) {
		setVehicleDataResult(KEY_EXTERNAL_TEMPERATURE, externalTemperature);
	}

	/**
	 * Gets External Temperature
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getExternalTemperature() {
		return getVehicleDataResult(KEY_EXTERNAL_TEMPERATURE);
	}

	/**
	 * Sets currently selected gear data
	 * @param prndl
	 */
	public void setPrndl(VehicleDataResult prndl) {
		setVehicleDataResult(KEY_PRNDL, prndl);
	}

	/**
	 * Gets currently selected gear data
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getPrndl() {
		return getVehicleDataResult(KEY_PRNDL);
	}

	/**
	 * Sets Tire Pressure
	 * @param tirePressure
	 */
	public void setTirePressure(VehicleDataResult tirePressure) {
		setVehicleDataResult(KEY_TIRE_PRESSURE, tirePressure);
	}

	/**
	 * Gets Tire Pressure
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getTirePressure() {
		return getVehicleDataResult(KEY_TIRE_PRESSURE);
	}

	/**
	 * Sets Odometer
	 * @param odometer
	 */
	public void setOdometer(VehicleDataResult odometer) {
		setVehicleDataResult(KEY_ODOMETER, odometer);
	}

	/**
	 * Gets Odometer
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getOdometer() {
		return getVehicleDataResult(KEY_ODOMETER);
	}

	/**
	 * Sets Belt Status
	 * @param beltStatus
	 */
	public void setBeltStatus(VehicleDataResult beltStatus) {
		setVehicleDataResult(KEY_BELT_STATUS, beltStatus);
	}

	/**
	 * Gets Belt Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getBeltStatus() {
		return getVehicleDataResult(KEY_BELT_STATUS);
	}

	/**
	 * Sets Body Information
	 * @param bodyInformation
	 */
	public void setBodyInformation(VehicleDataResult bodyInformation) {
		setVehicleDataResult(KEY_BODY_INFORMATION, bodyInformation);
	}

	/**
	 * Gets Body Information
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getBodyInformation() {
		return getVehicleDataResult(KEY_BODY_INFORMATION);
	}

	/**
	 * Sets Device Status
	 * @param deviceStatus
	 */
	public void setDeviceStatus(VehicleDataResult deviceStatus) {
		setVehicleDataResult(KEY_DEVICE_STATUS, deviceStatus);
	}

	/**
	 * Gets Device Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getDeviceStatus() {
		return getVehicleDataResult(KEY_DEVICE_STATUS);
	}

	/**
	 * Sets Driver Braking
	 * @param driverBraking
	 */
	public void setDriverBraking(VehicleDataResult driverBraking) {
		setVehicleDataResult(KEY_DRIVER_BRAKING, driverBraking);
	}

	/**
	 * Gets Driver Braking
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getDriverBraking() {
		return getVehicleDataResult(KEY_DRIVER_BRAKING);
	}

	/**
	 * Sets Wiper Status
	 * @param wiperStatus
	 */
	public void setWiperStatus(VehicleDataResult wiperStatus) {
		setVehicleDataResult(KEY_WIPER_STATUS, wiperStatus);
	}

	/**
	 * Gets Wiper Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getWiperStatus() {
		return getVehicleDataResult(KEY_WIPER_STATUS);
	}

	/**
	 * Sets Head Lamp Status
	 * @param headLampStatus
	 */
	public void setHeadLampStatus(VehicleDataResult headLampStatus) {
		setVehicleDataResult(KEY_HEAD_LAMP_STATUS, headLampStatus);
	}

	/**
	 * Gets Head Lamp Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getHeadLampStatus() {
		return getVehicleDataResult(KEY_HEAD_LAMP_STATUS);
	}

	/**
	 * Sets Engine Torque
	 * @param engineTorque
	 */
	public void setEngineTorque(VehicleDataResult engineTorque) {
		setVehicleDataResult(KEY_ENGINE_TORQUE, engineTorque);
	}

	/**
	 * Gets Engine Torque
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getEngineTorque() {
		return getVehicleDataResult(KEY_ENGINE_TORQUE);
	}

	/**
	 * Sets AccPedal Position
	 * @param accPedalPosition
	 */
	public void setAccPedalPosition(VehicleDataResult accPedalPosition) {
		setVehicleDataResult(KEY_ACC_PEDAL_POSITION, accPedalPosition);
	}

	/**
	 * Gets AccPedal Position
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getAccPedalPosition() {
		return getVehicleDataResult(KEY_ACC_PEDAL_POSITION);
	}

	/**
	 * Sets Steering Wheel Angle
	 * @param steeringWheelAngle
	 */
	public void setSteeringWheelAngle(VehicleDataResult steeringWheelAngle) {
		setVehicleDataResult(KEY_STEERING_WHEEL_ANGLE, steeringWheelAngle);
	}

	/**
	 * Gets Steering Wheel Angle
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getSteeringWheelAngle() {
		return getVehicleDataResult(KEY_STEERING_WHEEL_ANGLE);
	}

	/**
	 * Sets ECall Info
	 * @param eCallInfo
	 */
	public void setECallInfo(VehicleDataResult eCallInfo) {
		setVehicleDataResult(KEY_E_CALL_INFO, eCallInfo);
	}

	/**
	 * Gets ECall Info
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getECallInfo() {
		return getVehicleDataResult(KEY_E_CALL_INFO);
	}

	/**
	 * Sets Airbag Status
	 * @param airbagStatus
	 */
	public void setAirbagStatus(VehicleDataResult airbagStatus) {
		setVehicleDataResult(KEY_AIRBAG_STATUS, airbagStatus);
	}

	/**
	 * Gets Airbag Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getAirbagStatus() {
		return getVehicleDataResult(KEY_AIRBAG_STATUS);
	}

	/**
	 * Sets Emergency Event
	 * @param emergencyEvent
	 */
	public void setEmergencyEvent(VehicleDataResult emergencyEvent) {
		setVehicleDataResult(KEY_EMERGENCY_EVENT, emergencyEvent);
	}

	/**
	 * Gets Emergency Event
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getEmergencyEvent() {
		return getVehicleDataResult(KEY_EMERGENCY_EVENT);
	}

	/**
	 * Sets Cluster Mode Status
	 * @param clusterModeStatus
	 */
	public void setClusterModeStatus(VehicleDataResult clusterModeStatus) {
		setVehicleDataResult(KEY_CLUSTER_MODE_STATUS, clusterModeStatus);
	}

	/**
	 * Gets Cluster Mode Status
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getClusterModeStatus() {
		return getVehicleDataResult(KEY_CLUSTER_MODE_STATUS);
	}

	/**
	 * Sets MyKey
	 * @param myKey
	 */
	public void setMyKey(VehicleDataResult myKey) {
		setVehicleDataResult(KEY_MY_KEY, myKey);
	}

	/**
	 * Gets MyKey
	 * @return VehicleDataResult
	 */
	public VehicleDataResult getMyKey() {
		return getVehicleDataResult(KEY_MY_KEY);
	}
}
/*
 * Copyright 2011 LMAX Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lmax.disruptor.collections;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Arrays;

/**
 * Histogram for tracking the frequency of observations of values below interval upper bounds.
 *
 * This class is useful for recording timings in nanoseconds across a large number of observations
 * when high performance is required.
 *
 * Not thread-safe: external synchronization is required for concurrent use.
 */
public final class Histogram
{
    // Sorted, strictly increasing interval upper bounds; counts[i] tallies observations <= upperBounds[i]
    // (and > upperBounds[i-1]).
    private final long[] upperBounds;
    private final long[] counts;
    // Range sentinels: no observations yet <=> minValue == Long.MAX_VALUE (and maxValue == 0).
    private long minValue = Long.MAX_VALUE;
    private long maxValue = 0L;

    /**
     * Create a new Histogram with a provided list of interval bounds.
     *
     * @param upperBounds of the intervals; must be positive and strictly increasing.
     * @throws IllegalArgumentException if the bounds are not positive and strictly increasing.
     */
    public Histogram(final long[] upperBounds)
    {
        validateBounds(upperBounds);

        // Defensive copy so later mutation of the caller's array cannot corrupt the histogram.
        this.upperBounds = Arrays.copyOf(upperBounds, upperBounds.length);
        this.counts = new long[upperBounds.length];
    }

    // Rejects non-positive or non-strictly-increasing bounds.
    private void validateBounds(final long[] upperBounds)
    {
        long lastBound = -1L;
        for (final long bound : upperBounds)
        {
            if (bound <= 0L)
            {
                throw new IllegalArgumentException("Bounds must be positive values");
            }
            if (bound <= lastBound)
            {
                throw new IllegalArgumentException("bound " + bound + " is not greater than " + lastBound);
            }
            lastBound = bound;
        }
    }

    /**
     * Size of the list of interval bars.
     *
     * @return size of the interval bar list.
     */
    public int getSize()
    {
        return upperBounds.length;
    }

    /**
     * Get the upper bound of an interval for an index.
     *
     * @param index of the upper bound.
     * @return the interval upper bound for the index.
     */
    public long getUpperBoundAt(final int index)
    {
        return upperBounds[index];
    }

    /**
     * Get the count of observations at a given index.
     *
     * @param index of the observations counter.
     * @return the count of observations at a given index.
     */
    public long getCountAt(final int index)
    {
        return counts[index];
    }

    /**
     * Add an observation to the histogram and increment the counter for the interval it matches.
     *
     * @param value for the observation to be added.
     * @return return true if in the range of intervals otherwise false.
     */
    public boolean addObservation(final long value)
    {
        // Binary search for the first interval whose upper bound is >= value.
        int low = 0;
        int high = upperBounds.length - 1;

        while (low < high)
        {
            int mid = low + ((high - low) >> 1);
            if (upperBounds[mid] < value)
            {
                low = mid + 1;
            }
            else
            {
                high = mid;
            }
        }

        if (value <= upperBounds[high])
        {
            counts[high]++;
            trackRange(value);
            return true;
        }

        return false;
    }

    // Fix: the original used "else if", so the first observation set minValue but never
    // maxValue, leaving getMax() == 0 (and skewing getMean()) until a second, larger value
    // arrived. The two checks must be independent: a single observation is both min and max.
    private void trackRange(final long value)
    {
        if (value < minValue)
        {
            minValue = value;
        }

        if (value > maxValue)
        {
            maxValue = value;
        }
    }

    /**
     * Add observations from another Histogram into this one.
     * Histograms must have the same intervals.
     *
     * @param histogram from which to add the observation counts.
     * @throws IllegalArgumentException if the interval bounds do not match.
     */
    public void addObservations(final Histogram histogram)
    {
        if (upperBounds.length != histogram.upperBounds.length)
        {
            throw new IllegalArgumentException("Histograms must have matching intervals");
        }

        for (int i = 0, size = upperBounds.length; i < size; i++)
        {
            if (upperBounds[i] != histogram.upperBounds[i])
            {
                throw new IllegalArgumentException("Histograms must have matching intervals");
            }
        }

        for (int i = 0, size = counts.length; i < size; i++)
        {
            counts[i] += histogram.counts[i];
        }

        // Fix: only merge the range when the source has observations; an empty histogram's
        // sentinels (min == Long.MAX_VALUE, max == 0) must not be folded into this range.
        if (0L != histogram.getCount())
        {
            trackRange(histogram.minValue);
            trackRange(histogram.maxValue);
        }
    }

    /**
     * Clear the list of interval counters.
     */
    public void clear()
    {
        maxValue = 0L;
        minValue = Long.MAX_VALUE;

        for (int i = 0, size = counts.length; i < size; i++)
        {
            counts[i] = 0L;
        }
    }

    /**
     * Count total number of recorded observations.
     *
     * @return the total number of recorded observations.
     */
    public long getCount()
    {
        long count = 0L;

        for (int i = 0, size = counts.length; i < size; i++)
        {
            count += counts[i];
        }

        return count;
    }

    /**
     * Get the minimum observed value.
     *
     * @return the minimum value observed (Long.MAX_VALUE when no observations recorded).
     */
    public long getMin()
    {
        return minValue;
    }

    /**
     * Get the maximum observed value.
     *
     * @return the maximum of the observed values (0 when no observations recorded).
     */
    public long getMax()
    {
        return maxValue;
    }

    /**
     * Calculate the mean of all recorded observations.
     *
     * The mean is calculated by the summing the mid points of each interval multiplied by the count
     * for that interval, then dividing by the total count of observations. The max and min are
     * considered for adjusting the top and bottom bin when calculating the mid point.
     *
     * @return the mean of all recorded observations.
     */
    public BigDecimal getMean()
    {
        if (0L == getCount())
        {
            return BigDecimal.ZERO;
        }

        // Tighten the bottom bin to the actual minimum when it holds observations.
        long lowerBound = counts[0] > 0L ? minValue : 0L;
        BigDecimal total = BigDecimal.ZERO;

        for (int i = 0, size = upperBounds.length; i < size; i++)
        {
            if (0L != counts[i])
            {
                // Tighten the top of the bin to the actual maximum observed.
                long upperBound = Math.min(upperBounds[i], maxValue);
                long midPoint = lowerBound + ((upperBound - lowerBound) / 2L);

                BigDecimal intervalTotal = new BigDecimal(midPoint).multiply(new BigDecimal(counts[i]));
                total = total.add(intervalTotal);
            }

            lowerBound = Math.max(upperBounds[i] + 1L, minValue);
        }

        return total.divide(new BigDecimal(getCount()), 2, RoundingMode.HALF_UP);
    }

    /**
     * Calculate the upper bound within which 99% of observations fall.
     *
     * @return the upper bound for 99% of observations.
     */
    public long getTwoNinesUpperBound()
    {
        return getUpperBoundForFactor(0.99d);
    }

    /**
     * Calculate the upper bound within which 99.99% of observations fall.
     *
     * @return the upper bound for 99.99% of observations.
     */
    public long getFourNinesUpperBound()
    {
        return getUpperBoundForFactor(0.9999d);
    }

    /**
     * Get the interval upper bound for a given factor of the observation population.
     *
     * @param factor representing the size of the population; must be strictly between 0.0 and 1.0.
     * @return the interval upper bound, or 0 when no observations are recorded.
     * @throws IllegalArgumentException if factor is not strictly between 0.0 and 1.0.
     */
    public long getUpperBoundForFactor(final double factor)
    {
        if (0.0d >= factor || factor >= 1.0d)
        {
            // Fix: message now matches the check above (strict inequalities); it previously
            // claimed ">= 0.0 and <= 1.0", which the guard rejects.
            throw new IllegalArgumentException("factor must be > 0.0 and < 1.0");
        }

        final long totalCount = getCount();
        final long tailTotal = totalCount - Math.round(totalCount * factor);
        long tailCount = 0L;

        // Walk from the top bin down until the tail beyond the factor is accounted for.
        for (int i = counts.length - 1; i >= 0; i--)
        {
            if (0L != counts[i])
            {
                tailCount += counts[i];
                if (tailCount >= tailTotal)
                {
                    return upperBounds[i];
                }
            }
        }

        return 0L;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();

        sb.append("Histogram{");

        sb.append("min=").append(getMin()).append(", ");
        sb.append("max=").append(getMax()).append(", ");
        sb.append("mean=").append(getMean()).append(", ");
        sb.append("99%=").append(getTwoNinesUpperBound()).append(", ");
        sb.append("99.99%=").append(getFourNinesUpperBound()).append(", ");

        sb.append('[');
        for (int i = 0, size = counts.length; i < size; i++)
        {
            sb.append(upperBounds[i]).append('=').append(counts[i]).append(", ");
        }

        if (counts.length > 0)
        {
            sb.setLength(sb.length() - 2);
        }
        sb.append(']');

        sb.append('}');

        return sb.toString();
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.impl.test; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.Callable; import org.flowable.engine.ManagementService; import org.flowable.engine.ProcessEngineConfiguration; import org.flowable.engine.common.api.FlowableException; import org.flowable.engine.impl.asyncexecutor.AsyncExecutor; import org.flowable.engine.test.FlowableRule; /** * @author Joram Barrez * @author Tijs Rademakers * @author Saeid Mirzaei */ // This helper class helps sharing the same code for jobExecutor test helpers, // between Junit3 and junit 4 test support classes public class JobTestHelper { public static void waitForJobExecutorToProcessAllJobs(FlowableRule activitiRule, long maxMillisToWait, long intervalMillis) { waitForJobExecutorToProcessAllJobs(activitiRule.getProcessEngine().getProcessEngineConfiguration(), activitiRule.getManagementService(), maxMillisToWait, intervalMillis); } public static void waitForJobExecutorToProcessAllJobs(ProcessEngineConfiguration processEngineConfiguration, ManagementService managementService, long maxMillisToWait, long intervalMillis) { waitForJobExecutorToProcessAllJobs(processEngineConfiguration, managementService, maxMillisToWait, intervalMillis, true); } public static void waitForJobExecutorToProcessAllJobs(ProcessEngineConfiguration processEngineConfiguration, ManagementService managementService, long maxMillisToWait, long intervalMillis, boolean 
shutdownExecutorWhenFinished) { AsyncExecutor asyncExecutor = processEngineConfiguration.getAsyncExecutor(); asyncExecutor.start(); try { Timer timer = new Timer(); InterruptTask task = new InterruptTask(Thread.currentThread()); timer.schedule(task, maxMillisToWait); boolean areJobsAvailable = true; try { while (areJobsAvailable && !task.isTimeLimitExceeded()) { Thread.sleep(intervalMillis); try { areJobsAvailable = areJobsAvailable(managementService); } catch (Throwable t) { // Ignore, possible that exception occurs due to locking/updating of table on MSSQL when // isolation level doesn't allow READ of the table } } } catch (InterruptedException e) { // ignore } finally { timer.cancel(); } if (areJobsAvailable) { throw new FlowableException("time limit of " + maxMillisToWait + " was exceeded"); } } finally { if (shutdownExecutorWhenFinished) { asyncExecutor.shutdown(); } } } public static void waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(ProcessEngineConfiguration processEngineConfiguration, ManagementService managementService, long maxMillisToWait, long intervalMillis) { waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(processEngineConfiguration, managementService, maxMillisToWait, intervalMillis, true); } public static void waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(ProcessEngineConfiguration processEngineConfiguration, ManagementService managementService, long maxMillisToWait, long intervalMillis, boolean shutdownExecutorWhenFinished) { AsyncExecutor asyncExecutor = processEngineConfiguration.getAsyncExecutor(); asyncExecutor.start(); processEngineConfiguration.setAsyncExecutorActivate(true); try { Timer timer = new Timer(); InterruptTask task = new InterruptTask(Thread.currentThread()); timer.schedule(task, maxMillisToWait); boolean areJobsAvailable = true; try { while (areJobsAvailable && !task.isTimeLimitExceeded()) { Thread.sleep(intervalMillis); try { areJobsAvailable = 
areJobsOrExecutableTimersAvailable(managementService); } catch (Throwable t) { // Ignore, possible that exception occurs due to locking/updating of table on MSSQL when // isolation level doesn't allow READ of the table } } } catch (InterruptedException e) { // ignore } finally { timer.cancel(); } if (areJobsAvailable) { throw new FlowableException("time limit of " + maxMillisToWait + " was exceeded"); } } finally { if (shutdownExecutorWhenFinished) { processEngineConfiguration.setAsyncExecutorActivate(false); asyncExecutor.shutdown(); } } } public static void waitForJobExecutorOnCondition(FlowableRule activitiRule, long maxMillisToWait, long intervalMillis, Callable<Boolean> condition) { waitForJobExecutorOnCondition(activitiRule.getProcessEngine().getProcessEngineConfiguration(), maxMillisToWait, intervalMillis, condition); } public static void waitForJobExecutorOnCondition(ProcessEngineConfiguration processEngineConfiguration, long maxMillisToWait, long intervalMillis, Callable<Boolean> condition) { AsyncExecutor asyncExecutor = processEngineConfiguration.getAsyncExecutor(); asyncExecutor.start(); try { Timer timer = new Timer(); InterruptTask task = new InterruptTask(Thread.currentThread()); timer.schedule(task, maxMillisToWait); boolean conditionIsViolated = true; try { while (conditionIsViolated) { Thread.sleep(intervalMillis); conditionIsViolated = !condition.call(); } } catch (InterruptedException e) { // ignore } catch (Exception e) { throw new FlowableException("Exception while waiting on condition: " + e.getMessage(), e); } finally { timer.cancel(); } if (conditionIsViolated) { throw new FlowableException("time limit of " + maxMillisToWait + " was exceeded"); } } finally { asyncExecutor.shutdown(); } } public static void executeJobExecutorForTime(FlowableRule activitiRule, long maxMillisToWait, long intervalMillis) { executeJobExecutorForTime(activitiRule.getProcessEngine().getProcessEngineConfiguration(), maxMillisToWait, intervalMillis); } public static 
void executeJobExecutorForTime(ProcessEngineConfiguration processEngineConfiguration, long maxMillisToWait, long intervalMillis) { AsyncExecutor asyncExecutor = processEngineConfiguration.getAsyncExecutor(); asyncExecutor.start(); try { Timer timer = new Timer(); InterruptTask task = new InterruptTask(Thread.currentThread()); timer.schedule(task, maxMillisToWait); try { while (!task.isTimeLimitExceeded()) { Thread.sleep(intervalMillis); } } catch (InterruptedException e) { // ignore } finally { timer.cancel(); } } finally { asyncExecutor.shutdown(); } } public static boolean areJobsAvailable(FlowableRule activitiRule) { return areJobsAvailable(activitiRule.getManagementService()); } public static boolean areJobsAvailable(ManagementService managementService) { return !managementService.createJobQuery().list().isEmpty(); } public static boolean areJobsOrExecutableTimersAvailable(ManagementService managementService) { boolean emptyJobs = managementService.createJobQuery().list().isEmpty(); if (emptyJobs) { return !managementService.createTimerJobQuery().executable().list().isEmpty(); } else { return true; } } private static class InterruptTask extends TimerTask { protected boolean timeLimitExceeded; protected Thread thread; public InterruptTask(Thread thread) { this.thread = thread; } public boolean isTimeLimitExceeded() { return timeLimitExceeded; } public void run() { timeLimitExceeded = true; thread.interrupt(); } } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package org.ncbo.resource_access_tools.resource.nif.ctdpathway;

import org.ncbo.resource_access_tools.enumeration.ResourceType;
import org.ncbo.resource_access_tools.populate.Element;
import org.ncbo.resource_access_tools.populate.Structure;
import org.ncbo.resource_access_tools.resource.nif.AbstractNifResourceAccessTool;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.zip.GZIPInputStream;

/**
 * AccessTool for CTD Pathways (via NIF).
 * <p>
 * Downloads the CTD pathways dump and populates the resource element table
 * with one element per pathway row.
 *
 * @author r.malviya
 */
public class CTDPAccessTool extends AbstractNifResourceAccessTool {

    // Resource identity used to register this tool with the framework.
    private static final String URL = "http://ctdbase.org/";
    private static final String NAME = "CTD Pathway (via NIF)";
    private static final String RESOURCEID = "CTDP";
    private static final String DESCRIPTION = "A public database that enhances understanding about the effects of environmental chemicals on human health. " +
            "In detail, it contains information about gene/protein-disease associations, chemical-disease associations, interactions between chemicals and genes/proteins, " +
            "as well as the related pathways.";
    private static final String LOGO = "http://neurolex.org/w/images/b/bb/CTD.PNG";
    // Per-element detail page; the local element id is appended to this prefix.
    private static final String ELT_URL = "http://ctdbase.org/detail.go?type=pathway&acc=";
    // Context (column) definitions: a single "Pathway" context, weight 1.0,
    // flagged for concept recognition rather than bound to an ontology.
    private static final String[] ITEMKEYS = {"Pathway"};
    private static final Double[] WEIGHTS = {1.0};
    private static final String[] ONTOIDS = {Structure.FOR_CONCEPT_RECOGNITION};
    private static final Structure STRUCTURE = new Structure(ITEMKEYS, RESOURCEID, WEIGHTS, ONTOIDS);
    // NOTE(review): never read anywhere in this class — presumably a leftover; verify before removing.
    private static char seprator = ' ';

    // constructors

    /**
     * Registers the tool with its name/id/structure and fills in the resource
     * metadata (home URL, description, logo, element URL prefix).
     */
    public CTDPAccessTool() {
        super(NAME, RESOURCEID, STRUCTURE);
        try {
            this.getToolResource().setResourceURL(new URL(URL));
            this.getToolResource().setResourceDescription(DESCRIPTION);
            this.getToolResource().setResourceLogo(new URL(LOGO));
            this.getToolResource().setResourceElementURL(ELT_URL);
        } catch (MalformedURLException e) {
            // URL/LOGO are compile-time constants, so this should be unreachable.
            logger.error(EMPTY_STRING, e);
        }
    }

    @Override
    public ResourceType getResourceType() {
        return ResourceType.BIG;
    }

    @Override
    public void updateResourceInformation() {
        // TODO
        // can be used to update resource name, description, logo, elt_url.
    }

    @Override
    public HashSet<String> queryOnlineResource(String query) {
        // TODO
        // Online querying is not used by this tool; always returns an empty set.
        // NOTE(review): original comment said "not used for caArray" — likely a
        // copy/paste from another access tool.
        return new HashSet<String>();
    }

    @Override
    public String elementURLString(String elementLocalID) {
        return ELT_URL + elementLocalID;
    }

    @Override
    public String mainContextDescriptor() {
        String MAIN_ITEMKEY = "Pathway";
        return MAIN_ITEMKEY;
    }

    /**
     * This method creates map of latest version of ontology with contexts as
     * key. It uses virtual ontology ids associated with contexts.
     *
     * @param structure {@code Structure} for given resource
     * @return {@code HashMap} of latest local ontology id with context as key.
     */
    private HashMap<String, String> createLocalOntologyIDMap(Structure structure) {
        HashMap<String, String> localOntologyIDMap = new HashMap<String, String>();
        String virtualOntologyID;
        for (String contextName : structure.getOntoIds().keySet()) {
            virtualOntologyID = structure.getOntoIds().get(contextName);
            // Contexts flagged "for concept recognition" / "not for annotation"
            // carry no real ontology id and are skipped.
            if (!virtualOntologyID.equals(Structure.FOR_CONCEPT_RECOGNITION)
                    && !virtualOntologyID.equals(Structure.NOT_FOR_ANNOTATION)) {
                localOntologyIDMap.put(contextName, ontlogyService
                        .getLatestLocalOntologyID(virtualOntologyID));
            }
        }
        return localOntologyIDMap;
    }

    @Override
    public int updateResourceContent() {
        // NOTE(review): always returns 0 — getAllElements() performs the inserts
        // itself but its count is never propagated back to this counter.
        int nbElement = 0;
        this.getAllElements();
        return nbElement;
    }

    /**
     * Downloads the gzip-compressed CTD pathways dump, parses it in batches of up
     * to 200 new rows, and inserts every element not already present in the ET table.
     * <p>
     * NOTE(review): despite the original "@return HashSet&lt;Element&gt;" comment,
     * this method is void — elements are inserted via {@code resourceUpdateService}.
     */
    @SuppressWarnings("resource")
    private void getAllElements() {
        try {
            // Local element ids already harvested; used to skip duplicates.
            HashSet<String> allElementsInET = this.resourceUpdateService
                    .getAllLocalElementIDs();
            Map<String, Map<String, String>> allRowsData = new HashMap<String, Map<String, String>>();
            URL csvFile = new URL(
                    "http://ctdbase.org/reports/CTD_pathways.tsv.gz");
            // NOTE(review): this reader is never closed (hence the @SuppressWarnings);
            // a finally-close or try-with-resources would be safer.
            BufferedReader fileReader = new BufferedReader(
                    new InputStreamReader(new GZIPInputStream(
                            csvFile.openStream())));
            String headerRow;
            int a = 1, b = 0;
            // NOTE(review): the dump is a .tsv, and the tokens[0].equals("# PathwayName")
            // check below only matches if rows split on a TAB — confirm against the
            // original source bytes that this literal is a tab, not a space.
            final String DELIMITER = " ";
            // Skip forward to two lines past the "# PathwayName" header row:
            // when the header is seen at line a, b is set to a + 2, and the loop
            // breaks once a catches up with b.
            while ((headerRow = fileReader.readLine()) != null) {
                // Get all tokens available in line
                String[] tokens = headerRow.split(DELIMITER);
                if (tokens[0].equals("# PathwayName"))
                    b = a + 2;
                if (b == a) {
                    break;
                }
                a++;
            }
            while (headerRow != null) {
                int rowCount = 0;
                while ((headerRow = fileReader.readLine()) != null) {
                    String[] tokens = headerRow.split(DELIMITER);
                    // Column 1 is the local element id; every other column is mapped
                    // onto successive ITEMKEYS contexts.
                    // NOTE(review): ITEMKEYS has a single entry, so this only works for
                    // rows with exactly two columns — a third column would index past
                    // the end of ITEMKEYS (j = 1).
                    for (int i = 0, j = 0; i < tokens.length; i++) {
                        Map<String, String> elementAttributes = new HashMap<String, String>();
                        String localElementId = EMPTY_STRING;
                        while (i < tokens.length) {
                            if (i == 1)
                                localElementId = tokens[i];
                            else {
                                elementAttributes.put(Structure
                                        .generateContextName(RESOURCEID,
                                                ITEMKEYS[j]), tokens[i]);
                                j++;
                            }
                            i++;
                        }
                        // Already-harvested elements are skipped silently.
                        if (allElementsInET.contains(localElementId)) {
                        } else {
                            rowCount++;
                            allRowsData.put(localElementId, elementAttributes);
                        }
                    }
                    // Process the file in batches of 200 new rows.
                    double MAX_ROW = 200;
                    if (rowCount == MAX_ROW) {
                        break;
                    }
                }// parsing ends
                // Second phase: creation of elements
                // NOTE(review): a batch with exactly one new row is dropped by this
                // "> 1" guard — confirm that is intentional.
                if (rowCount > 1) {
                    HashSet<Element> elementSet = new HashSet<Element>();
                    for (String localElementID : allRowsData.keySet()) {
                        Map<String, String> elementAttributes;
                        elementAttributes = allRowsData.get(localElementID);
                        // Copy the parsed attributes into a fresh Structure.
                        Structure elementStructure = new Structure(
                                STRUCTURE.getContextNames());
                        for (String contextName : STRUCTURE.getContextNames()) {
                            boolean attributeHasValue = false;
                            for (String att : elementAttributes.keySet()) {
                                if (contextName.equals(att)) {
                                    // not an existing annotation
                                    if (STRUCTURE
                                            .getOntoID(contextName)
                                            .equals(Structure.FOR_CONCEPT_RECOGNITION)
                                            || STRUCTURE
                                                    .getOntoID(contextName)
                                                    .equals(Structure.NOT_FOR_ANNOTATION)) {
                                        elementStructure.putContext(
                                                contextName,
                                                elementAttributes.get(att));
                                        attributeHasValue = true;
                                    }
                                }
                            }
                            // to avoid null value in the structure
                            if (!attributeHasValue) {
                                elementStructure.putContext(contextName,
                                        EMPTY_STRING);
                            }
                        }
                        // put the element structure in a new element
                        try {
                            Element exp = new Element(localElementID,
                                    elementStructure);
                            elementSet.add(exp);
                        } catch (Element.BadElementStructureException e) {
                            logger.error(EMPTY_STRING, e);
                        }
                    }
                    allRowsData.clear();
                    Element myExp;
                    int nbElement = 0;
                    Iterator<Element> i = elementSet.iterator();
                    while (i.hasNext()) {
                        // NOTE(review): rowCount is reused here as a plain counter and no
                        // longer reflects the number of parsed rows after this loop.
                        rowCount++;
                        myExp = i.next();
                        try {
                            if (!myExp.getElementStructure().hasNullValues()) {
                                if (this.resourceUpdateService
                                        .addElement(myExp)) {
                                    nbElement++;
                                }
                            }
                        } catch (Exception e) {
                            logger.error(
                                    "** PROBLEM ** Problem with id "
                                            + myExp.getLocalElementId()
                                            + " when populating the OBR_CTDCD_ET table.",
                                    e);
                        }
                    }
                    logger.info(nbElement + " elements added to the OBR_CTDCD_ET table.");
                }
            }
        } catch (Exception e) {
            logger.error("** PROBLEM ** Problem in getting rows.", e);
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: route_guide.proto
//
// NOTE(review): this is protoc output — do not hand-edit; regenerate from
// route_guide.proto if the message definition changes.

package io.grpc.examples.routeguide;

/**
 * Protobuf type {@code routeguide.Feature}
 *
 * <pre>
 * A feature names something at a given point.
 * If a feature could not be named, the name is empty.
 * </pre>
 */
public final class Feature extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:routeguide.Feature)
    FeatureOrBuilder {
  // Use Feature.newBuilder() to construct.
  private Feature(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
  }
  private Feature() {
    name_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor; tag 10 = field 1 (name, length-delimited),
  // tag 18 = field 2 (location, embedded message).
  private Feature(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            String s = input.readStringRequireUtf8();

            name_ = s;
            break;
          }
          case 18: {
            io.grpc.examples.routeguide.Point.Builder subBuilder = null;
            if (location_ != null) {
              subBuilder = location_.toBuilder();
            }
            location_ = input.readMessage(io.grpc.examples.routeguide.Point.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(location_);
              location_ = subBuilder.buildPartial();
            }

            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw new RuntimeException(e.setUnfinishedMessage(this));
    } catch (java.io.IOException e) {
      throw new RuntimeException(
          new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this));
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return io.grpc.examples.routeguide.RouteGuideProto.internal_static_routeguide_Feature_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grpc.examples.routeguide.RouteGuideProto.internal_static_routeguide_Feature_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grpc.examples.routeguide.Feature.class, io.grpc.examples.routeguide.Feature.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached on access.
  private volatile java.lang.Object name_;
  /** <code>optional string name = 1;</code> — the name of the feature. */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /** <code>optional string name = 1;</code> — the name of the feature, as UTF-8 bytes. */
  public com.google.protobuf.ByteString
      getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LOCATION_FIELD_NUMBER = 2;
  private io.grpc.examples.routeguide.Point location_;
  /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
  public boolean hasLocation() {
    return location_ != null;
  }
  /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
  public io.grpc.examples.routeguide.Point getLocation() {
    return location_ == null ? io.grpc.examples.routeguide.Point.getDefaultInstance() : location_;
  }
  /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
  public io.grpc.examples.routeguide.PointOrBuilder getLocationOrBuilder() {
    return getLocation();
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessage.writeString(output, 1, name_);
    }
    if (location_ != null) {
      output.writeMessage(2, getLocation());
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_);
    }
    if (location_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getLocation());
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  public static io.grpc.examples.routeguide.Feature parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static io.grpc.examples.routeguide.Feature parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static io.grpc.examples.routeguide.Feature parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static io.grpc.examples.routeguide.Feature parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(io.grpc.examples.routeguide.Feature prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code routeguide.Feature}
   *
   * <pre>
   * A feature names something at a given point.
   * If a feature could not be named, the name is empty.
   * </pre>
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:routeguide.Feature)
      io.grpc.examples.routeguide.FeatureOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return io.grpc.examples.routeguide.RouteGuideProto.internal_static_routeguide_Feature_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.grpc.examples.routeguide.RouteGuideProto.internal_static_routeguide_Feature_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.grpc.examples.routeguide.Feature.class, io.grpc.examples.routeguide.Feature.Builder.class);
    }

    // Construct using io.grpc.examples.routeguide.Feature.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      name_ = "";

      if (locationBuilder_ == null) {
        location_ = null;
      } else {
        location_ = null;
        locationBuilder_ = null;
      }
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return io.grpc.examples.routeguide.RouteGuideProto.internal_static_routeguide_Feature_descriptor;
    }

    public io.grpc.examples.routeguide.Feature getDefaultInstanceForType() {
      return io.grpc.examples.routeguide.Feature.getDefaultInstance();
    }

    public io.grpc.examples.routeguide.Feature build() {
      io.grpc.examples.routeguide.Feature result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public io.grpc.examples.routeguide.Feature buildPartial() {
      io.grpc.examples.routeguide.Feature result = new io.grpc.examples.routeguide.Feature(this);
      result.name_ = name_;
      if (locationBuilder_ == null) {
        result.location_ = location_;
      } else {
        result.location_ = locationBuilder_.build();
      }
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof io.grpc.examples.routeguide.Feature) {
        return mergeFrom((io.grpc.examples.routeguide.Feature)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(io.grpc.examples.routeguide.Feature other) {
      if (other == io.grpc.examples.routeguide.Feature.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      if (other.hasLocation()) {
        mergeLocation(other.getLocation());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      io.grpc.examples.routeguide.Feature parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (io.grpc.examples.routeguide.Feature) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /** <code>optional string name = 1;</code> — the name of the feature. */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /** <code>optional string name = 1;</code> — the name of the feature, as UTF-8 bytes. */
    public com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /** <code>optional string name = 1;</code> — sets the name of the feature. */
    public Builder setName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      name_ = value;
      onChanged();
      return this;
    }
    /** <code>optional string name = 1;</code> — resets the name to its default. */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /** <code>optional string name = 1;</code> — sets the name from UTF-8 bytes. */
    public Builder setNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    private io.grpc.examples.routeguide.Point location_ = null;
    private com.google.protobuf.SingleFieldBuilder<
        io.grpc.examples.routeguide.Point, io.grpc.examples.routeguide.Point.Builder, io.grpc.examples.routeguide.PointOrBuilder> locationBuilder_;
    /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
    public boolean hasLocation() {
      return locationBuilder_ != null || location_ != null;
    }
    /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
    public io.grpc.examples.routeguide.Point getLocation() {
      if (locationBuilder_ == null) {
        return location_ == null ? io.grpc.examples.routeguide.Point.getDefaultInstance() : location_;
      } else {
        return locationBuilder_.getMessage();
      }
    }
    /** <code>optional .routeguide.Point location = 2;</code> — sets the location. */
    public Builder setLocation(io.grpc.examples.routeguide.Point value) {
      if (locationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        location_ = value;
        onChanged();
      } else {
        locationBuilder_.setMessage(value);
      }

      return this;
    }
    /** <code>optional .routeguide.Point location = 2;</code> — sets the location from a builder. */
    public Builder setLocation(
        io.grpc.examples.routeguide.Point.Builder builderForValue) {
      if (locationBuilder_ == null) {
        location_ = builderForValue.build();
        onChanged();
      } else {
        locationBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /** <code>optional .routeguide.Point location = 2;</code> — merges into the current location. */
    public Builder mergeLocation(io.grpc.examples.routeguide.Point value) {
      if (locationBuilder_ == null) {
        if (location_ != null) {
          location_ =
            io.grpc.examples.routeguide.Point.newBuilder(location_).mergeFrom(value).buildPartial();
        } else {
          location_ = value;
        }
        onChanged();
      } else {
        locationBuilder_.mergeFrom(value);
      }

      return this;
    }
    /** <code>optional .routeguide.Point location = 2;</code> — clears the location. */
    public Builder clearLocation() {
      if (locationBuilder_ == null) {
        location_ = null;
        onChanged();
      } else {
        location_ = null;
        locationBuilder_ = null;
      }

      return this;
    }
    /** <code>optional .routeguide.Point location = 2;</code> — returns a builder for the location. */
    public io.grpc.examples.routeguide.Point.Builder getLocationBuilder() {

      onChanged();
      return getLocationFieldBuilder().getBuilder();
    }
    /** <code>optional .routeguide.Point location = 2;</code> — the point where the feature is detected. */
    public io.grpc.examples.routeguide.PointOrBuilder getLocationOrBuilder() {
      if (locationBuilder_ != null) {
        return locationBuilder_.getMessageOrBuilder();
      } else {
        return location_ == null ?
            io.grpc.examples.routeguide.Point.getDefaultInstance() : location_;
      }
    }
    /** <code>optional .routeguide.Point location = 2;</code> — lazily creates the nested field builder. */
    private com.google.protobuf.SingleFieldBuilder<
        io.grpc.examples.routeguide.Point, io.grpc.examples.routeguide.Point.Builder, io.grpc.examples.routeguide.PointOrBuilder>
        getLocationFieldBuilder() {
      if (locationBuilder_ == null) {
        locationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
            io.grpc.examples.routeguide.Point, io.grpc.examples.routeguide.Point.Builder, io.grpc.examples.routeguide.PointOrBuilder>(
                getLocation(),
                getParentForChildren(),
                isClean());
        location_ = null;
      }
      return locationBuilder_;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:routeguide.Feature)
  }

  // @@protoc_insertion_point(class_scope:routeguide.Feature)
  private static final io.grpc.examples.routeguide.Feature DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new io.grpc.examples.routeguide.Feature();
  }

  public static io.grpc.examples.routeguide.Feature getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<Feature>
      PARSER = new com.google.protobuf.AbstractParser<Feature>() {
    public Feature parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      try {
        return new Feature(input, extensionRegistry);
      } catch (RuntimeException e) {
        if (e.getCause() instanceof
            com.google.protobuf.InvalidProtocolBufferException) {
          throw (com.google.protobuf.InvalidProtocolBufferException)
              e.getCause();
        }
        throw e;
      }
    }
  };

  public static com.google.protobuf.Parser<Feature> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Feature> getParserForType() {
    return PARSER;
  }

  public io.grpc.examples.routeguide.Feature getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.journal.impl;

import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.activemq.artemis.core.journal.SequentialFile;
import org.apache.activemq.artemis.core.journal.SequentialFileFactory;
import org.apache.activemq.artemis.journal.ActiveMQJournalLogger;

/**
 * This is a helper class for the Journal, which will control access to dataFiles, openedFiles and freeFiles
 * Guaranteeing that they will be delivered in order to the Journal
 */
public class JournalFilesRepository {

   // Trace flag captured once at class load; controls the trace(...) calls below.
   private static final boolean trace = ActiveMQJournalLogger.LOGGER.isTraceEnabled();

   /**
    * Used to debug the consistency of the journal ordering.
    * <p/>
    * This is meant to be false as these extra checks would cause performance issues
    */
   private static final boolean CHECK_CONSISTENCE = false;

   // This method exists just to make debug easier.
   // I could replace log.trace by log.info temporarily while I was debugging
   // Journal
   private static void trace(final String message) {
      ActiveMQJournalLogger.LOGGER.trace(message);
   }

   private final SequentialFileFactory fileFactory;

   private final JournalImpl journal;

   // Files currently holding journal data, kept in file-ID order (see checkDataFiles()).
   private final BlockingDeque<JournalFile> dataFiles = new LinkedBlockingDeque<JournalFile>();

   // Reclaimed/pre-created files waiting to be reused.
   private final ConcurrentLinkedQueue<JournalFile> freeFiles = new ConcurrentLinkedQueue<JournalFile>();

   // Files already opened ahead of time, ready to be handed to the journal by openFile().
   private final BlockingQueue<JournalFile> openedFiles = new LinkedBlockingQueue<JournalFile>();

   // Monotonically increasing source of file IDs; incremented before use (see generateFileID()).
   private final AtomicLong nextFileID = new AtomicLong(0);

   private final int maxAIO;

   private final int minFiles;

   private final int fileSize;

   private final String filePrefix;

   private final String fileExtension;

   private final int userVersion;

   // Tracked separately from freeFiles.size() so the count can be read cheaply and atomically.
   private final AtomicInteger freeFilesCount = new AtomicInteger(0);

   // When set, pushOpenedFile() runs asynchronously on this executor; otherwise inline.
   private Executor openFilesExecutor;

   // Opens the next file and queues it on openedFiles; errors are logged, not propagated,
   // since this may run detached on openFilesExecutor.
   private final Runnable pushOpenRunnable = new Runnable() {
      public void run() {
         try {
            pushOpenedFile();
         } catch (Exception e) {
            ActiveMQJournalLogger.LOGGER.errorPushingFile(e);
         }
      }
   };

   /**
    * @param fileFactory   factory used to create/open the underlying sequential files
    * @param journal       owning journal; used for the current-file consistency checks
    * @param filePrefix    prefix of every journal file name (must not be null)
    * @param fileExtension extension of every journal file name (must not be null)
    * @param userVersion   version stamped into each file header
    * @param maxAIO        max simultaneous AIO operations per file (must be &gt; 0)
    * @param fileSize      expected size of each journal file in bytes
    * @param minFiles      minimum number of files (data + free) to keep around
    * @throws IllegalArgumentException if filePrefix/fileExtension is null or maxAIO &lt;= 0
    */
   public JournalFilesRepository(final SequentialFileFactory fileFactory,
                                 final JournalImpl journal,
                                 final String filePrefix,
                                 final String fileExtension,
                                 final int userVersion,
                                 final int maxAIO,
                                 final int fileSize,
                                 final int minFiles) {
      if (filePrefix == null) {
         throw new IllegalArgumentException("filePrefix cannot be null");
      }
      if (fileExtension == null) {
         throw new IllegalArgumentException("fileExtension cannot be null");
      }
      if (maxAIO <= 0) {
         throw new IllegalArgumentException("maxAIO must be a positive number");
      }
      this.fileFactory = fileFactory;
      this.maxAIO = maxAIO;
      this.filePrefix = filePrefix;
      this.fileExtension = fileExtension;
      this.minFiles = minFiles;
      this.fileSize = fileSize;
      this.userVersion = userVersion;
      this.journal = journal;
   }

   // Public --------------------------------------------------------

   /** Sets the executor used to open files asynchronously; null means open inline. */
   public void setExecutor(final Executor fileExecutor) {
      this.openFilesExecutor = fileExecutor;
   }

   /**
    * Empties all three collections, closing each opened file first.
    * Close failures are logged and do not abort the clear.
    */
   public void clear() throws Exception {
      dataFiles.clear();

      freeFiles.clear();

      freeFilesCount.set(0);

      for (JournalFile file : openedFiles) {
         try {
            file.getFile().close();
         } catch (Exception e) {
            ActiveMQJournalLogger.LOGGER.errorClosingFile(e);
         }
      }
      openedFiles.clear();
   }

   public int getMaxAIO() {
      return maxAIO;
   }

   public String getFileExtension() {
      return fileExtension;
   }

   public String getFilePrefix() {
      return filePrefix;
   }

   /**
    * Advances {@link #nextFileID} past the highest ID found in {@code files},
    * considering both the ID stored in the file and the ID encoded in its name.
    */
   public void calculateNextfileID(final List<JournalFile> files) {
      for (JournalFile file : files) {
         final long fileIdFromFile = file.getFileID();
         final long fileIdFromName = getFileNameID(file.getFile().getFileName());

         // The compactor could create a fileName but use a previously assigned ID.
         // Because of that we need to take both parts into account
         setNextFileID(Math.max(fileIdFromName, fileIdFromFile));
      }
   }

   /**
    * Set the {@link #nextFileID} value to {@code targetUpdate} if the current value is less than
    * {@code targetUpdate}.
    * <p/>
    * Notice that {@code nextFileID} is incremented before being used, see
    * {@link JournalFilesRepository#generateFileID()}.
    *
    * @param targetUpdate the candidate new value; ignored if not greater than the current value
    */
   public void setNextFileID(final long targetUpdate) {
      // Classic CAS loop: retry until we either observe a value >= target or win the CAS.
      while (true) {
         final long current = nextFileID.get();
         if (current >= targetUpdate)
            return;

         if (nextFileID.compareAndSet(current, targetUpdate))
            return;
      }
   }

   /**
    * Pre-creates free files until data + free files reach {@code minFiles}.
    */
   public void ensureMinFiles() throws Exception {
      // FIXME - size() involves a scan
      int filesToCreate = minFiles - (dataFiles.size() + freeFilesCount.get());

      if (filesToCreate > 0) {
         for (int i = 0; i < filesToCreate; i++) {
            // Keeping all files opened can be very costly (mainly on AIO)
            freeFiles.add(createFile(false, false, true, false, -1));
            freeFilesCount.getAndIncrement();
         }
      }
   }

   /**
    * Opens {@code file} (with full AIO depth when {@code multiAIO}, else depth 1)
    * and positions it just past the journal header block.
    */
   public void openFile(final JournalFile file, final boolean multiAIO) throws Exception {
      if (multiAIO) {
         file.getFile().open();
      } else {
         file.getFile().open(1, false);
      }

      file.getFile().position(file.getFile().calculateBlockStart(JournalImpl.SIZE_HEADER));
   }

   // Data File Operations ==========================================

   public JournalFile[] getDataFilesArray() {
      return dataFiles.toArray(new JournalFile[dataFiles.size()]);
   }

   /** Removes and returns the newest data file, or null if there are none. */
   public JournalFile pollLastDataFile() {
      return dataFiles.pollLast();
   }

   /** Removes {@code file} from the data files; logs (does not throw) if it was not present. */
   public void removeDataFile(final JournalFile file) {
      if (!dataFiles.remove(file)) {
         ActiveMQJournalLogger.LOGGER.couldNotRemoveFile(file);
      }
   }

   public int getDataFilesCount() {
      return dataFiles.size();
   }

   // NOTE(review): returns the live deque, not a copy — callers can mutate internal state.
   public Collection<JournalFile> getDataFiles() {
      return dataFiles;
   }

   public void clearDataFiles() {
      dataFiles.clear();
   }

   /** Adds {@code file} as the OLDEST data file (head of the deque). */
   public void addDataFileOnTop(final JournalFile file) {
      dataFiles.addFirst(file);

      if (CHECK_CONSISTENCE) {
         checkDataFiles();
      }
   }

   /** Dumps the current file, data files and free files into a human-readable string. */
   public String debugFiles() {
      StringBuilder buffer = new StringBuilder();

      buffer.append("**********\nCurrent File = " + journal.getCurrentFile() + "\n");
      buffer.append("**********\nDataFiles:\n");
      for (JournalFile file : dataFiles) {
         buffer.append(file.toString() + "\n");
      }
      buffer.append("*********\nFreeFiles:\n");
      for (JournalFile file : freeFiles) {
         buffer.append(file.toString() + "\n");
      }
      return buffer.toString();
   }

   /**
    * Debug-only consistency check: data files must have strictly increasing IDs, the current
    * file must not appear among them, and free-file IDs must be increasing as well.
    * WARNING: calls System.exit(-1) on an out-of-order data file — only ever enabled through
    * {@link #CHECK_CONSISTENCE}.
    */
   public synchronized void checkDataFiles() {
      long seq = -1;
      for (JournalFile file : dataFiles) {
         if (file.getFileID() <= seq) {
            ActiveMQJournalLogger.LOGGER.checkFiles();
            ActiveMQJournalLogger.LOGGER.info(debugFiles());
            ActiveMQJournalLogger.LOGGER.seqOutOfOrder();
            System.exit(-1);
         }

         if (journal.getCurrentFile() != null && journal.getCurrentFile().getFileID() <= file.getFileID()) {
            ActiveMQJournalLogger.LOGGER.checkFiles();
            ActiveMQJournalLogger.LOGGER.info(debugFiles());
            ActiveMQJournalLogger.LOGGER.currentFile(file.getFileID(), journal.getCurrentFile().getFileID(), file.getFileID(), (journal.getCurrentFile() == file));

            // throw new RuntimeException ("Check failure!");
         }

         if (journal.getCurrentFile() == file) {
            throw new RuntimeException("Check failure! Current file listed as data file!");
         }

         seq = file.getFileID();
      }

      long lastFreeId = -1;
      for (JournalFile file : freeFiles) {
         if (file.getFileID() <= lastFreeId) {
            ActiveMQJournalLogger.LOGGER.checkFiles();
            ActiveMQJournalLogger.LOGGER.info(debugFiles());
            ActiveMQJournalLogger.LOGGER.fileIdOutOfOrder();

            throw new RuntimeException("Check failure!");
         }

         lastFreeId = file.getFileID();

         if (file.getFileID() < seq) {
            ActiveMQJournalLogger.LOGGER.checkFiles();
            ActiveMQJournalLogger.LOGGER.info(debugFiles());
            ActiveMQJournalLogger.LOGGER.fileTooSmall();

            // throw new RuntimeException ("Check failure!");
         }
      }
   }

   /** Adds {@code file} as the NEWEST data file (tail of the deque). */
   public void addDataFileOnBottom(final JournalFile file) {
      dataFiles.add(file);

      if (CHECK_CONSISTENCE) {
         checkDataFiles();
      }
   }

   // Free File Operations ==========================================

   public int getFreeFilesCount() {
      return freeFilesCount.get();
   }

   /**
    * Returns {@code file} to the free pool, checking the max-files condition.
    *
    * @param file the file being freed
    * @param renameTmp should rename the file as it's being added to free files
    * @throws Exception
    */
   public synchronized void addFreeFile(final JournalFile file, final boolean renameTmp) throws Exception {
      addFreeFile(file, renameTmp, true);
   }

   /**
    * Returns {@code file} to the free pool. A file whose on-disk size differs from the
    * configured {@code fileSize} is deleted instead of being reused; otherwise it is either
    * re-initialized with a fresh ID and queued on {@link #freeFiles}, or deleted when the
    * total file count already satisfies {@code minFiles} and {@code checkDelete} is set.
    *
    * @param file
    * @param renameTmp   - should rename the file as it's being added to free files
    * @param checkDelete - should delete the file if max condition has been met
    * @throws Exception
    */
   public synchronized void addFreeFile(final JournalFile file, final boolean renameTmp, final boolean checkDelete) throws Exception {
      long calculatedSize = 0;
      try {
         calculatedSize = file.getFile().size();
      } catch (Exception e) {
         // NOTE(review): hard-exits the JVM if the size cannot be read — presumably treated
         // as an unrecoverable journal fault; confirm before softening this.
         e.printStackTrace();
         System.out.println("Can't get file size on " + file);
         System.exit(-1);
      }
      if (calculatedSize != fileSize) {
         ActiveMQJournalLogger.LOGGER.deletingFile(file);
         file.getFile().delete();
      } else if (!checkDelete || (freeFilesCount.get() + dataFiles.size() + 1 + openedFiles.size() < minFiles)) {
         // Re-initialise it

         if (JournalFilesRepository.trace) {
            JournalFilesRepository.trace("Adding free file " + file);
         }

         JournalFile jf = reinitializeFile(file);

         if (renameTmp) {
            jf.getFile().renameTo(JournalImpl.renameExtensionFile(jf.getFile().getFileName(), ".tmp"));
         }

         freeFiles.add(jf);
         freeFilesCount.getAndIncrement();
      } else {
         if (trace) {
            ActiveMQJournalLogger.LOGGER.trace("DataFiles.size() = " + dataFiles.size());
            ActiveMQJournalLogger.LOGGER.trace("openedFiles.size() = " + openedFiles.size());
            ActiveMQJournalLogger.LOGGER.trace("minfiles = " + minFiles);
            ActiveMQJournalLogger.LOGGER.trace("Free Files = " + freeFilesCount.get());
            ActiveMQJournalLogger.LOGGER.trace("File " + file + " being deleted as freeFiles.size() + dataFiles.size() + 1 + openedFiles.size() (" + (freeFilesCount.get() + dataFiles.size() + 1 + openedFiles.size()) + ") < minFiles (" + minFiles + ")");
         }
         file.getFile().delete();
      }

      if (CHECK_CONSISTENCE) {
         checkDataFiles();
      }
   }

   // NOTE(review): returns the live queue, not a copy — callers can mutate internal state.
   public Collection<JournalFile> getFreeFiles() {
      return freeFiles;
   }

   /**
    * Removes and returns one free file, decrementing the count.
    * Throws {@link java.util.NoSuchElementException} if the free pool is empty
    * (ConcurrentLinkedQueue.remove() semantics).
    */
   public JournalFile getFreeFile() {
      JournalFile file = freeFiles.remove();
      freeFilesCount.getAndDecrement();
      return file;
   }

   // Opened files operations =======================================

   public int getOpenedFilesCount() {
      return openedFiles.size();
   }

   /**
    * <p>This method will instantly return the opened file, and schedule opening and reclaiming.</p>
    * <p>In case there are no cached opened files, this method will block until the file was opened,
    * what would happen only if the system is under heavy load by another system (like a backup system,
    * or a DB sharing the same box as ActiveMQ).</p>
    */
   public JournalFile openFile() throws InterruptedException {
      if (JournalFilesRepository.trace) {
         JournalFilesRepository.trace("enqueueOpenFile with openedFiles.size=" + openedFiles.size());
      }

      // Replenish the queue: inline when there is no executor, otherwise asynchronously.
      if (openFilesExecutor == null) {
         pushOpenRunnable.run();
      } else {
         openFilesExecutor.execute(pushOpenRunnable);
      }

      JournalFile nextFile = null;

      // Poll in 5-second slices so a stuck open is logged repeatedly instead of blocking silently.
      while (nextFile == null) {
         nextFile = openedFiles.poll(5, TimeUnit.SECONDS);
         if (nextFile == null) {
            ActiveMQJournalLogger.LOGGER.errorOpeningFile(new Exception("trace"));
         }
      }

      if (JournalFilesRepository.trace) {
         JournalFilesRepository.trace("Returning file " + nextFile);
      }

      return nextFile;
   }

   /**
    * Open a file and place it into the openedFiles queue
    */
   public void pushOpenedFile() throws Exception {
      JournalFile nextOpenedFile = takeFile(true, true, true, false);

      if (JournalFilesRepository.trace) {
         JournalFilesRepository.trace("pushing openFile " + nextOpenedFile);
      }

      if (!openedFiles.offer(nextOpenedFile)) {
         ActiveMQJournalLogger.LOGGER.failedToAddFile(nextOpenedFile);
      }
   }

   /**
    * Closes {@code file} (after flushing the factory's buffer) and appends it to the data files.
    */
   public void closeFile(final JournalFile file) throws Exception {
      fileFactory.deactivateBuffer();
      file.getFile().close();
      dataFiles.add(file);
   }

   /**
    * This will get a File from freeFile without initializing it
    *
    * @param keepOpened          leave the file open (and positioned) on return
    * @param multiAIO            open with full AIO depth instead of depth 1
    * @param initFile            pre-fill and write a header when a brand-new file must be created
    * @param tmpCompactExtension append ".cmp" to the name (compactor temporary file)
    * @return uninitialized JournalFile
    * @throws Exception
    * @see {@link JournalImpl#initFileHeader(SequentialFileFactory, SequentialFile, int, long)}
    */
   public JournalFile takeFile(final boolean keepOpened,
                               final boolean multiAIO,
                               final boolean initFile,
                               final boolean tmpCompactExtension) throws Exception {
      JournalFile nextFile = null;

      nextFile = freeFiles.poll();

      if (nextFile != null) {
         freeFilesCount.getAndDecrement();
      }

      if (nextFile == null) {
         // No free file available: create one from scratch.
         nextFile = createFile(keepOpened, multiAIO, initFile, tmpCompactExtension, -1);
      } else {
         if (tmpCompactExtension) {
            SequentialFile sequentialFile = nextFile.getFile();
            sequentialFile.renameTo(sequentialFile.getFileName() + ".cmp");
         }

         if (keepOpened) {
            openFile(nextFile, multiAIO);
         }
      }
      return nextFile;
   }

   /**
    * Creates files for journal synchronization of a replicated backup.
    * <p/>
    * In order to simplify synchronization, the file IDs in the backup match those in the live
    * server.
    *
    * @param fileID the fileID to use when creating the file.
    */
   public JournalFile createRemoteBackupSyncFile(long fileID) throws Exception {
      return createFile(false, false, true, false, fileID);
   }

   /**
    * This method will create a new file on the file system, pre-fill it with FILL_CHARACTER
    *
    * @param keepOpened   leave the file open on return
    * @param multiAIO     open with full AIO depth instead of depth 1
    * @param init         pre-fill the file and write its header
    * @param tmpCompact   name the file with the compactor's ".cmp" suffix
    * @param fileIdPreSet explicit file ID, or -1 to generate the next one
    * @return an initialized journal file
    * @throws Exception
    */
   private JournalFile createFile(final boolean keepOpened,
                                  final boolean multiAIO,
                                  final boolean init,
                                  final boolean tmpCompact,
                                  final long fileIdPreSet) throws Exception {
      // File creation touches the file system, so run it under doPrivileged when a
      // SecurityManager is installed; otherwise skip the AccessController overhead.
      if (System.getSecurityManager() == null) {
         return createFile0(keepOpened, multiAIO, init, tmpCompact, fileIdPreSet);
      } else {
         try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<JournalFile>() {
               @Override
               public JournalFile run() throws Exception {
                  return createFile0(keepOpened, multiAIO, init, tmpCompact, fileIdPreSet);
               }
            });
         } catch (PrivilegedActionException e) {
            throw unwrapException(e);
         }
      }
   }

   /**
    * Rethrows the real cause wrapped by a PrivilegedActionException: RuntimeException and
    * Error propagate as-is, anything else is wrapped in a RuntimeException. Never returns
    * normally (declared return type exists only to satisfy callers' `throw` statements).
    */
   private RuntimeException unwrapException(PrivilegedActionException e) throws Exception {
      Throwable c = e.getCause();
      if (c instanceof RuntimeException) {
         throw (RuntimeException) c;
      } else if (c instanceof Error) {
         throw (Error) c;
      } else {
         throw new RuntimeException(c);
      }
   }

   /**
    * Does the actual work of {@link #createFile}: builds the file under a ".tmp" name,
    * optionally fills it and writes its header, renames it to its final name, and
    * optionally reopens it at the saved position.
    */
   private JournalFile createFile0(final boolean keepOpened,
                                   final boolean multiAIO,
                                   final boolean init,
                                   final boolean tmpCompact,
                                   final long fileIdPreSet) throws Exception {
      long fileID = fileIdPreSet != -1 ? fileIdPreSet : generateFileID();

      final String fileName = createFileName(tmpCompact, fileID);

      if (JournalFilesRepository.trace) {
         JournalFilesRepository.trace("Creating file " + fileName);
      }

      // Create under a ".tmp" name first so a half-written file is never mistaken
      // for a valid journal file on restart.
      String tmpFileName = fileName + ".tmp";

      SequentialFile sequentialFile = fileFactory.createSequentialFile(tmpFileName, maxAIO);

      sequentialFile.open(1, false);

      if (init) {
         sequentialFile.fill(0, fileSize, JournalImpl.FILL_CHARACTER);

         JournalImpl.initFileHeader(fileFactory, sequentialFile, userVersion, fileID);
      }

      // Remember the position (just past the header when init ran) so the reopened
      // file resumes at the same spot.
      long position = sequentialFile.position();

      sequentialFile.close();

      if (JournalFilesRepository.trace) {
         JournalFilesRepository.trace("Renaming file " + tmpFileName + " as " + fileName);
      }

      sequentialFile.renameTo(fileName);

      if (keepOpened) {
         if (multiAIO) {
            sequentialFile.open();
         } else {
            sequentialFile.open(1, false);
         }
         sequentialFile.position(position);
      }

      return new JournalFileImpl(sequentialFile, fileID, JournalImpl.FORMAT_VERSION);
   }

   /**
    * Builds "&lt;prefix&gt;-&lt;fileID&gt;.&lt;extension&gt;", plus ".cmp" for compactor files.
    *
    * @param tmpCompact
    * @param fileID
    * @return
    */
   private String createFileName(final boolean tmpCompact, final long fileID) {
      String fileName;
      if (tmpCompact) {
         fileName = filePrefix + "-" + fileID + "." + fileExtension + ".cmp";
      } else {
         fileName = filePrefix + "-" + fileID + "." + fileExtension;
      }
      return fileName;
   }

   /** Returns the next file ID; note the increment happens BEFORE the value is used. */
   private long generateFileID() {
      return nextFileID.incrementAndGet();
   }

   /**
    * Get the ID part of the name
    * (the digits between the prefix's trailing '-' and the first '.');
    * parse failures are logged and reported as 0.
    */
   private long getFileNameID(final String fileName) {
      try {
         return Long.parseLong(fileName.substring(filePrefix.length() + 1, fileName.indexOf('.')));
      } catch (Throwable e) {
         ActiveMQJournalLogger.LOGGER.errorRetrievingID(e, fileName);
         return 0;
      }
   }

   // Discard the old JournalFile and set it with a new ID
   private JournalFile reinitializeFile(final JournalFile file) throws Exception {
      long newFileID = generateFileID();

      SequentialFile sf = file.getFile();

      sf.open(1, false);

      // Rewrite the header in place with the new ID; the returned position is the
      // first writable byte after the header.
      int position = JournalImpl.initFileHeader(fileFactory, sf, userVersion, newFileID);

      JournalFile jf = new JournalFileImpl(sf, newFileID, JournalImpl.FORMAT_VERSION);

      sf.position(position);

      sf.close();

      return jf;
   }

   @Override
   public String toString() {
      return "JournalFilesRepository(dataFiles=" + dataFiles + ", freeFiles=" + freeFiles + ", openedFiles=" + openedFiles + ")";
   }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.views.textinput; import static com.facebook.react.uimanager.UIManagerHelper.getReactContext; import android.content.Context; import android.graphics.Rect; import android.graphics.Typeface; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.text.Editable; import android.text.InputType; import android.text.SpannableString; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; import android.text.TextWatcher; import android.text.method.KeyListener; import android.text.method.QwertyKeyListener; import android.util.TypedValue; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputConnection; import android.view.inputmethod.InputMethodManager; import androidx.annotation.Nullable; import androidx.appcompat.widget.AppCompatEditText; import androidx.core.view.AccessibilityDelegateCompat; import androidx.core.view.ViewCompat; import com.facebook.infer.annotation.Assertions; import com.facebook.react.bridge.ReactContext; import com.facebook.react.uimanager.FabricViewStateManager; import com.facebook.react.uimanager.UIManagerModule; import com.facebook.react.views.text.ReactSpan; import com.facebook.react.views.text.ReactTextUpdate; import com.facebook.react.views.text.ReactTypefaceUtils; import com.facebook.react.views.text.TextAttributes; import com.facebook.react.views.text.TextInlineImageSpan; import com.facebook.react.views.text.TextLayoutManager; import com.facebook.react.views.view.ReactViewBackgroundManager; import java.util.ArrayList; /** * A wrapper around 
the EditText that lets us better control what happens when an EditText gets * focused or blurred, and when to display the soft keyboard and when not to. * * <p>ReactEditTexts have setFocusableInTouchMode set to false automatically because touches on the * EditText are managed on the JS side. This also removes the nasty side effect that EditTexts have, * which is that focus is always maintained on one of the EditTexts. * * <p>The wrapper stops the EditText from triggering *TextChanged events, in the case where JS has * called this explicitly. This is the default behavior on other platforms as well. * VisibleForTesting from {@link TextInputEventsTestCase}. */ public class ReactEditText extends AppCompatEditText implements FabricViewStateManager.HasFabricViewStateManager { private final InputMethodManager mInputMethodManager; // This flag is set to true when we set the text of the EditText explicitly. In that case, no // *TextChanged events should be triggered. This is less expensive than removing the text // listeners and adding them back again after the text change is completed. protected boolean mIsSettingTextFromJS; private int mDefaultGravityHorizontal; private int mDefaultGravityVertical; /** A count of events sent to JS or C++. 
*/ protected int mNativeEventCount; private static final int UNSET = -1; private @Nullable ArrayList<TextWatcher> mListeners; private @Nullable TextWatcherDelegator mTextWatcherDelegator; private int mStagedInputType; protected boolean mContainsImages; private @Nullable Boolean mBlurOnSubmit; private boolean mDisableFullscreen; private @Nullable String mReturnKeyType; private @Nullable SelectionWatcher mSelectionWatcher; private @Nullable ContentSizeWatcher mContentSizeWatcher; private @Nullable ScrollWatcher mScrollWatcher; private final InternalKeyListener mKeyListener; private boolean mDetectScrollMovement = false; private boolean mOnKeyPress = false; private TextAttributes mTextAttributes; private boolean mTypefaceDirty = false; private @Nullable String mFontFamily = null; private int mFontWeight = ReactTypefaceUtils.UNSET; private int mFontStyle = ReactTypefaceUtils.UNSET; private boolean mAutoFocus = false; private boolean mDidAttachToWindow = false; private ReactViewBackgroundManager mReactBackgroundManager; private final FabricViewStateManager mFabricViewStateManager = new FabricViewStateManager(); protected boolean mDisableTextDiffing = false; protected boolean mIsSettingTextFromState = false; private static final KeyListener sKeyListener = QwertyKeyListener.getInstanceForFullKeyboard(); public ReactEditText(Context context) { super(context); setFocusableInTouchMode(false); mReactBackgroundManager = new ReactViewBackgroundManager(this); mInputMethodManager = (InputMethodManager) Assertions.assertNotNull(context.getSystemService(Context.INPUT_METHOD_SERVICE)); mDefaultGravityHorizontal = getGravity() & (Gravity.HORIZONTAL_GRAVITY_MASK | Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK); mDefaultGravityVertical = getGravity() & Gravity.VERTICAL_GRAVITY_MASK; mNativeEventCount = 0; mIsSettingTextFromJS = false; mBlurOnSubmit = null; mDisableFullscreen = false; mListeners = null; mTextWatcherDelegator = null; mStagedInputType = getInputType(); mKeyListener = new 
InternalKeyListener(); mScrollWatcher = null; mTextAttributes = new TextAttributes(); applyTextAttributes(); // Turn off hardware acceleration for Oreo (T40484798) // see https://issuetracker.google.com/issues/67102093 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && Build.VERSION.SDK_INT <= Build.VERSION_CODES.O_MR1) { setLayerType(View.LAYER_TYPE_SOFTWARE, null); } ViewCompat.setAccessibilityDelegate( this, new AccessibilityDelegateCompat() { @Override public boolean performAccessibilityAction(View host, int action, Bundle args) { if (action == AccessibilityNodeInfo.ACTION_CLICK) { int length = getText().length(); if (length > 0) { // For some reason, when you swipe to focus on a text input that already has text in // it, it clears the selection and resets the cursor to the beginning of the input. // Since this is not typically (ever?) what you want, let's just explicitly set the // selection on accessibility click to undo that. setSelection(length); } return requestFocusInternal(); } return super.performAccessibilityAction(host, action, args); } }); } @Override protected void finalize() { TextLayoutManager.deleteCachedSpannableForTag(getId()); } // After the text changes inside an EditText, TextView checks if a layout() has been requested. // If it has, it will not scroll the text to the end of the new text inserted, but wait for the // next layout() to be called. However, we do not perform a layout() after a requestLayout(), so // we need to override isLayoutRequested to force EditText to scroll to the end of the new text // immediately. 
// TODO: t6408636 verify if we should schedule a layout after a View does a requestLayout() @Override public boolean isLayoutRequested() { return false; } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { onContentSizeChange(); } @Override public boolean onTouchEvent(MotionEvent ev) { switch (ev.getAction()) { case MotionEvent.ACTION_DOWN: mDetectScrollMovement = true; // Disallow parent views to intercept touch events, until we can detect if we should be // capturing these touches or not. this.getParent().requestDisallowInterceptTouchEvent(true); break; case MotionEvent.ACTION_MOVE: if (mDetectScrollMovement) { if (!canScrollVertically(-1) && !canScrollVertically(1) && !canScrollHorizontally(-1) && !canScrollHorizontally(1)) { // We cannot scroll, let parent views take care of these touches. this.getParent().requestDisallowInterceptTouchEvent(false); } mDetectScrollMovement = false; } break; } return super.onTouchEvent(ev); } // Consume 'Enter' key events: TextView tries to give focus to the next TextInput, but it can't // since we only allow JS to change focus, which in turn causes TextView to crash. 
@Override public boolean onKeyUp(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_ENTER && !isMultiline()) { hideSoftKeyboard(); return true; } return super.onKeyUp(keyCode, event); } @Override protected void onScrollChanged(int horiz, int vert, int oldHoriz, int oldVert) { super.onScrollChanged(horiz, vert, oldHoriz, oldVert); if (mScrollWatcher != null) { mScrollWatcher.onScrollChanged(horiz, vert, oldHoriz, oldVert); } } @Override public InputConnection onCreateInputConnection(EditorInfo outAttrs) { ReactContext reactContext = getReactContext(this); InputConnection inputConnection = super.onCreateInputConnection(outAttrs); if (inputConnection != null && mOnKeyPress) { inputConnection = new ReactEditTextInputConnectionWrapper(inputConnection, reactContext, this); } if (isMultiline() && getBlurOnSubmit()) { // Remove IME_FLAG_NO_ENTER_ACTION to keep the original IME_OPTION outAttrs.imeOptions &= ~EditorInfo.IME_FLAG_NO_ENTER_ACTION; } return inputConnection; } @Override public void clearFocus() { setFocusableInTouchMode(false); super.clearFocus(); hideSoftKeyboard(); } @Override public boolean requestFocus(int direction, Rect previouslyFocusedRect) { // This is a no-op so that when the OS calls requestFocus(), nothing will happen. ReactEditText // is a controlled component, which means its focus is controlled by JS, with two exceptions: // autofocus when it's attached to the window, and responding to accessibility events. In both // of these cases, we call requestFocusInternal() directly. return isFocused(); } private boolean requestFocusInternal() { setFocusableInTouchMode(true); // We must explicitly call this method on the super class; if we call requestFocus() without // any arguments, it will call into the overridden requestFocus(int, Rect) above, which no-ops. 
boolean focused = super.requestFocus(View.FOCUS_DOWN, null); if (getShowSoftInputOnFocus()) { showSoftKeyboard(); } return focused; } @Override public void addTextChangedListener(TextWatcher watcher) { if (mListeners == null) { mListeners = new ArrayList<>(); super.addTextChangedListener(getTextWatcherDelegator()); } mListeners.add(watcher); } @Override public void removeTextChangedListener(TextWatcher watcher) { if (mListeners != null) { mListeners.remove(watcher); if (mListeners.isEmpty()) { mListeners = null; super.removeTextChangedListener(getTextWatcherDelegator()); } } } public void setContentSizeWatcher(ContentSizeWatcher contentSizeWatcher) { mContentSizeWatcher = contentSizeWatcher; } public void setScrollWatcher(ScrollWatcher scrollWatcher) { mScrollWatcher = scrollWatcher; } /** * Attempt to set a selection or fail silently. Intentionally meant to handle bad inputs. * EventCounter is the same one used as with text. * * @param eventCounter * @param start * @param end */ public void maybeSetSelection(int eventCounter, int start, int end) { if (!canUpdateWithEventCount(eventCounter)) { return; } if (start != UNSET && end != UNSET) { setSelection(start, end); } } @Override public void setSelection(int start, int end) { super.setSelection(start, end); } @Override protected void onSelectionChanged(int selStart, int selEnd) { super.onSelectionChanged(selStart, selEnd); if (mSelectionWatcher != null && hasFocus()) { mSelectionWatcher.onSelectionChanged(selStart, selEnd); } } @Override protected void onFocusChanged(boolean focused, int direction, Rect previouslyFocusedRect) { super.onFocusChanged(focused, direction, previouslyFocusedRect); if (focused && mSelectionWatcher != null) { mSelectionWatcher.onSelectionChanged(getSelectionStart(), getSelectionEnd()); } } public void setSelectionWatcher(SelectionWatcher selectionWatcher) { mSelectionWatcher = selectionWatcher; } public void setBlurOnSubmit(@Nullable Boolean blurOnSubmit) { mBlurOnSubmit = blurOnSubmit; } 
public void setOnKeyPress(boolean onKeyPress) { mOnKeyPress = onKeyPress; } public boolean getBlurOnSubmit() { if (mBlurOnSubmit == null) { // Default blurOnSubmit return isMultiline() ? false : true; } return mBlurOnSubmit; } public void setDisableFullscreenUI(boolean disableFullscreenUI) { mDisableFullscreen = disableFullscreenUI; updateImeOptions(); } public boolean getDisableFullscreenUI() { return mDisableFullscreen; } public void setReturnKeyType(String returnKeyType) { mReturnKeyType = returnKeyType; updateImeOptions(); } public String getReturnKeyType() { return mReturnKeyType; } /*protected*/ int getStagedInputType() { return mStagedInputType; } /*package*/ void setStagedInputType(int stagedInputType) { mStagedInputType = stagedInputType; } /*package*/ void commitStagedInputType() { if (getInputType() != mStagedInputType) { int selectionStart = getSelectionStart(); int selectionEnd = getSelectionEnd(); setInputType(mStagedInputType); setSelection(selectionStart, selectionEnd); } } @Override public void setInputType(int type) { Typeface tf = super.getTypeface(); // Input type password defaults to monospace font, so we need to re-apply the font super.setTypeface(tf); super.setInputType(type); mStagedInputType = type; /** * If set forces multiline on input, because of a restriction on Android source that enables * multiline only for inputs of type Text and Multiline on method {@link * android.widget.TextView#isMultilineInputType(int)}} Source: {@Link <a * href='https://android.googlesource.com/platform/frameworks/base/+/jb-release/core/java/android/widget/TextView.java'>TextView.java</a>} */ if (isMultiline()) { setSingleLine(false); } // We override the KeyListener so that all keys on the soft input keyboard as well as hardware // keyboards work. 
Some KeyListeners like DigitsKeyListener will display the keyboard but not // accept all input from it mKeyListener.setInputType(type); setKeyListener(mKeyListener); } public void setFontFamily(String fontFamily) { mFontFamily = fontFamily; mTypefaceDirty = true; } public void setFontWeight(String fontWeightString) { int fontWeight = ReactTypefaceUtils.parseFontWeight(fontWeightString); if (fontWeight != mFontWeight) { mFontWeight = fontWeight; mTypefaceDirty = true; } } public void setFontStyle(String fontStyleString) { int fontStyle = ReactTypefaceUtils.parseFontStyle(fontStyleString); if (fontStyle != mFontStyle) { mFontStyle = fontStyle; mTypefaceDirty = true; } } public void maybeUpdateTypeface() { if (!mTypefaceDirty) { return; } mTypefaceDirty = false; Typeface newTypeface = ReactTypefaceUtils.applyStyles( getTypeface(), mFontStyle, mFontWeight, mFontFamily, getContext().getAssets()); setTypeface(newTypeface); } // VisibleForTesting from {@link TextInputEventsTestCase}. public void requestFocusFromJS() { requestFocusInternal(); } /* package */ void clearFocusFromJS() { clearFocus(); } // VisibleForTesting from {@link TextInputEventsTestCase}. public int incrementAndGetEventCounter() { return ++mNativeEventCount; } public void maybeSetTextFromJS(ReactTextUpdate reactTextUpdate) { mIsSettingTextFromJS = true; maybeSetText(reactTextUpdate); mIsSettingTextFromJS = false; } public void maybeSetTextFromState(ReactTextUpdate reactTextUpdate) { mIsSettingTextFromState = true; maybeSetText(reactTextUpdate); mIsSettingTextFromState = false; } public boolean canUpdateWithEventCount(int eventCounter) { return eventCounter >= mNativeEventCount; } // VisibleForTesting from {@link TextInputEventsTestCase}. public void maybeSetText(ReactTextUpdate reactTextUpdate) { if (isSecureText() && TextUtils.equals(getText(), reactTextUpdate.getText())) { return; } // Only set the text if it is up to date. 
// Drop stale updates: only apply text whose JS event counter is current.
if (!canUpdateWithEventCount(reactTextUpdate.getJsEventCounter())) {
  return;
}

// The current text gets replaced with the text received from JS. However, the spans on the
// current text need to be adapted to the new text. Since TextView#setText() will remove or
// reset some of these spans even if they are set directly, SpannableStringBuilder#replace() is
// used instead (this is also used by the keyboard implementation underneath the covers).
SpannableStringBuilder spannableStringBuilder =
    new SpannableStringBuilder(reactTextUpdate.getText());

manageSpans(spannableStringBuilder);
mContainsImages = reactTextUpdate.containsImages();

// When we update text, we trigger onChangeText code that will
// try to update state if the wrapper is available. Temporarily disable
// to prevent an (asynchronous) infinite loop.
mDisableTextDiffing = true;

// On some devices, when the text is cleared, buggy keyboards will not clear the composing
// text so, we have to set text to null, which will clear the currently composing text.
if (reactTextUpdate.getText().length() == 0) {
  setText(null);
} else {
  // When we update text, we trigger onChangeText code that will
  // try to update state if the wrapper is available. Temporarily disable
  // to prevent an infinite loop.
  getText().replace(0, length(), spannableStringBuilder);
}
mDisableTextDiffing = false;

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
  if (getBreakStrategy() != reactTextUpdate.getTextBreakStrategy()) {
    setBreakStrategy(reactTextUpdate.getTextBreakStrategy());
  }
}

// Update cached spans (in Fabric only)
if (this.getFabricViewStateManager() != null) {
  TextLayoutManager.setCachedSpannabledForTag(getId(), spannableStringBuilder);
}
}

/**
 * Remove and/or add {@link Spanned.SPAN_EXCLUSIVE_EXCLUSIVE} spans, since they should only exist
 * as long as the text they cover is the same. All other spans will remain the same, since they
 * will adapt to the new text, hence why {@link SpannableStringBuilder#replace} never removes
 * them.
 */
private void manageSpans(SpannableStringBuilder spannableStringBuilder) {
  Object[] spans = getText().getSpans(0, length(), Object.class);
  for (int spanIdx = 0; spanIdx < spans.length; spanIdx++) {
    // Remove all styling spans we might have previously set
    if (spans[spanIdx] instanceof ReactSpan) {
      getText().removeSpan(spans[spanIdx]);
    }

    // Only SPAN_EXCLUSIVE_EXCLUSIVE spans are transplanted onto the new text below.
    if ((getText().getSpanFlags(spans[spanIdx]) & Spanned.SPAN_EXCLUSIVE_EXCLUSIVE)
        != Spanned.SPAN_EXCLUSIVE_EXCLUSIVE) {
      continue;
    }
    Object span = spans[spanIdx];
    final int spanStart = getText().getSpanStart(spans[spanIdx]);
    final int spanEnd = getText().getSpanEnd(spans[spanIdx]);
    final int spanFlags = getText().getSpanFlags(spans[spanIdx]);

    // Make sure the span is removed from existing text, otherwise the spans we set will be
    // ignored or it will cover text that has changed.
    getText().removeSpan(spans[spanIdx]);
    if (sameTextForSpan(getText(), spannableStringBuilder, spanStart, spanEnd)) {
      spannableStringBuilder.setSpan(span, spanStart, spanEnd, spanFlags);
    }
  }
}

// True when the old and new text agree character-for-character over [start, end).
private static boolean sameTextForSpan(
    final Editable oldText,
    final SpannableStringBuilder newText,
    final int start,
    final int end) {
  if (start > newText.length() || end > newText.length()) {
    return false;
  }
  for (int charIdx = start; charIdx < end; charIdx++) {
    if (oldText.charAt(charIdx) != newText.charAt(charIdx)) {
      return false;
    }
  }
  return true;
}

protected boolean showSoftKeyboard() {
  return mInputMethodManager.showSoftInput(this, 0);
}

protected void hideSoftKeyboard() {
  mInputMethodManager.hideSoftInputFromWindow(getWindowToken(), 0);
}

// Lazily created delegator that fans out to all registered TextWatchers.
private TextWatcherDelegator getTextWatcherDelegator() {
  if (mTextWatcherDelegator == null) {
    mTextWatcherDelegator = new TextWatcherDelegator();
  }
  return mTextWatcherDelegator;
}

/* package */ boolean isMultiline() {
  return (getInputType() & InputType.TYPE_TEXT_FLAG_MULTI_LINE) != 0;
}

// True for password-style input types (text or number password variations).
private boolean isSecureText() {
  return (getInputType()
          & (InputType.TYPE_NUMBER_VARIATION_PASSWORD | InputType.TYPE_TEXT_VARIATION_PASSWORD))
      != 0;
}

private void onContentSizeChange() {
  if (mContentSizeWatcher != null) {
    mContentSizeWatcher.onLayout();
  }

  setIntrinsicContentSize();
}

// TODO T58784068: delete this method
private void setIntrinsicContentSize() {
  // This serves as a check for whether we're running under Paper or Fabric.
  // By the time this is called, in Fabric we will have a state
  // wrapper 100% of the time.
  // Since the LocalData object is constructed by getting values from the underlying EditText
  // view, we don't need to construct one or apply it at all - it provides no use in Fabric.
  if (!mFabricViewStateManager.hasStateWrapper()) {
    ReactContext reactContext = getReactContext(this);
    final ReactTextInputLocalData localData = new ReactTextInputLocalData(this);
    UIManagerModule uiManager = reactContext.getNativeModule(UIManagerModule.class);
    if (uiManager != null) {
      uiManager.setViewLocalData(getId(), localData);
    }
  }
}

// 0 means "not set by JS": fall back to the default captured at construction time.
/* package */ void setGravityHorizontal(int gravityHorizontal) {
  if (gravityHorizontal == 0) {
    gravityHorizontal = mDefaultGravityHorizontal;
  }
  setGravity(
      (getGravity()
              & ~Gravity.HORIZONTAL_GRAVITY_MASK
              & ~Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK)
          | gravityHorizontal);
}

/* package */ void setGravityVertical(int gravityVertical) {
  if (gravityVertical == 0) {
    gravityVertical = mDefaultGravityVertical;
  }
  setGravity((getGravity() & ~Gravity.VERTICAL_GRAVITY_MASK) | gravityVertical);
}

// Maps the React "returnKeyType" prop onto the platform IME action flags.
private void updateImeOptions() {
  // Default to IME_ACTION_DONE
  int returnKeyFlag = EditorInfo.IME_ACTION_DONE;
  if (mReturnKeyType != null) {
    switch (mReturnKeyType) {
      case "go":
        returnKeyFlag = EditorInfo.IME_ACTION_GO;
        break;
      case "next":
        returnKeyFlag = EditorInfo.IME_ACTION_NEXT;
        break;
      case "none":
        returnKeyFlag = EditorInfo.IME_ACTION_NONE;
        break;
      case "previous":
        returnKeyFlag = EditorInfo.IME_ACTION_PREVIOUS;
        break;
      case "search":
        returnKeyFlag = EditorInfo.IME_ACTION_SEARCH;
        break;
      case "send":
        returnKeyFlag = EditorInfo.IME_ACTION_SEND;
        break;
      case "done":
        returnKeyFlag = EditorInfo.IME_ACTION_DONE;
        break;
    }
  }

  if (mDisableFullscreen) {
    setImeOptions(returnKeyFlag | EditorInfo.IME_FLAG_NO_FULLSCREEN);
  } else {
    setImeOptions(returnKeyFlag);
  }
}

// Accept inline-image drawables as valid so they are drawn and invalidated properly.
@Override
protected boolean verifyDrawable(Drawable drawable) {
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      if (span.getDrawable() == drawable) {
        return true;
      }
    }
  }
  return super.verifyDrawable(drawable);
}

@Override
public void invalidateDrawable(Drawable drawable) {
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      if (span.getDrawable() == drawable) {
        invalidate();
      }
    }
  }
  super.invalidateDrawable(drawable);
}

// Lifecycle forwarding: inline-image spans need to observe attach/detach events.
@Override
public void onDetachedFromWindow() {
  super.onDetachedFromWindow();
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      span.onDetachedFromWindow();
    }
  }
}

@Override
public void onStartTemporaryDetach() {
  super.onStartTemporaryDetach();
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      span.onStartTemporaryDetach();
    }
  }
}

@Override
public void onAttachedToWindow() {
  super.onAttachedToWindow();
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      span.onAttachedToWindow();
    }
  }

  // Honor autoFocus only on the very first attach to the window.
  if (mAutoFocus && !mDidAttachToWindow) {
    requestFocusInternal();
  }

  mDidAttachToWindow = true;
}

@Override
public void
onFinishTemporaryDetach() {
  super.onFinishTemporaryDetach();
  if (mContainsImages) {
    Spanned text = getText();
    TextInlineImageSpan[] spans = text.getSpans(0, text.length(), TextInlineImageSpan.class);
    for (TextInlineImageSpan span : spans) {
      span.onFinishTemporaryDetach();
    }
  }
}

// Background and border styling is delegated to the shared ReactBackgroundManager.
@Override
public void setBackgroundColor(int color) {
  mReactBackgroundManager.setBackgroundColor(color);
}

public void setBorderWidth(int position, float width) {
  mReactBackgroundManager.setBorderWidth(position, width);
}

public void setBorderColor(int position, float color, float alpha) {
  mReactBackgroundManager.setBorderColor(position, color, alpha);
}

public void setBorderRadius(float borderRadius) {
  mReactBackgroundManager.setBorderRadius(borderRadius);
}

public void setBorderRadius(float borderRadius, int position) {
  mReactBackgroundManager.setBorderRadius(borderRadius, position);
}

public void setBorderStyle(@Nullable String style) {
  mReactBackgroundManager.setBorderStyle(style);
}

public void setLetterSpacingPt(float letterSpacingPt) {
  mTextAttributes.setLetterSpacing(letterSpacingPt);
  applyTextAttributes();
}

public void setAllowFontScaling(boolean allowFontScaling) {
  if (mTextAttributes.getAllowFontScaling() != allowFontScaling) {
    mTextAttributes.setAllowFontScaling(allowFontScaling);
    applyTextAttributes();
  }
}

public void setFontSize(float fontSize) {
  mTextAttributes.setFontSize(fontSize);
  applyTextAttributes();
}

public void setMaxFontSizeMultiplier(float maxFontSizeMultiplier) {
  if (maxFontSizeMultiplier != mTextAttributes.getMaxFontSizeMultiplier()) {
    mTextAttributes.setMaxFontSizeMultiplier(maxFontSizeMultiplier);
    applyTextAttributes();
  }
}

public void setAutoFocus(boolean autoFocus) {
  mAutoFocus = autoFocus;
}

protected void applyTextAttributes() {
  // In general, the `getEffective*` functions return `Float.NaN` if the
  // property hasn't been set.

  // `getEffectiveFontSize` always returns a value so don't need to check for anything like
  // `Float.NaN`.
  setTextSize(TypedValue.COMPLEX_UNIT_PX, mTextAttributes.getEffectiveFontSize());

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
    float effectiveLetterSpacing = mTextAttributes.getEffectiveLetterSpacing();
    if (!Float.isNaN(effectiveLetterSpacing)) {
      setLetterSpacing(effectiveLetterSpacing);
    }
  }
}

@Override
public FabricViewStateManager getFabricViewStateManager() {
  return mFabricViewStateManager;
}

/**
 * This class will redirect *TextChanged calls to the listeners only in the case where the text is
 * changed by the user, and not explicitly set by JS.
 */
private class TextWatcherDelegator implements TextWatcher {
  @Override
  public void beforeTextChanged(CharSequence s, int start, int count, int after) {
    if (!mIsSettingTextFromJS && mListeners != null) {
      for (TextWatcher listener : mListeners) {
        listener.beforeTextChanged(s, start, count, after);
      }
    }
  }

  @Override
  public void onTextChanged(CharSequence s, int start, int before, int count) {
    if (!mIsSettingTextFromJS && mListeners != null) {
      for (TextWatcher listener : mListeners) {
        listener.onTextChanged(s, start, before, count);
      }
    }

    // Keep the Fabric span cache in sync with the latest text.
    if (getFabricViewStateManager() != null) {
      TextLayoutManager.setCachedSpannabledForTag(getId(), new SpannableString(getText()));
    }

    onContentSizeChange();
  }

  @Override
  public void afterTextChanged(Editable s) {
    if (!mIsSettingTextFromJS && mListeners != null) {
      for (TextWatcher listener : mListeners) {
        listener.afterTextChanged(s);
      }
    }
  }
}

/*
 * This class is set as the KeyListener for the underlying TextView
 * It does two things
 *  1) Provides the same answer to getInputType() as the real KeyListener would have which allows
 *     the proper keyboard to pop up on screen
 *  2) Permits all keyboard input through
 */
private static class InternalKeyListener implements KeyListener {

  private int mInputType = 0;

  public InternalKeyListener() {}

  public void setInputType(int inputType) {
    mInputType = inputType;
  }

  /*
   * getInputType will return whatever value is passed in. This will allow the proper keyboard
   * to be shown on screen but without the actual filtering done by other KeyListeners
   */
  @Override
  public int getInputType() {
    return mInputType;
  }

  /*
   * All overrides of key handling defer to the underlying KeyListener which is shared by all
   * ReactEditText instances. It will basically allow any/all keyboard input whether from
   * physical keyboard or from soft input.
   */
  @Override
  public boolean onKeyDown(View view, Editable text, int keyCode, KeyEvent event) {
    return sKeyListener.onKeyDown(view, text, keyCode, event);
  }

  @Override
  public boolean onKeyUp(View view, Editable text, int keyCode, KeyEvent event) {
    return sKeyListener.onKeyUp(view, text, keyCode, event);
  }

  @Override
  public boolean onKeyOther(View view, Editable text, KeyEvent event) {
    return sKeyListener.onKeyOther(view, text, event);
  }

  @Override
  public void clearMetaKeyState(View view, Editable content, int states) {
    sKeyListener.clearMetaKeyState(view, content, states);
  }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.leaderelection; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.highavailability.HighAvailabilityServices; import org.apache.flink.runtime.highavailability.TestingManualHighAvailabilityServices; import org.apache.flink.runtime.instance.ActorGateway; import org.apache.flink.runtime.io.network.partition.ResultPartitionType; import org.apache.flink.runtime.jobgraph.DistributionPattern; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobmanager.Tasks; import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup; import org.apache.flink.runtime.messages.JobManagerMessages; import org.apache.flink.runtime.testingUtils.TestingCluster; import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.NotifyWhenJobRemoved; import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.WaitForAllVerticesToBeRunningOrFinished; import org.apache.flink.runtime.testingUtils.TestingUtils; import org.apache.flink.util.TestLogger; import 
org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;

import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static org.junit.Assert.*;

/**
 * Tests that running jobs are properly cleaned up and TaskManagers re-register correctly
 * whenever the JobManager leadership changes.
 */
public class LeaderChangeStateCleanupTest extends TestLogger {

	// final: both are assigned exactly once.
	private static final Logger LOG = LoggerFactory.getLogger(LeaderChangeStateCleanupTest.class);

	private static final FiniteDuration timeout = TestingUtils.TESTING_DURATION();

	private int numJMs = 2;
	private int numTMs = 2;
	private int numSlotsPerTM = 2;
	private int parallelism = numTMs * numSlotsPerTM;

	private JobID jobId;
	private Configuration configuration;
	private TestingManualHighAvailabilityServices highAvailabilityServices;
	private TestingCluster cluster = null;
	private JobGraph job = createBlockingJob(parallelism);

	@Before
	public void before() throws Exception {
		jobId = HighAvailabilityServices.DEFAULT_JOB_ID;

		// Arm the blocking receiver so submitted jobs stay running until cancelled.
		Tasks.BlockingOnceReceiver$.MODULE$.blocking_$eq(true);

		configuration = new Configuration();
		configuration.setInteger(ConfigConstants.LOCAL_NUMBER_JOB_MANAGER, numJMs);
		configuration.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, numTMs);
		configuration.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, numSlotsPerTM);

		highAvailabilityServices = new TestingManualHighAvailabilityServices();

		cluster = new TestingCluster(
			configuration,
			highAvailabilityServices,
			true,
			false);

		cluster.start(false); // TaskManagers don't have to register at the JobManager

		cluster.waitForActorsToBeAlive(); // we only wait until all actors are alive
	}

	@After
	public void after() {
		if (cluster != null) {
			cluster.stop();
		}
	}

	/**
	 * Tests that a job is properly canceled in the case of a leader change. In such an event all
	 * TaskManagers have to disconnect from the previous leader and connect to the newly elected
	 * leader.
	 */
	@Test
	public void testStateCleanupAfterNewLeaderElectionAndListenerNotification() throws Exception {
		UUID leaderSessionID1 = UUID.randomUUID();
		UUID leaderSessionID2 = UUID.randomUUID();

		// first make JM(0) the leader
		highAvailabilityServices.grantLeadership(jobId, 0, leaderSessionID1);
		// notify all listeners
		highAvailabilityServices.notifyRetrievers(jobId, 0, leaderSessionID1);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		// submit blocking job so that it is not finished when we cancel it
		cluster.submitJobDetached(job);

		ActorGateway jm = cluster.getLeaderGateway(timeout);

		Future<Object> wait = jm.ask(new WaitForAllVerticesToBeRunningOrFinished(job.getJobID()), timeout);

		Await.ready(wait, timeout);

		Future<Object> jobRemoval = jm.ask(new NotifyWhenJobRemoved(job.getJobID()), timeout);

		// make the JM(1) the new leader
		highAvailabilityServices.grantLeadership(jobId, 1, leaderSessionID2);
		// notify all listeners about the event
		highAvailabilityServices.notifyRetrievers(jobId, 1, leaderSessionID2);

		Await.ready(jobRemoval, timeout);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		ActorGateway jm2 = cluster.getLeaderGateway(timeout);

		Future<Object> futureNumberSlots = jm2.ask(JobManagerMessages.getRequestTotalNumberOfSlots(), timeout);

		// check that all TMs have registered at the new leader
		int numberSlots = (Integer) Await.result(futureNumberSlots, timeout);

		assertEquals(parallelism, numberSlots);

		// try to resubmit now the non-blocking job, it should complete successfully
		Tasks.BlockingOnceReceiver$.MODULE$.blocking_$eq(false);
		cluster.submitJobAndWait(job, false, timeout);
	}

	/**
	 * Tests that a job is properly canceled in the case of a leader change. However, this time only the
	 * JMs are notified about the leader change and the TMs still believe the old leader to have
	 * leadership.
	 */
	@Test
	public void testStateCleanupAfterNewLeaderElection() throws Exception {
		UUID leaderSessionID = UUID.randomUUID();
		UUID newLeaderSessionID = UUID.randomUUID();

		highAvailabilityServices.grantLeadership(jobId, 0, leaderSessionID);
		highAvailabilityServices.notifyRetrievers(jobId, 0, leaderSessionID);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		// submit blocking job so that we can test job clean up
		cluster.submitJobDetached(job);

		ActorGateway jm = cluster.getLeaderGateway(timeout);

		Future<Object> wait = jm.ask(new WaitForAllVerticesToBeRunningOrFinished(job.getJobID()), timeout);

		Await.ready(wait, timeout);

		Future<Object> jobRemoval = jm.ask(new NotifyWhenJobRemoved(job.getJobID()), timeout);

		// only notify the JMs about the new leader JM(1)
		highAvailabilityServices.grantLeadership(jobId, 1, newLeaderSessionID);

		// job should be removed anyway
		Await.ready(jobRemoval, timeout);
	}

	/**
	 * Tests that a job is properly canceled in the event of a leader change. However, this time
	 * only the TMs are notified about the changing leader. This should be enough to cancel the
	 * currently running job, though.
	 */
	@Test
	public void testStateCleanupAfterListenerNotification() throws Exception {
		UUID leaderSessionID = UUID.randomUUID();
		UUID newLeaderSessionID = UUID.randomUUID();

		highAvailabilityServices.grantLeadership(jobId, 0, leaderSessionID);
		highAvailabilityServices.notifyRetrievers(jobId, 0, leaderSessionID);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		// submit blocking job
		cluster.submitJobDetached(job);

		ActorGateway jm = cluster.getLeaderGateway(timeout);

		Future<Object> wait = jm.ask(new WaitForAllVerticesToBeRunningOrFinished(job.getJobID()), timeout);

		Await.ready(wait, timeout);

		Future<Object> jobRemoval = jm.ask(new NotifyWhenJobRemoved(job.getJobID()), timeout);

		// notify listeners (TMs) about the leader change
		highAvailabilityServices.notifyRetrievers(jobId, 1, newLeaderSessionID);

		Await.ready(jobRemoval, timeout);
	}

	/**
	 * Tests that the same JobManager can be reelected as the leader. Even though, the same JM
	 * is elected as the next leader, all currently running jobs should be canceled properly and
	 * all TMs should disconnect from the leader and then reconnect to it.
	 */
	@Test
	public void testReelectionOfSameJobManager() throws Exception {
		UUID leaderSessionID = UUID.randomUUID();
		UUID newLeaderSessionID = UUID.randomUUID();

		FiniteDuration shortTimeout = new FiniteDuration(10, TimeUnit.SECONDS);

		highAvailabilityServices.grantLeadership(jobId, 0, leaderSessionID);
		highAvailabilityServices.notifyRetrievers(jobId, 0, leaderSessionID);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		// submit blocking job
		cluster.submitJobDetached(job);

		ActorGateway jm = cluster.getLeaderGateway(timeout);

		Future<Object> wait = jm.ask(new WaitForAllVerticesToBeRunningOrFinished(job.getJobID()), timeout);

		Await.ready(wait, timeout);

		Future<Object> jobRemoval = jm.ask(new NotifyWhenJobRemoved(job.getJobID()), timeout);

		LOG.info("Make JM(0) again the leader. This should first revoke the leadership.");

		// make JM(0) again the leader --> this implies first a leadership revocation
		highAvailabilityServices.grantLeadership(jobId, 0, newLeaderSessionID);

		Await.ready(jobRemoval, timeout);

		LOG.info("Job removed.");

		// The TMs should not be able to reconnect since they don't know the current leader
		// session ID
		try {
			cluster.waitForTaskManagersToBeRegistered(shortTimeout);
			fail("TaskManager should not be able to register at JobManager.");
		} catch (TimeoutException e) {
			// expected exception since the TMs have still the old leader session ID
		}

		LOG.info("Notify TMs about the new (old) leader.");

		// notify the TMs about the new (old) leader
		highAvailabilityServices.notifyRetrievers(jobId, 0, newLeaderSessionID);

		// wait until the leader gateway is available again (result itself was unused before;
		// the dead local variable has been removed)
		cluster.getLeaderGateway(timeout);

		cluster.waitForTaskManagersToBeRegistered(timeout);

		// try to resubmit now the non-blocking job, it should complete successfully
		Tasks.BlockingOnceReceiver$.MODULE$.blocking_$eq(false);
		cluster.submitJobAndWait(job, false, timeout);
	}

	/**
	 * Creates a job whose receiver vertex blocks (once) until unblocked via
	 * {@code Tasks.BlockingOnceReceiver}, so tests can observe a "running" job.
	 */
	public JobGraph createBlockingJob(int parallelism) {
		Tasks.BlockingOnceReceiver$.MODULE$.blocking_$eq(true);

		JobVertex sender = new JobVertex("sender");
		JobVertex receiver = new JobVertex("receiver");

		sender.setInvokableClass(Tasks.Sender.class);
		receiver.setInvokableClass(Tasks.BlockingOnceReceiver.class);

		sender.setParallelism(parallelism);
		receiver.setParallelism(parallelism);

		receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);

		SlotSharingGroup slotSharingGroup = new SlotSharingGroup();
		sender.setSlotSharingGroup(slotSharingGroup);
		receiver.setSlotSharingGroup(slotSharingGroup);

		return new JobGraph("Blocking test job", sender, receiver);
	}
}
package de.undercouch.citeproc; import de.undercouch.citeproc.bibtex.BibTeXConverter; import de.undercouch.citeproc.bibtex.BibTeXItemDataProvider; import de.undercouch.citeproc.csl.CSLCitation; import de.undercouch.citeproc.csl.CSLItemData; import de.undercouch.citeproc.helper.json.JsonLexer; import de.undercouch.citeproc.helper.json.JsonParser; import de.undercouch.citeproc.output.Bibliography; import de.undercouch.citeproc.output.Citation; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.graalvm.polyglot.Context; import org.graalvm.polyglot.Engine; import org.graalvm.polyglot.HostAccess; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import org.jbibtex.BibTeXDatabase; import org.jbibtex.ParseException; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.yaml.snakeyaml.Yaml; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; import static org.junit.Assert.assertEquals; @RunWith(Parameterized.class) public class FixturesTest { private static final String FIXTURES_DIR = "/fixtures"; private static final String TEST_SUITE_DIR = "/test-suite/processor-tests/humans"; private static final String TEST_SUITE_OVERRIDES_DIR = "/test-suite-overrides"; private static final String CITEPROC_JS_FILE = "/citeproc-js/citeproc.js"; private 
static final Map<String, ItemDataProvider> bibliographyFileCache = new HashMap<>();

    /**
     * {@code true} if the test should be run in experimental mode
     */
    private final boolean experimentalMode;

    /**
     * The output format to generate
     */
    private final String outputFormat;

    /**
     * The expected rendered result
     */
    private final String expectedResult;

    /**
     * The test data
     */
    private final Map<String, Object> data;

    /**
     * Shared JavaScript engine
     */
    private static Engine engine;

    /**
     * Source of citeproc.js
     */
    private static Source citeprocJsSource;

    /**
     * Initialize shared JavaScript engine and load the citeproc.js source
     * once for all tests
     */
    @BeforeClass
    public static void startUpClass() throws IOException {
        engine = Engine.create();

        URL citeprocJsUrl = FixturesTest.class.getResource(CITEPROC_JS_FILE);
        // only checked when assertions are enabled (-ea); a missing resource
        // would otherwise surface as an NPE in IOUtils.toString below
        assert citeprocJsUrl != null;
        String citeprocJs = IOUtils.toString(citeprocJsUrl, StandardCharsets.UTF_8);
        citeprocJsSource = Source.create("js", citeprocJs);
    }

    /**
     * Close shared JavaScript engine
     */
    @AfterClass
    public static void tearDownClass() {
        engine.close();
    }

    /**
     * Get a map of expected results from test fixture data. A plain string
     * value is normalized to a single-entry map under the key "text".
     * @param data the data
     * @param propertyName the name of the property holding the expected results
     * @return the expected results, keyed by output format
     */
    @SuppressWarnings("unchecked")
    private static Map<String, String> readExpectedResults(Map<String, Object> data,
            String propertyName) {
        Object expectedResultObj = data.get(propertyName);
        if (expectedResultObj instanceof String) {
            String str = (String)expectedResultObj;
            Map<String, String> map = new HashMap<>();
            map.put("text", str);
            expectedResultObj = map;
        }
        // unchecked: fixture files are trusted to hold string-to-string maps here
        return (Map<String, String>)expectedResultObj;
    }

    /**
     * Read a file from the CSL test suite and convert it to the same format
     * as our test fixtures
     * @param f the file to read
     * @return the parsed data object
     * @throws IOException if the file could not be read
     */
    private static Map<String, Object> cslTestSuiteFileToData(File f) throws IOException {
        Map<String, Object> result = new HashMap<>();

        // test-suite sections are delimited by marker lines of the form
        // ">>===== NAME =====>>" ... "<<===== NAME =====<<"
        Pattern startPattern =
Pattern.compile("^\\s*>>=+\\s*(.*?)\\s*=+>>\\s*$");
        Pattern endPattern = Pattern.compile("^\\s*<<=+\\s*(.*?)\\s*=+<<\\s*$");

        String currentKey = null;
        StringBuilder currentValue = null;
        try (BufferedReader br = Files.newBufferedReader(f.toPath())) {
            String line;
            while ((line = br.readLine()) != null) {
                if (currentKey == null) {
                    // outside a section: look for the next start marker
                    Matcher m = startPattern.matcher(line);
                    if (m.matches()) {
                        currentKey = m.group(1);
                        currentValue = new StringBuilder();
                    }
                } else {
                    Matcher m = endPattern.matcher(line);
                    if (m.matches()) {
                        // end of the current section: convert the collected
                        // value into the fixture attribute it corresponds to
                        String value = currentValue.toString().trim();
                        switch (currentKey.toLowerCase()) {
                            case "mode":
                                result.put("mode", value);
                                break;

                            case "result": {
                                // HTML results are wrapped in a map under "html"
                                if (value.startsWith("<div")) {
                                    Map<String, Object> htmlMap = new HashMap<>();
                                    htmlMap.put("html", value);
                                    result.put("result", htmlMap);
                                } else {
                                    result.put("result", value);
                                }
                                break;
                            }

                            case "csl":
                                result.put("style", value);
                                break;

                            case "input": {
                                JsonParser parser = new JsonParser(
                                        new JsonLexer(new StringReader(value)));
                                List<Object> items = parser.parseArray();
                                result.put("items", items);
                                break;
                            }

                            case "citation-items": {
                                // each array element becomes one citation object
                                // with a "citationItems" attribute
                                JsonParser parser = new JsonParser(
                                        new JsonLexer(new StringReader(value)));
                                List<Object> citationItems = parser.parseArray();
                                List<Map<String, Object>> citations = new ArrayList<>();
                                for (Object citationItem : citationItems) {
                                    Map<String, Object> citation = new HashMap<>();
                                    citation.put("citationItems", citationItem);
                                    citations.add(citation);
                                }
                                result.put("citations", citations);
                                break;
                            }

                            case "bibentries": {
                                JsonParser parser = new JsonParser(
                                        new JsonLexer(new StringReader(value)));
                                List<Object> itemIds = parser.parseArray();
                                result.put("itemIds", itemIds);
                                break;
                            }

                            case "abbreviations": {
                                JsonParser parser = new JsonParser(
                                        new JsonLexer(new StringReader(value)));
                                Map<String, Object> abbrevs = parser.parseObject();
                                result.put("abbreviations", abbrevs.get("default"));
                                break;
                            }
                        }
                        currentKey = null;
                    } else {
                        // still inside a section: accumulate the line
                        currentValue.append(line).append("\n");
                    }
                }
            }
        }

        if (!"citation".equals(result.get("mode"))) {
            // `citations' can only be defined if `mode' equals `citation'
            result.remove("citations");
        }

        return result;
    }

    /**
     * Get all test files and expand them into JUnit parameter tuples
     * (name, experimentalMode, outputFormat, expectedResult, data)
     */
    @Parameterized.Parameters(name = "{0}, {1}, {2}")
    @SuppressWarnings("unchecked")
    public static Iterable<Object[]> data() {
        URL fixturesUrl = FixturesTest.class.getResource(FIXTURES_DIR);
        URL testSuiteUrl = FixturesTest.class.getResource(TEST_SUITE_DIR);
        URL testSuiteOverridesUrl = FixturesTest.class.getResource(TEST_SUITE_OVERRIDES_DIR);
        // NOTE(review): URL.getPath() only works while resources live on the
        // file system — this would break if the tests were run from a JAR
        File fixturesDir = new File(fixturesUrl.getPath());
        File testSuiteDir = new File(testSuiteUrl.getPath());
        File testSuiteOverridesDir = new File(testSuiteOverridesUrl.getPath());

        // read test fixtures
        Stream<Map<String, Object>> fixturesStream = FileUtils.listFiles(fixturesDir,
                new String[]{"yaml"}, true)
                .stream()
                .map(f -> {
                    Map<String, Object> data;
                    Yaml yaml = new Yaml();
                    try (FileInputStream is = new FileInputStream(f)) {
                        data = yaml.loadAs(is, Map.class);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                    // test name = file name without the ".yaml" extension
                    data.put("__name", f.getName().substring(0, f.getName().length() - 5));
                    return data;
                });

        // read fixtures from CSL test suite
        Stream<Map<String, Object>> testSuiteStream = Stream.of(TEST_SUITE_TESTS)
                .map(name -> {
                    // read test suite file
                    Map<String, Object> data;
                    try {
                        data = cslTestSuiteFileToData(new File(testSuiteDir, name + ".txt"));
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }

                    // override attributes if there is an override file
                    File overridesFiles = new File(testSuiteOverridesDir, name + ".yaml");
                    if (overridesFiles.exists()) {
                        Map<String, Object> overrides;
                        Yaml yaml = new Yaml();
                        try (FileInputStream is = new FileInputStream(overridesFiles)) {
                            overrides = yaml.loadAs(is, Map.class);
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        }

                        // rename "result" to "resultLegacy" and handle HTML output
                        Object overridesResult = overrides.get("result");
                        if (overridesResult != null) {
                            Object oldResult = data.get("result");
data.put("resultLegacy", oldResult);
                            if (overridesResult instanceof Map && oldResult instanceof String) {
                                // if the override result is HTML, wrap the old plain
                                // string result in an "html" map as well
                                Map<String, Object> overridesResultMap =
                                        (Map<String, Object>)overridesResult;
                                if (overridesResultMap.get("html") != null) {
                                    Map<String, Object> resultLegacyMap = new HashMap<>();
                                    resultLegacyMap.put("html", oldResult);
                                    data.put("resultLegacy", resultLegacyMap);
                                }
                            }
                        }

                        data.putAll(overrides);
                    }

                    data.put("__name", name);
                    return data;
                });

        // convert test fixtures to parameters
        Stream<Map<String, Object>> dataStream = Stream.concat(testSuiteStream, fixturesStream);
        return dataStream.flatMap(data -> {
            Map<String, String> expectedResults = readExpectedResults(data, "result");
            Map<String, String> expectedResultsLegacy;
            if (data.containsKey("resultLegacy")) {
                expectedResultsLegacy = readExpectedResults(data, "resultLegacy");
            } else {
                expectedResultsLegacy = expectedResults;
            }

            // "experimentalMode: only" restricts a fixture to the Java
            // implementation; otherwise both implementations are exercised
            String strExperimentalMode = (String)data.get("experimentalMode");
            boolean experimentalOnly = "only".equals(strExperimentalMode);
            Stream<Boolean> s;
            if (experimentalOnly) {
                s = Stream.of(true);
            } else {
                s = Stream.of(true, false);
            }

            return s.flatMap(experimentalMode -> {
                Map<String, String> er = expectedResults;
                if (!experimentalMode) {
                    er = expectedResultsLegacy;
                }
                // one parameter tuple per (mode, output format) combination
                return er.entrySet().stream().map(expectedResult -> new Object[] {
                        data.get("__name"), experimentalMode, expectedResult.getKey(),
                        expectedResult.getValue(), data }
                );
            });
        }).collect(Collectors.toList());
    }

    /**
     * Create a new test
     * @param name the name of the test file
     * @param experimentalMode {@code true} if the test should be run in
     * experimental mode
     * @param outputFormat the output format to generate
     * @param expectedResult the expected rendered result
     * @param data the test data
     */
    public FixturesTest(@SuppressWarnings("unused") String name,
            boolean experimentalMode, String outputFormat, String expectedResult,
            Map<String, Object> data) {
        this.experimentalMode = experimentalMode;
        this.outputFormat = outputFormat;
this.expectedResult = expectedResult;
        this.data = data;
    }

    /**
     * Load a BibTeX bibliography from the classpath (optionally gzipped) and
     * wrap it in an {@link ItemDataProvider}. Results are cached per resource
     * name in {@code bibliographyFileCache}.
     * @param filename the classpath resource to load
     * @return the item data provider for the parsed database
     * @throws IOException if the resource could not be read or parsed
     */
    private static ItemDataProvider loadBibliographyFile(String filename)
            throws IOException {
        ItemDataProvider result = bibliographyFileCache.get(filename);
        if (result == null) {
            BibTeXDatabase db;
            try (InputStream is = FixturesTest.class.getResourceAsStream(filename);
                 BufferedInputStream bis = new BufferedInputStream(is)) {
                InputStream tis = bis;
                // transparently decompress gzipped bibliographies
                if (filename.endsWith(".gz")) {
                    tis = new GZIPInputStream(bis);
                }
                db = new BibTeXConverter().loadDatabase(tis);
            } catch (ParseException e) {
                // surface parse failures as I/O errors, preserving the cause
                throw new IOException(e);
            }
            BibTeXItemDataProvider r = new BibTeXItemDataProvider();
            r.addDatabase(db);
            result = r;
            bibliographyFileCache.put(filename, result);
        }
        return result;
    }

    /**
     * Run a test from the test suite
     * @throws IOException if an I/O error occurred
     */
    @Test
    @SuppressWarnings("unchecked")
    public void run() throws IOException {
        String mode = (String)data.get("mode");
        String style = (String)data.get("style");

        // get bibliography file
        ItemDataProvider itemDataProvider = null;
        String bibliographyFile = (String)data.get("bibliographyFile");
        if (bibliographyFile != null) {
            itemDataProvider = loadBibliographyFile(bibliographyFile);
        }

        // get item data
        List<Map<String, Object>> rawItems =
                (List<Map<String, Object>>)data.get("items");
        if (rawItems != null && bibliographyFile != null) {
            throw new IllegalStateException("Found both `bibliographyFile' " +
                    "and `items'. Define only one of them.");
        }

        // convert item data
        if (rawItems != null) {
            CSLItemData[] items = new CSLItemData[rawItems.size()];
            for (int i = 0; i < items.length; ++i) {
                items[i] = CSLItemData.fromJson(rawItems.get(i));
            }
            itemDataProvider = new ListItemDataProvider(items);
        }

        if (itemDataProvider == null) {
            throw new IllegalStateException("Either `bibliographyFile' or " +
                    "`items' must be specified.");
        }

        // get abbreviations
        DefaultAbbreviationProvider abbreviationProvider = null;
        if (data.containsKey("abbreviations")) {
            abbreviationProvider = new DefaultAbbreviationProvider();
            Map<String, Map<String, String>> abbrevs =
                    (Map<String, Map<String, String>>)data.get("abbreviations");
            for (Map.Entry<String, Map<String, String>> vm : abbrevs.entrySet()) {
                for (Map.Entry<String, String> om : vm.getValue().entrySet()) {
                    abbreviationProvider.addAbbreviation(vm.getKey(),
                            om.getKey(), om.getValue());
                }
            }
        }

        // get the item IDs to test against: either a flat list of strings
        // (one group) or a list of lists (several groups); fall back to all
        // IDs known to the item data provider
        List<Collection<String>> itemIds;
        List<Object> itemIdsListObj = (List<Object>)data.get("itemIds");
        if (itemIdsListObj != null && !itemIdsListObj.isEmpty() &&
                itemIdsListObj.get(0) instanceof String) {
            ArrayList<String> iis = new ArrayList<>();
            for (Object o : itemIdsListObj) {
                iis.add((String)o);
            }
            itemIds = Collections.singletonList(iis);
        } else if (itemIdsListObj != null) {
            itemIds = new ArrayList<>();
            for (Object o : itemIdsListObj) {
                List<String> l = (List<String>)o;
                itemIds.add(l);
            }
        } else {
            itemIds = Collections.singletonList(itemDataProvider.getIds());
        }

        // get the raw citations
        List<Map<String, Object>> rawCitations =
                (List<Map<String, Object>>)data.get("citations");
        if (rawCitations != null && !"citation".equals(mode)) {
            throw new IllegalStateException("`citations' can only be defined " +
                    "if `mode' equals `citation'.");
        }
        if (rawCitations != null && itemIdsListObj != null) {
            throw new IllegalStateException("Found both `itemIds' and " +
                    "`citations'. Define only one of them.");
        }

        // convert citations
        List<CSLCitation> citations = null;
        if (rawCitations != null) {
            citations = new ArrayList<>();
            for (Map<String, Object> raw : rawCitations) {
                citations.add(CSLCitation.fromJson(raw));
            }
        }

        // render with either the Java implementation (experimental mode) or
        // the citeproc.js reference implementation
        String actualResult;
        if (experimentalMode) {
            actualResult = applyCSL(mode, style, itemDataProvider,
                    abbreviationProvider, itemIds, citations);
        } else {
            actualResult = applyCiteprocJs(mode, style, itemDataProvider,
                    abbreviationProvider, itemIds, citations);
        }

        // compare result
        assertEquals(expectedResult, actualResult);
    }

    /**
     * Render the test input with the Java CSL implementation
     * @param mode either "bibliography" or "citation"
     * @param style the CSL style to apply
     * @param itemDataProvider provides the citation items
     * @param abbreviationProvider provides abbreviations (may be {@code null})
     * @param itemIds groups of item IDs to register
     * @param citations explicit citations to render (may be {@code null})
     * @return the rendered result
     * @throws IOException if the processor could not be created
     */
    private String applyCSL(String mode, String style,
            ItemDataProvider itemDataProvider,
            AbbreviationProvider abbreviationProvider,
            List<Collection<String>> itemIds,
            List<CSLCitation> citations) throws IOException {
        // create CSL processor
        CSL citeproc = new CSLBuilder()
                .itemDataProvider(itemDataProvider)
                .style(style)
                .abbreviationProvider(abbreviationProvider)
                .build();
        citeproc.setOutputFormat(outputFormat);
        citeproc.setConvertLinks(true);

        // register citation items
        for (Collection<String> ii : itemIds) {
            citeproc.registerCitationItems(ii);
        }

        String actualResult;
        if ("bibliography".equals(mode)) {
            Bibliography bibl = citeproc.makeBibliography();
            actualResult = bibl.makeString();
        } else if ("citation".equals(mode)) {
            List<Citation> generatedCitations = new ArrayList<>();
            if (citations != null) {
                for (CSLCitation c : citations) {
                    generatedCitations.addAll(citeproc.makeCitation(c));
                }
            } else {
                // no explicit citations: cite all registered items at once
                String[] ii = citeproc.getRegisteredItems().stream()
                        .map(CSLItemData::getId)
                        .toArray(String[]::new);
                generatedCitations.addAll(citeproc.makeCitation(ii));
            }
            actualResult = generatedCitations.stream()
                    .map(Citation::getText)
                    .collect(Collectors.joining("\n"));
        } else {
            throw new IllegalStateException("Unknown mode: " + mode);
        }

        return actualResult;
    }

    /**
     * Render the test input with the citeproc.js reference implementation
     * running in a GraalVM JavaScript context
     * @param mode either "bibliography" or "citation"
     * @param style the CSL style to apply (name or XML source)
     * @param itemDataProvider provides the citation items
     * @param abbreviationProvider provides abbreviations (may be {@code null})
     * @param itemIds groups of item IDs to register
     */
    private String applyCiteprocJs(String mode, String style,
            ItemDataProvider itemDataProvider,
            AbbreviationProvider abbreviationProvider,
            List<Collection<String>> itemIds,
List<CSLCitation> citations) throws IOException {
        // load style if necessary: a value not starting with '<' is treated
        // as a style name and resolved to a classpath resource
        if (!style.trim().startsWith("<")) {
            if (!style.endsWith(".csl")) {
                style = style + ".csl";
            }
            if (!style.startsWith("/")) {
                style = "/" + style;
            }
            URL styleUrl = CSL.class.getResource(style);
            // only checked when assertions are enabled (-ea)
            assert styleUrl != null;
            style = IOUtils.toString(styleUrl, StandardCharsets.UTF_8);
        }

        // broad host access is acceptable here because this context only runs
        // the bundled citeproc.js inside a test
        try (Context context = Context.newBuilder("js")
                .engine(engine)
                .allowHostAccess(HostAccess.ALL)
                .allowHostClassLookup(className -> true)
                .build()) {
            context.eval(citeprocJsSource);

            // expose the Java-side test inputs to the JavaScript context
            Value bindings = context.getBindings("js");
            bindings.putMember("style", style);
            bindings.putMember("itemDataProvider", itemDataProvider);
            bindings.putMember("itemIds", itemIds);
            bindings.putMember("outputFormat", outputFormat);
            bindings.putMember("citations", citations);

            LocaleProvider localeProvider = new DefaultLocaleProvider();
            bindings.putMember("localeProvider", localeProvider);

            context.eval("js", "let StringJsonBuilderFactory = Java.type('de.undercouch.citeproc.helper.json.StringJsonBuilderFactory')\n" +
                    "let jsonFactory = new StringJsonBuilderFactory()\n");

            // build the citeproc.js "sys" object that bridges locale and item
            // lookups back to the Java providers, then create the engine and
            // register all item ID groups
            context.eval("js", "let sys = {\n" +
                    "retrieveLocale: function(lang) { return localeProvider.retrieveLocale(lang) },\n" +
                    "retrieveItem: function(id) {\n" +
                    "let item = itemDataProvider.retrieveItem(id)\n" +
                    "let jsonBuilder = jsonFactory.createJsonBuilder()\n" +
                    "return JSON.parse(item.toJson(jsonBuilder))\n" +
                    "}\n" +
                    "}\n" +
                    "let csl = new CSL.Engine(sys, style)\n" +
                    "csl.setOutputFormat(outputFormat)\n" +
                    "csl.opt.development_extensions.wrap_url_and_doi = true\n" +
                    "for (let ids of itemIds) {\n" +
                    // convert IDs to native JavaScript array
                    "let idsArr = []\n" +
                    "for (let id of ids) { idsArr.push(id) }\n" +
                    "csl.updateItems(idsArr);\n" +
                    "}\n");

            if (abbreviationProvider != null) {
                // hook the Java abbreviation provider into citeproc.js
                bindings.putMember("abbreviationProvider", abbreviationProvider);
                context.eval("js", "sys.getAbbreviation = function(styleID, abbrevs, name, category, orig, itemType) {\n" +
                        "let r = 
abbreviationProvider.getAbbreviation(category, orig, null)\n" + "if (r) {\n" + "abbrevs[name][category][orig] = r\n" + "}\n" + "}\n"); } String actualResult; if ("bibliography".equals(mode)) { context.eval("js", "let bibliography = csl.makeBibliography()\n" + "let biblStr = ''\n" + "if (bibliography[0].bibstart) biblStr += bibliography[0].bibstart\n" + "for (let e of bibliography[1]) biblStr += e\n" + "if (bibliography[0].bibend) biblStr += bibliography[0].bibend\n"); actualResult = bindings.getMember("biblStr").asString(); } else if ("citation".equals(mode)) { context.eval("js", "let generatedCitations = []\n" + "if (citations) {\n" + "for (let c of citations) {\n" + "let jsonBuilder = jsonFactory.createJsonBuilder()\n" + "let obj = JSON.parse(c.toJson(jsonBuilder))\n" + "let r = csl.appendCitationCluster(obj)\n" + "generatedCitations.push(r[0][1])\n" + "}\n" + "} else {\n" + "let citation = { citationItems: csl.registry.reflist }\n" + "let r = csl.appendCitationCluster(citation)\n" + "generatedCitations.push(r[0][1])\n" + "}\n" + "generatedCitations = generatedCitations.join('\\n')\n"); actualResult = bindings.getMember("generatedCitations").asString(); } else { throw new IllegalStateException("Unknown mode: " + mode); } return actualResult; } } private final static String[] TEST_SUITE_TESTS = new String[] { "affix_CommaAfterQuote", "affix_InterveningEmpty", "affix_MovingPunctuation", "affix_PrefixFullCitationTextOnly", // "affix_PrefixWithDecorations", "affix_SpaceWithQuotes", "affix_TextNodeWithMacro", // "affix_WithCommas", "affix_WordProcessorAffixNoSpace", "api_UpdateItemsDelete", "api_UpdateItemsDeleteDecrementsByCiteDisambiguation", "api_UpdateItemsReshuffle", // "bibheader_EntryspacingDefaultValueOne", // "bibheader_EntryspacingExplicitValueZero", // "bibheader_SecondFieldAlign", // "bibheader_SecondFieldAlignWithAuthor", // "bibheader_SecondFieldAlignWithNumber", "bugreports_Abnt", // "bugreports_AccidentalAllCaps", // "bugreports_AllCapsLeakage", // 
"bugreports_ApostropheOnParticle", // "bugreports_ArabicLocale", // "bugreports_AsaSpacing", // "bugreports_AsmJournals", // "bugreports_AuthorPosition", // "bugreports_AuthorYear", // "bugreports_AutomaticallyDeleteItemsFails", // "bugreports_BadCitationUpdate", // "bugreports_BadDelimiterBeforeCollapse", // "bugreports_ByBy", // "bugreports_CapsAfterOneWordPrefix", // "bugreports_ChicagoAuthorDateLooping", // "bugreports_ChineseCharactersFamilyOnlyPluralLabel", // "bugreports_CitationSortsWithEtAl", // "bugreports_CollapseFailure", // "bugreports_ContainerTitleShort", // "bugreports_ContentPunctuationDuplicate1", // "bugreports_ContextualPluralWithMainItemFields", // "bugreports_CreepingAddNames", // "bugreports_DelimiterOnLayout", // "bugreports_DelimitersOnLocator", // "bugreports_DemoPageFullCiteCruftOnSubsequent", // "bugreports_DisambiguationAddNames", // "bugreports_DisambiguationAddNamesBibliography", // "bugreports_DoubleEncodedAngleBraces", // "bugreports_DroppingGroupDelimiterSpace", // "bugreports_DuplicateSpaces", // "bugreports_DuplicateSpaces2", // "bugreports_DuplicateSpaces3", // "bugreports_DuplicateTerminalPunctuationInBibliography", // "bugreports_EmptyIfMatchNoneFail", // "bugreports_EmptyInput", // "bugreports_EnvAndUrb", // "bugreports_EtAlSubsequent", // "bugreports_FourAndFour", // "bugreports_FrenchApostrophe", // "bugreports_GreekStyleProblems", // "bugreports_GreekStyleTwoEditors", // "bugreports_IeeePunctuation", // "bugreports_IllustratorInExtra", // "bugreports_LabelsOutOfPlace", // "bugreports_LegislationCrash", // "bugreports_MatchedAuthorAndDate", // "bugreports_MissingItemInJoin", // "bugreports_MovePunctuationInsideQuotesForLocator", // "bugreports_NoCaseEscape", // "bugreports_NoEventInNestedMacroWithOldProcessor", // "bugreports_NoTitle", // "bugreports_NonBreakingSpaceJoinFail", // "bugreports_NumberAffixEscape", // "bugreports_NumberInMacroWithVerticalAlign", // "bugreports_OldMhraDisambiguationFailure", // 
"bugreports_OverwriteCitationItems", // "bugreports_ProcessorHang1", // "bugreports_SectionAndLocator", // "bugreports_SelfLink", // "bugreports_SimpleBib", // "bugreports_SingleQuote", // "bugreports_SingleQuoteXml", // "bugreports_SingletonIfMatchNoneFail", // "bugreports_SmallCapsEscape", // "bugreports_SortSecondaryKey", // "bugreports_SortSecondaryKeyBibliography", // "bugreports_SortedIeeeItalicsFail", // "bugreports_StyleError001", // "bugreports_ThesisUniversityAppearsTwice", // "bugreports_TitleCase", // "bugreports_TwoFullStops", // "bugreports_UndefinedBeforeVal", // "bugreports_UndefinedInName", // "bugreports_UndefinedInName2", // "bugreports_UndefinedInName3", // "bugreports_UndefinedNotString", // "bugreports_UndefinedStr", // "bugreports_UnisaHarvardInitialization", // "bugreports_YearSuffixInHarvard1", // "bugreports_YearSuffixLingers", // "bugreports_disambiguate", // "bugreports_effingBug", // "bugreports_ikeyOne", // "bugreports_parenthesis", // name with quotes and particle // "bugreports_parseName", // "bugreports_undefinedCrash", // "collapse_AuthorCollapse", // "collapse_AuthorCollapseDifferentAuthorsOneWithEtAl", // "collapse_AuthorCollapseNoDate", // "collapse_AuthorCollapseNoDateSorted", // "collapse_ChicagoAfterCollapse", // "collapse_CitationNumberRangesInsert", // "collapse_CitationNumberRangesMixed", // "collapse_CitationNumberRangesMixed2", // "collapse_CitationNumberRangesMixed3", // "collapse_CitationNumberRangesOneOnly", // "collapse_CitationNumberRangesWithAffixes", // "collapse_CitationNumberRangesWithAffixesGrouped", // "collapse_CitationNumberRangesWithAffixesGroupedLocator", // "collapse_CitationNumberRangesWithAffixesNoCollapse", // "collapse_NumericDuplicate", // "collapse_NumericDuplicate2", // "collapse_TrailingDelimiter", // "collapse_YearSuffixCollapse", // "collapse_YearSuffixCollapseNoRange", // "collapse_YearSuffixCollapseNoYearSuffixDelimiter", // "collapse_YearSuffixImplicitCollapseNoYearSuffixDelimiter", 
"condition_EmptyDate", "condition_EmptyIsNumericFalse", "condition_EmptyIsUncertainDateFalse", "condition_EmptyShortTitleFalse", "condition_FirstNullAny", // Abbreviations are not supported yet // "condition_IsNumeric", "condition_LocatorIsFalse", "condition_MatchAll", "condition_NameAndTextVars", "condition_NumberIsNumeric", "condition_NumeralIsNumeric", "condition_NumeralWithTextIsNumeric", "condition_RefTypeBranching", "condition_SingletonIfMatchNone", "condition_TextIsNotNumeric", "condition_VariableAll", "condition_VariableAny", "condition_VariableNone", // "date_Accessed", // "date_AccessedCrash", // "date_DateAD", // "date_DateBC", // "date_DateNoDateNoTest", // "date_DateNoDateWithTest", // "date_DayOrdinalDayOneOnly", // "date_DisappearingBug", // "date_EmptyStrings", // "date_IgnoreNonexistentSort", // "date_InPress", // "date_January", // "date_KeyVariable", // "date_LiteralFailGracefullyIfNoValue", // "date_LocalizedDateFormats-af-ZA", // "date_LocalizedDateFormats-ar-AR", // "date_LocalizedDateFormats-bg-BG", // "date_LocalizedDateFormats-ca-AD", // "date_LocalizedDateFormats-cs-CZ", // "date_LocalizedDateFormats-da-DK", // "date_LocalizedDateFormats-de-AT", // "date_LocalizedDateFormats-de-CH", // "date_LocalizedDateFormats-de-DE", // "date_LocalizedDateFormats-el-GR", // "date_LocalizedDateFormats-en-US", // "date_LocalizedDateFormats-es-ES", // "date_LocalizedDateFormats-et-EE", // "date_LocalizedDateFormats-fr-FR", // "date_LocalizedDateFormats-he-IL", // "date_LocalizedDateFormats-hu-HU", // "date_LocalizedDateFormats-is-IS", // "date_LocalizedDateFormats-it-IT", // "date_LocalizedDateFormats-ja-JP", // "date_LocalizedDateFormats-kh-KH", // "date_LocalizedDateFormats-ko-KR", // "date_LocalizedDateFormats-mn-MN", // "date_LocalizedDateFormats-nb-NO", // "date_LocalizedDateFormats-nl-NL", // "date_LocalizedDateFormats-pl-PL", // "date_LocalizedDateFormats-pt-BR", // "date_LocalizedDateFormats-pt-PT", // "date_LocalizedDateFormats-ro-RO", // 
"date_LocalizedDateFormats-ru-RU", // "date_LocalizedDateFormats-sk-SK", // "date_LocalizedDateFormats-sl-SL", // "date_LocalizedDateFormats-sr-RS", // "date_LocalizedDateFormats-sv-SE", // "date_LocalizedDateFormats-th-TH", // "date_LocalizedDateFormats-tr-TR", // "date_LocalizedDateFormats-uk-UA", // "date_LocalizedDateFormats-vi-VN", // "date_LocalizedDateFormats-zh-CN", // "date_LocalizedDateFormats-zh-TW", // "date_LocalizedNumericDefault", // "date_LocalizedNumericDefaultMissingDay", // "date_LocalizedNumericDefaultWithAffixes", // "date_LocalizedNumericYear", // "date_LocalizedNumericYearMonth", // "date_LocalizedNumericYearRange", // "date_LocalizedNumericYearWithAffixes", // "date_LocalizedTextDefault", // "date_LocalizedTextDefaultMissingDay", // "date_LocalizedTextDefaultWithAffixes", // "date_LocalizedTextInStyleLocaleWithTextCase", // "date_LocalizedTextMonthFormOverride", // "date_LocalizedTextYear", // "date_LocalizedTextYearMonth", // "date_LocalizedTextYearWithAffixes", // "date_LocalizedWithInStyleFormatting", // "date_LongMonth", // "date_LopsidedDataYearSuffixCollapse", // "date_MaskNonexistentWithCondition", // "date_NegativeDateSort", // "date_NegativeDateSortViaMacro", // "date_NegativeDateSortViaMacroOnYearMonthOnly", // "date_NoDate", // "date_NonexistentSortReverseBibliography", // "date_NonexistentSortReverseCitation", // "date_OtherAlone", // "date_OtherWithDate", // "date_RangeDelimiter", // "date_SeasonRange1", // "date_SeasonRange2", // "date_SeasonRange3", // "date_SeasonSubstituteInGroup", // "date_SortEmptyDatesBibliography", // "date_SortEmptyDatesCitation", "date_String", // "date_TextFormFulldateDayRange", // "date_TextFormFulldateMonthRange", // "date_TextFormFulldateYearRange", // "date_TextFormMonthdateMonthRange", // "date_TextFormMonthdateYearRange", // "date_TextFormYeardateYearRange", // "date_TextFormYeardateYearRangeOpen", // "date_Uncertain", // "date_VariousInvalidDates", // "date_YearSuffixDelimiter", // 
"date_YearSuffixImplicitWithNoDate", // "date_YearSuffixImplicitWithNoDateOneOnly", // "date_YearSuffixWithNoDate", // "decorations_AndTermUnaffectedByNameDecorations", // "decorations_Baseline", // "decorations_NestedQuotes", // "decorations_NestedQuotesInnerReverse", // "decorations_NoNormalWithoutDecoration", // "decorations_SimpleFlipFlop", // "decorations_SimpleQuotes", // "disambiguate_AddNamesFailure", // "disambiguate_AddNamesFailureWithAddGivenname", // "disambiguate_AddNamesSuccess", // "disambiguate_AllNamesBaseNameCountOnFailureIfYearSuffixAvailable", // "disambiguate_AllNamesGenerally", // "disambiguate_AllNamesSimpleSequence", // "disambiguate_AllNamesWithInitialsBibliography", // "disambiguate_AllNamesWithInitialsGenerally", // "disambiguate_AndreaEg1a", // "disambiguate_AndreaEg1b", // "disambiguate_AndreaEg1c", // "disambiguate_AndreaEg2", // "disambiguate_AndreaEg3", // "disambiguate_AndreaEg4", // "disambiguate_AndreaEg5", // "disambiguate_BasedOnEtAlSubsequent", // "disambiguate_BasedOnSubsequentFormWithBackref", // "disambiguate_BasedOnSubsequentFormWithBackref2", // "disambiguate_BasedOnSubsequentFormWithLocator", // "disambiguate_ByCiteBaseNameCountOnFailureIfYearSuffixAvailable", // "disambiguate_ByCiteDisambiguateCondition", // "disambiguate_ByCiteGivennameExpandCrossNestedNames", // "disambiguate_ByCiteGivennameNoShortFormInitializeWith", // "disambiguate_ByCiteGivennameShortFormInitializeWith", // "disambiguate_ByCiteGivennameShortFormNoInitializeWith", // "disambiguate_ByCiteIncremental1", // "disambiguate_ByCiteIncremental2", // "disambiguate_ByCiteIsDefault", // "disambiguate_ByCiteMinimalGivennameExpandMinimalNames", // "disambiguate_ByCiteRetainNamesOnFailureIfYearSuffixNotAvailable", // "disambiguate_ByCiteTwoAuthorsSameCite", // "disambiguate_ByCiteTwoAuthorsSameFamilyName", // "disambiguate_CitationLabelDefault", // "disambiguate_CitationLabelInData", // "disambiguate_DifferentSpacingInInitials", // 
"disambiguate_DisambiguateTrueAndYearSuffixOne", // "disambiguate_DisambiguateTrueReflectedInBibliography", // "disambiguate_DisambiguateWithThree", // "disambiguate_DisambiguateWithThree2", // "disambiguate_DisambiguationHang", // "disambiguate_ExtraTextCitation", // "disambiguate_FailWithYearSuffix", // "disambiguate_FamilyNameOnly", // "disambiguate_HonorFullnameInBibliography", // "disambiguate_ImplicitYearSuffixOnceOnly", // "disambiguate_IncrementalExtraText", // "disambiguate_InitializeWithButNoDisambiguation", // "disambiguate_LastOnlyFailWithByCite", // "disambiguate_NoTextElementUsesYearSuffixVariable", // "disambiguate_PrimaryNameGenerally", // "disambiguate_PrimaryNameWithInitialsLimitedToPrimary", // "disambiguate_SetsOfNames", // "disambiguate_SkipAccessedYearSuffix", // "disambiguate_ThreeNoAuthorNoTitleEntries", // "disambiguate_ToInitialOnly", // "disambiguate_Trigraph", // "disambiguate_WithOriginalYear", // "disambiguate_YearCollapseWithInstitution", // "disambiguate_YearSuffixAndSort", // "disambiguate_YearSuffixAtTwoLevels", // "disambiguate_YearSuffixFiftyTwoEntries", // "disambiguate_YearSuffixFiftyTwoEntriesByCite", // "disambiguate_YearSuffixMacroSameYearExplicit", // "disambiguate_YearSuffixMacroSameYearImplicit", // "disambiguate_YearSuffixMidInsert", // "disambiguate_YearSuffixMixedDates", // "disambiguate_YearSuffixTwoPairsBibliography", // "disambiguate_YearSuffixTwoPairsFirstNameBibliography", // "disambiguate_YearSuffixTwoPairsFullNamesBibliography", // "disambiguate_YearSuffixWithEtAlSubequent", // "disambiguate_YearSuffixWithMixedCreatorTypes", // "display_AuthorAsHeading", // "display_DisplayBlock", // "display_LostSuffix", // "display_SecondFieldAlignClone", // "display_SecondFieldAlignMigratePunctuation", // "etal_CitationAndBibliographyDecorationsInBibliography", // "etal_CitationAndBibliographyDecorationsInCitation", // "etal_ShortFormOfName", // "etal_UseZeroFirst", // "flipflop_Apostrophes", // 
"flipflop_BoldfaceNodeLevelMarkup", // "flipflop_CompleteCiteInPrefix", // "flipflop_ItalicsFlipped", // "flipflop_ItalicsSimple", // "flipflop_ItalicsWithOk", // "flipflop_ItalicsWithOkAndTextcase", // "flipflop_LeadingMarkupWithApostrophe", // "flipflop_LeadingSingleQuote", // "flipflop_LongComplexPrefix", // "flipflop_NumericField", // "flipflop_OrphanQuote", // "flipflop_QuotesInFieldNotOnNode", // "flipflop_QuotesNodeLevelMarkup", // "flipflop_SingleBeforeColon", // "flipflop_SingleQuotesOnItalics", // "flipflop_SmallCaps", // "flipflop_StartingApostrophe", // "form_ShortTitleOnly", // "form_TitleShort", // "form_TitleShortNoLong", // "form_TitleTestNoLongFalse", // "fullstyles_ABdNT", // "fullstyles_APA", // "fullstyles_ChicagoArticleTitleQuestion", // "fullstyles_ChicagoAuthorDateSimple", // "fullstyles_ChicagoNoteWithBibliographyWithPublisher", "group_ComplexNesting", // "group_LegalWithAuthorDate", "group_ShortOutputOnly", // "group_SuppressTermInMacro", "group_SuppressTermWhenNoOutputFromPartialDate", "group_SuppressValueWithEmptySubgroup", "group_SuppressWithEmptyNestedDateNode", // "integration_CitationSort", // "integration_CitationSortTwice", // "integration_DeleteName", // "integration_DisambiguateAddGivenname1", // "integration_DisambiguateAddGivenname2", // "integration_DuplicateItem", // "integration_DuplicateItem2", // "integration_FirstReferenceNoteNumberPositionChange", // "integration_IbidOnInsert", // "integration_IbidWithDifferentLocators", // "integration_SimpleFirstReferenceNoteNumber", // "integration_SimpleIbid", // "integration_SubsequentWhenInterveningFootnote", // "integration_YearSuffixOnOffOn", // "label_CollapsedPageNumberPluralDetection", // "label_CompactNamesAfterFullNames", // "label_EditorTranslator1", // "label_EditorTranslator2", // "label_EmptyLabelVanish", // "label_EmptyLabelVanishPage", // "label_ImplicitForm", // "label_MissingReturnsEmpty", // "label_NameLabelThroughSubstitute", // 
"label_NoFirstCharCapWithInTextClass", // "label_NonexistentNameVariableLabel", // "label_PageWithEmbeddedLabel", // "label_PluralNumberOfVolumes", // "label_PluralPagesWithAlphaPrefix", // "label_PluralWithAmpersand", // "label_PluralWithAnd", // "label_PluralWithCommaAnd", // "label_PluralWithCommaLocalizedAnd", // "label_PluralWithLocalizedAmpersand", // "label_PluralWithLocalizedAnd", // "locale_EmptyDate", // "locale_EmptyPlusOverrideDate", // "locale_EmptyPlusOverrideStyleOpt", // "locale_EmptyPlusOverrideTerm", // "locale_EmptyStyleOpt", // "locale_EmptyTerm", // "locale_ForceEmptyAndOthersTerm", // "locale_ForceEmptyEtAlTerm", // "locale_NonExistentLocaleDef", // "locale_OverloadWithEmptyString", // "locale_PageRangeDelimiterTermDefined", // "locale_PageRangeDelimiterTermFrenchUndef", // "locale_PageRangeDelimiterTermUndefined", // "locale_SpecificDate", // "locale_SpecificStyleOpt", // "locale_SpecificTerm", // "locale_TitleCaseEmptyLangEmptyLocale", // "locale_TitleCaseEmptyLangNonEnglishLocale", // "locale_TitleCaseGarbageLangEmptyLocale", // "locale_TitleCaseGarbageLangEnglishLocale", // "locale_TitleCaseGarbageLangNonEnglishLocale", // "locale_TitleCaseNonEnglishLangUpperEmptyLocale", // "locale_UnknownTerm", // "locator_SimpleLocators", // "locator_SingularEmbeddedLabelAfterPlural", // "locator_TermSelection", // "locator_TrickyEntryForPlurals", // "locator_WithLeadingSpace", // "locator_WorkaroundTestForSubVerbo", // "magic_AllowRepeatDateRenderings", // "magic_CapitalizeFirstOccurringNameParticle", // "magic_CapitalizeFirstOccurringTerm", // "magic_CitationLabelInBibliography", // "magic_CitationLabelInCitation", // "magic_EntrySpacingDouble", // "magic_HangingIndent", // "magic_ImplicitYearSuffixDelimiter", // "magic_ImplicitYearSuffixExplicitDelimiter", // "magic_LineSpacingDouble", // "magic_LineSpacingTripleStretch", // "magic_NameParticle", // "magic_NameSuffixNoComma", // "magic_NameSuffixWithComma", // "magic_NumberRangeEnglish", // 
"magic_NumberRangeFrench", // "magic_PunctuationInQuoteDefaultEnglishDelimiter", // "magic_PunctuationInQuoteDefaultEnglishSuffix", // "magic_PunctuationInQuoteDelimiterTrue", // "magic_PunctuationInQuoteFalse", // "magic_PunctuationInQuoteFalseSuppressExtra", // "magic_PunctuationInQuoteNested", // "magic_PunctuationInQuoteSuffixTrue", // "magic_PunctuationInQuoteTrueSuppressExtra", // "magic_QuotesAndBraces1", // "magic_QuotesAndBraces2", // "magic_SecondFieldAlign", // "magic_StripPeriodsExcludeAffixes", // "magic_StripPeriodsFalse", // "magic_StripPeriodsTrue", // "magic_StripPeriodsTrueShortForm", // "magic_SubsequentAuthorSubstitute", // "magic_SubsequentAuthorSubstituteNotFooled", // "magic_SubsequentAuthorSubstituteOfTitleField", // "magic_SuperscriptChars", // "magic_SuppressDuplicateVariableRendering", // "magic_SuppressLayoutDelimiterIfPrefixComma", // "magic_TermCapitalizationWithPrefix", // "magic_TextRangeEnglish", // "magic_TextRangeFrench", // "name_AfterInvertedName", // "name_AllCapsInitialsUntouched", // "name_AndTextDelimiterPrecedesLastAlways", // "name_ApostropheInGivenName", // "name_ArabicShortForms", // "name_ArticularNameAsSortOrder", // "name_ArticularPlain", // "name_ArticularShortForm", // "name_ArticularShortFormCommaSuffix", // "name_ArticularWithComma", // "name_ArticularWithCommaNameAsSortOrder", // "name_AsianGlyphs", // "name_AuthorCount", // "name_AuthorCountWithMultipleVariables", // "name_AuthorCountWithSameVarContentAndCombinedTermFail", // "name_AuthorCountWithSameVarContentAndCombinedTermSucceed", // "name_AuthorEditorCount", // "name_BibliographyNameFormNeverShrinks", // "name_CelticClanName", // "name_CeltsAndToffsCrowdedInitials", // "name_CeltsAndToffsNoHyphens", // "name_CeltsAndToffsSpacedInitials", // "name_CeltsAndToffsWithHyphens", // "name_CiteGroupDelimiterWithYearCollapse", // "name_CiteGroupDelimiterWithYearSuffixCollapse", // "name_CiteGroupDelimiterWithYearSuffixCollapse2", // 
"name_CiteGroupDelimiterWithYearSuffixCollapse3", // "name_CollapseRoleLabels", // "name_Delimiter", // "name_EditorTranslatorBoth", // "name_EditorTranslatorSameEmptyTerm", // "name_EditorTranslatorSameWithTerm", // "name_EditorTranslatorWithTranslatorOnlyBib", // "name_EtAlKanji", // "name_EtAlUseLast", // "name_EtAlWithCombined", // "name_FirstInitialFullForm", // "name_FormattingOfParticles", // "name_GreekSimple", // "name_HebrewAnd", // "name_HierarchicalDelimiter", // "name_HyphenatedFirstName", // "name_HyphenatedNonDroppingParticle1", // "name_HyphenatedNonDroppingParticle2", // "name_InheritAttributesEtAlStyle", "name_InitialsInitializeFalse", "name_InitialsInitializeFalseEmpty", "name_InitialsInitializeFalsePeriod", "name_InitialsInitializeFalsePeriodSpace", "name_InitialsInitializeTrue", "name_InitialsInitializeTrueEmpty", "name_InitialsInitializeTruePeriod", "name_InitialsInitializeTruePeriodSpace", // "name_Institution", // "name_InstitutionDecoration", // "name_LabelAfterPlural", // "name_LabelAfterPluralDecorations", // "name_LabelFormatBug", // "name_LiteralWithComma", // "name_LongAbbreviation", // "name_LowercaseSurnameSuffix", // "name_MultipleLiteral", // "name_NoNameNode", // "name_NonDroppingParticleDefault", // "name_OnlyFamilyname", // "name_OnlyGivenname", // "name_OverridingHierarchicalDelimiter", // "name_ParseNames", // "name_ParsedCommaDelimitedDroppingParticleSortOrderingWithoutAffixes", // "name_ParsedDroppingParticleWithAffixes", // "name_ParsedDroppingParticleWithApostrophe", // "name_ParsedNonDroppingParticleWithAffixes", // "name_ParsedNonDroppingParticleWithApostrophe", // "name_ParsedUpperCaseNonDroppingParticle", // "name_ParticleCaps1", // "name_ParticleCaps2", // "name_ParticleCaps3", // "name_ParticleFormatting", // "name_ParticleParse1", // "name_ParticlesDemoteNonDroppingNever", // "name_PeriodAfterInitials", // "name_QuashOrdinaryVariableRenderedViaSubstitute", // "name_RomanianTwo", // "name_SemicolonWithAnd", // 
"name_SplitInitials", // "name_StaticParticles", // "name_SubsequentAuthorSubstituteMultipleNames", // "name_SubstituteInheritLabel", // "name_SubstituteMacroInheritDecorations", // "name_SubstituteName", // "name_SubstituteOnDateGroupSpanFail", // "name_SubstituteOnGroupSpanGroupSpanFail", // "name_SubstituteOnMacroGroupSpanFail", // "name_SubstituteOnNamesSingletonGroupSpanFail", // "name_SubstituteOnNamesSpanGroupSpanFail", // "name_SubstituteOnNamesSpanNamesSpanFail", // "name_SubstituteOnNumberGroupSpanFail", // "name_TwoRolesSameRenderingSeparateRoleLabels", // "name_WesternArticularLowercase", // "name_WesternPrimaryFontStyle", // "name_WesternPrimaryFontStyleTwoAuthors", // "name_WesternSimple", // "name_WesternTwoAuthors", // "name_WithNonBreakingSpace", // "name_namepartAffixes", // "name_namepartAffixesNameAsSortOrder", // "name_namepartAffixesNameAsSortOrderDemoteNonDroppingParticle", // "nameattr_AndOnBibliographyInBibliography", // "nameattr_AndOnBibliographyInCitation", // "nameattr_AndOnCitationInBibliography", // "nameattr_AndOnCitationInCitation", // "nameattr_AndOnNamesInBibliography", // "nameattr_AndOnNamesInCitation", // "nameattr_AndOnStyleInBibliography", // "nameattr_AndOnStyleInCitation", // "nameattr_DelimiterPrecedesEtAlOnBibliographyInBibliography", // "nameattr_DelimiterPrecedesEtAlOnBibliographyInCitation", // "nameattr_DelimiterPrecedesEtAlOnCitationInBibliography", // "nameattr_DelimiterPrecedesEtAlOnCitationInCitation", // "nameattr_DelimiterPrecedesEtAlOnNamesInBibliography", // "nameattr_DelimiterPrecedesEtAlOnNamesInCitation", // "nameattr_DelimiterPrecedesEtAlOnStyleInBibliography", // "nameattr_DelimiterPrecedesEtAlOnStyleInCitation", // "nameattr_DelimiterPrecedesLastOnBibliographyInBibliography", // "nameattr_DelimiterPrecedesLastOnBibliographyInCitation", // "nameattr_DelimiterPrecedesLastOnCitationInBibliography", // "nameattr_DelimiterPrecedesLastOnCitationInCitation", // 
"nameattr_DelimiterPrecedesLastOnNamesInBibliography", // "nameattr_DelimiterPrecedesLastOnNamesInCitation", // "nameattr_DelimiterPrecedesLastOnStyleInBibliography", // "nameattr_DelimiterPrecedesLastOnStyleInCitation", // "nameattr_EtAlMinOnBibliographyInBibliography", // "nameattr_EtAlMinOnBibliographyInCitation", // "nameattr_EtAlMinOnCitationInBibliography", // "nameattr_EtAlMinOnCitationInCitation", // "nameattr_EtAlMinOnNamesInBibliography", // "nameattr_EtAlMinOnNamesInCitation", // "nameattr_EtAlMinOnStyleInBibliography", // "nameattr_EtAlMinOnStyleInCitation", // "nameattr_EtAlSubsequentMinOnBibliographyInBibliography", // "nameattr_EtAlSubsequentMinOnBibliographyInCitation", // "nameattr_EtAlSubsequentMinOnCitationInBibliography", // "nameattr_EtAlSubsequentMinOnCitationInCitation", // "nameattr_EtAlSubsequentMinOnNamesInBibliography", // "nameattr_EtAlSubsequentMinOnStyleInBibliography", // "nameattr_EtAlSubsequentMinOnStyleInCitation", // "nameattr_EtAlSubsequentUseFirstOnBibliographyInBibliography", // "nameattr_EtAlSubsequentUseFirstOnBibliographyInCitation", // "nameattr_EtAlSubsequentUseFirstOnCitationInBibliography", // "nameattr_EtAlSubsequentUseFirstOnCitationInCitation", // "nameattr_EtAlSubsequentUseFirstOnStyleInBibliography", // "nameattr_EtAlSubsequentUseFirstOnStyleInCitation", // "nameattr_EtAlUseFirstOnBibliographyInBibliography", // "nameattr_EtAlUseFirstOnBibliographyInCitation", // "nameattr_EtAlUseFirstOnCitationInBibliography", // "nameattr_EtAlUseFirstOnCitationInCitation", // "nameattr_EtAlUseFirstOnNamesInBibliography", // "nameattr_EtAlUseFirstOnNamesInCitation", // "nameattr_EtAlUseFirstOnStyleInBibliography", // "nameattr_EtAlUseFirstOnStyleInCitation", // "nameattr_InitializeWithOnBibliographyInBibliography", // "nameattr_InitializeWithOnBibliographyInCitation", // "nameattr_InitializeWithOnCitationInBibliography", // "nameattr_InitializeWithOnCitationInCitation", // "nameattr_InitializeWithOnNamesInBibliography", // 
"nameattr_InitializeWithOnNamesInCitation", // "nameattr_InitializeWithOnStyleInBibliography", // "nameattr_InitializeWithOnStyleInCitation", // "nameattr_NameAsSortOrderOnBibliographyInBibliography", // "nameattr_NameAsSortOrderOnBibliographyInCitation", // "nameattr_NameAsSortOrderOnCitationInBibliography", // "nameattr_NameAsSortOrderOnCitationInCitation", // "nameattr_NameAsSortOrderOnNamesInBibliography", // "nameattr_NameAsSortOrderOnNamesInCitation", // "nameattr_NameAsSortOrderOnStyleInBibliography", // "nameattr_NameAsSortOrderOnStyleInCitation", // "nameattr_NameDelimiterOnBibliographyInBibliography", // "nameattr_NameDelimiterOnBibliographyInCitation", // "nameattr_NameDelimiterOnCitationInBibliography", // "nameattr_NameDelimiterOnCitationInCitation", // "nameattr_NameDelimiterOnNamesInBibliography", // "nameattr_NameDelimiterOnNamesInCitation", // "nameattr_NameDelimiterOnStyleInBibliography", // "nameattr_NameDelimiterOnStyleInCitation", // "nameattr_NameFormOnBibliographyInBibliography", // "nameattr_NameFormOnBibliographyInCitation", // "nameattr_NameFormOnCitationInBibliography", // "nameattr_NameFormOnCitationInCitation", // "nameattr_NameFormOnNamesInBibliography", // "nameattr_NameFormOnNamesInCitation", // "nameattr_NameFormOnStyleInBibliography", // "nameattr_NameFormOnStyleInCitation", // "nameattr_NamesDelimiterOnBibliographyInBibliography", // "nameattr_NamesDelimiterOnBibliographyInCitation", // "nameattr_NamesDelimiterOnCitationInBibliography", // "nameattr_NamesDelimiterOnCitationInCitation", // "nameattr_NamesDelimiterOnNamesInBibliography", // "nameattr_NamesDelimiterOnNamesInCitation", // "nameattr_NamesDelimiterOnStyleInBibliography", // "nameattr_NamesDelimiterOnStyleInCitation", // "nameattr_SortSeparatorOnBibliographyInBibliography", // "nameattr_SortSeparatorOnBibliographyInCitation", // "nameattr_SortSeparatorOnCitationInBibliography", // "nameattr_SortSeparatorOnCitationInCitation", // 
"nameattr_SortSeparatorOnNamesInBibliography", // "nameattr_SortSeparatorOnNamesInCitation", // "nameattr_SortSeparatorOnStyleInBibliography", // "nameattr_SortSeparatorOnStyleInCitation", // "nameorder_Long", // "nameorder_LongNameAsSortDemoteDisplayAndSort", // "nameorder_LongNameAsSortDemoteNever", // "nameorder_Short", // "nameorder_ShortDemoteDisplayAndSort", // "nameorder_ShortNameAsSortDemoteNever", // "namespaces_NonNada3", // "number_EditionSort", // "number_FailingDelimiters", // "number_IsNumericWithAlpha", // "number_LeadingZeros", // "number_MixedPageRange", // "number_MixedText", // "number_NewOrdinalsEdition", // "number_NewOrdinalsWithGenderChange", // "number_OrdinalSpacing", // "number_PageFirst", // "number_PageRange", // "number_PlainHyphenOrEnDashAlwaysPlural", // "number_PreserveDelimiter", // "number_SeparateOrdinalNamespaces", // "number_SimpleNumberArabic", // "number_SimpleNumberOrdinalLong", // "number_SimpleNumberOrdinalShort", // "number_SimpleNumberRoman", // "number_StrangeError", // "page_Chicago", // "page_ChicagoWeird", // "page_Expand", // "page_ExpandWeirdComposite", // "page_Minimal", // "page_NoOption", // "page_NumberPageFirst", // "page_PluralDetectWithEndash", // "page_WithLocaleAndWeirdDelimiter", // "plural_LabelForced", // "plural_NameLabelAlways", // "plural_NameLabelContextualPlural", // "plural_NameLabelContextualSingular", // "plural_NameLabelDefaultPlural", // "plural_NameLabelDefaultSingular", // "plural_NameLabelNever", "position_FalseInBibliography", // "position_FirstTrueOnlyOnce", // "position_IbidInText", // "position_IbidSeparateCiteSameNote", // "position_IbidWithLocator", // "position_IbidWithMultipleSoloCitesInBackref", // "position_IbidWithPrefixFullStop", "position_IbidWithSuffix", // "position_IfIbidIsTrueThenSubsequentIsTrue", // "position_IfIbidWithLocatorIsTrueThenIbidIsTrue", // "position_NearNoteFalse", // "position_NearNoteSameNote", // "position_NearNoteTrue", // "position_NearNoteUnsupported", // 
"position_NearNoteWithPlugin", // "position_ResetNoteNumbers", "position_TrueInCitation", // "punctuation_DateStripPeriods", // "punctuation_DefaultYearSuffixDelimiter", // "punctuation_DelimiterWithStripPeriodsAndSubstitute1", // "punctuation_DelimiterWithStripPeriodsAndSubstitute2", // "punctuation_DelimiterWithStripPeriodsAndSubstitute3", // "punctuation_DoNotSuppressColonAfterPeriod", // "punctuation_FieldDuplicates", // "punctuation_FrenchOrthography", // "punctuation_FullMontyField", // "punctuation_FullMontyPlain", // "punctuation_FullMontyQuotesIn", // "punctuation_FullMontyQuotesOut", // "punctuation_NoSuppressOfPeriodBeforeSemicolon", // "punctuation_OnMacro", // "punctuation_SemicolonDelimiter", // "punctuation_SuppressPrefixPeriodForDelimiterSemicolon", // "quotes_Punctuation", // "quotes_PunctuationNasty", // "quotes_PunctuationWithInnerQuote", // "quotes_QuotesUnderQuotesFalse", "simplespace_case1", // "sort_AguStyle", // "sort_AguStyleReverseGroups", // "sort_AuthorDateWithYearSuffix", // "sort_BibliographyCitationNumberDescending", // "sort_BibliographyCitationNumberDescendingSecondary", // "sort_BibliographyCitationNumberDescendingViaCompositeMacro", // "sort_BibliographyCitationNumberDescendingViaMacro", // "sort_BibliographyNosortOption", // "sort_BibliographyResortOnUpdate", // "sort_CaseInsensitiveBibliography", // "sort_CaseInsensitiveCitation", // "sort_ChangeInNameSort", // "sort_ChicagoYearSuffix1", // "sort_ChicagoYearSuffix2", // "sort_Citation", // "sort_CitationEdit", // "sort_CitationNumberPrimaryAscendingViaMacroBibliography", // "sort_CitationNumberPrimaryAscendingViaMacroCitation", // "sort_CitationNumberPrimaryAscendingViaVariableBibliography", // "sort_CitationNumberPrimaryAscendingViaVariableCitation", // "sort_CitationNumberPrimaryDescendingViaMacroBibliography", // "sort_CitationNumberPrimaryDescendingViaMacroCitation", // "sort_CitationNumberPrimaryDescendingViaVariableBibliography", // 
"sort_CitationNumberPrimaryDescendingViaVariableCitation", // "sort_CitationNumberSecondaryAscendingViaMacroBibliography", // "sort_CitationNumberSecondaryAscendingViaMacroCitation", // "sort_CitationNumberSecondaryAscendingViaVariableBibliography", // "sort_CitationNumberSecondaryAscendingViaVariableCitation", // "sort_CitationSecondaryKey", // "sort_CitationUnsorted", // "sort_CiteGroupDelimiter", // "sort_ConditionalMacroDates", // "sort_DaleDalebout", // "sort_DateMacroSortWithSecondFieldAlign", // "sort_DateVariable", // "sort_DateVariableMixedElementsAscendingA", // "sort_DateVariableMixedElementsAscendingB", // "sort_DateVariableMixedElementsDescendingA", // "sort_DateVariableMixedElementsDescendingB", // "sort_DateVariableRange", // "sort_DateVariableRangeMixed", // "sort_DropNameLabelInSort", // "sort_EtAlUseLast", // "sort_FamilyOnly", // "sort_GroupedByAuthorstring", // "sort_LatinUnicode", // "sort_LeadingA", // "sort_LeadingApostropheOnNameParticle", // "sort_LocalizedDateLimitedParts", // "sort_NameImplicitSortOrderAndForm", // "sort_NameParticleInNameSortFalse", // "sort_NameParticleInNameSortTrue", // "sort_NameVariable", // "sort_NamesUseLast", // "sort_NumberOfAuthorsAsKey", // "sort_OmittedBibRefMixedNumericStyle", // "sort_OmittedBibRefNonNumericStyle", // "sort_Quotes", // "sort_RangeUnaffected", // "sort_SeparateAuthorsAndOthers", // "sort_StatusFieldAscending", // "sort_StatusFieldDescending", // "sort_StripMarkup", // "sort_SubstituteTitle", // "sort_TestInheritance", // "sort_VariousNameMacros1", // "sort_VariousNameMacros2", // "sort_VariousNameMacros3", // "sort_WithAndInOneEntry", // "sortseparator_SortSeparatorEmpty", // "substitute_RepeatedNamesOk", // "substitute_SharedMacro", // "substitute_SuppressOrdinaryVariable", // "testers_FirstAutoGeneratedZoteroPluginTest", // "testers_SecondAutoGeneratedZoteroPluginTest", "textcase_AfterQuote", // we do not support <span class="nocase"/> // "textcase_CapitalizeAll", // we do not support 
<span class="nocase"/> // "textcase_CapitalizeFirst", // we do not support <span class="nocase"/> // "textcase_CapitalizeFirstWithDecor", // Title case should work at the beginning of a new sentence // "textcase_CapitalsUntouched", // we do not support <span class="xxx"/> // "textcase_ImplicitNocase", "textcase_InQuotes", // why should we not make the last character uppercase? // "textcase_LastChar", // we do not support <span class="nocase"/> // "textcase_Lowercase", // invalid style XML // "textcase_NoSpaceBeforeApostrophe", "textcase_NonEnglishChars", "textcase_RepeatedTitleBug", // we do not support <span class="nocase"/> // "textcase_SentenceCapitalization", // not supported yet // "textcase_SkipNameParticlesInTitleCase", "textcase_StopWordBeforeHyphen", // we do not support <span class="nocase"/> // "textcase_TitleCapitalization", // we do not support <span class="nocase"/> // "textcase_TitleCapitalization2", "textcase_TitleCaseNonEnglish", "textcase_TitleCaseNonEnglish2", // we do not support <span class="nocase"/> // "textcase_TitleCaseWithFinalNocase", "textcase_TitleCaseWithHyphens", "textcase_TitleCaseWithInitials", "textcase_TitleCaseWithNonBreakSpace", "textcase_TitleWithCircumflex", "textcase_TitleWithEmDash", "textcase_TitleWithEnDash", // we do not support <span class="nocase"/> // "textcase_Uppercase", // we do not support roman numbers yet // "textcase_UppercaseNumber", "unicode_NonBreakingSpace", // "variables_ContainerTitleShort", // "variables_ContainerTitleShort2", "variables_ShortForm", "variables_TitleShortOnAbbrevWithTitle", "variables_TitleShortOnAbbrevWithTitleCondition", "variables_TitleShortOnAbbrevWithTitleGroup", "variables_TitleShortOnShortTitleNoTitle", "variables_TitleShortOnShortTitleNoTitleCondition", "variables_TitleShortOnShortTitleNoTitleGroup", // "virtual_PageFirst" }; }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.datasource; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.resource.ResourceException; import javax.resource.spi.ConnectionEvent; import javax.resource.spi.ConnectionEventListener; import javax.resource.spi.ConnectionManager; import javax.resource.spi.ConnectionRequestInfo; import javax.resource.spi.ManagedConnection; import javax.resource.spi.ManagedConnectionFactory; import javax.security.auth.Subject; import javax.transaction.RollbackException; import javax.transaction.Synchronization; import javax.transaction.SystemException; import javax.transaction.Transaction; import javax.transaction.TransactionManager; import javax.transaction.xa.XAResource; import org.apache.logging.log4j.Logger; import org.apache.geode.internal.jndi.JNDIInvoker; import org.apache.geode.internal.jta.TransactionManagerImpl; import org.apache.geode.internal.logging.LogService; /** * This class implements a connection pool manager for managed connections (JCA) for transactional * and non-transactional resource connection. Implements ConnectionManager interface. QoS * (Transaction, Security etc is taken into account while allocating a connection). 
Security related * features are remaining. * */ public class FacetsJCAConnectionManagerImpl implements ConnectionManager, ConnectionEventListener, Synchronization { private static final Logger logger = LogService.getLogger(); private static final long serialVersionUID = 2454746064736724758L; protected transient TransactionManager transManager; protected ConnectionPoolCache mannPoolCache; protected ConnectionRequestInfo conReqInfo = null; protected Subject subject = null; protected boolean isActive = true; private transient ThreadLocal xalistThreadLocal = new ThreadLocal() { @Override protected Object initialValue() { return new ArrayList(); } }; /* * Constructor. * */ public FacetsJCAConnectionManagerImpl(ManagedConnectionFactory mcf, ConfiguredDataSourceProperties configs) { // Get the security info and form the Subject // Initialize the Pool. try { isActive = true; mannPoolCache = new ManagedPoolCacheImpl(mcf, null, null, this, configs); } catch (Exception ex) { logger.fatal(String.format( "FacetsJCAConnectionManagerImpl::Constructor: An Exception was caught while initializing due to %s", ex.getMessage()), ex); } } /* * allocates a ManagedConnection from the ConnectionPool or creates a new * ManagedConnection. 
@param javax.resource.spi.ManagedConnectionFactory * * @param javax.resource.spi.ConnectionRequestInfo * */ @Override public Object allocateConnection(ManagedConnectionFactory mcf, ConnectionRequestInfo reqInfo) throws ResourceException { if (!isActive) { throw new ResourceException( "FacetsJCAConnectionManagerImpl::allocateConnection::No valid Connection available"); } ManagedConnection conn = null; try { conn = (ManagedConnection) mannPoolCache.getPooledConnectionFromPool(); } catch (PoolException ex) { ex.printStackTrace(); throw new ResourceException( String.format( "FacetsJCAConnectionManagerImpl:: allocateConnection : in getting connection from pool due to %s", ex.getMessage()), ex); } // Check if a connection is having a transactional context // if a transactional context is used, get the XA Resource // from the ManagedConnection and register it with the // Transaction Manager. try { synchronized (this) { if (transManager == null) { transManager = JNDIInvoker.getTransactionManager(); } } Transaction txn = transManager.getTransaction(); if (txn != null) { // Check if Data Source provides XATransaction // if(configs.getTransactionType = "XATransaction") XAResource xar = conn.getXAResource(); txn.enlistResource(xar); java.util.List resList = (List) xalistThreadLocal.get(); if (resList.size() == 0) { // facets specific implementation // register syschronisation only once txn.registerSynchronization(this); } resList.add(conn); // xalistThreadLocal.set(resList); // Asif :Add in the Map after successful registration of XAResource // xaResourcesMap.put(conn, xar); // else throw a resource exception } } catch (RollbackException ex) { String exception = String.format( "FacetsJCAConnectionManagerImpl:: An Exception was caught while allocating a connection due to %s", ex.getMessage()); throw new ResourceException(exception, ex); } catch (SystemException ex) { throw new ResourceException( String.format( "FacetsJCAConnectionManagerImpl:: allocateConnection :system 
exception due to %s", ex.getMessage()), ex); } return conn.getConnection(subject, reqInfo); } /** * CallBack for Connection Error. * * @param event ConnectionEvent */ @Override public void connectionErrorOccurred(ConnectionEvent event) { if (isActive) { // If its an XAConnection ManagedConnection conn = (ManagedConnection) event.getSource(); // XAResource xar = (XAResource) xaResourcesMap.get(conn); ((List) xalistThreadLocal.get()).remove(conn); TransactionManagerImpl transManager = TransactionManagerImpl.getTransactionManager(); try { Transaction txn = transManager.getTransaction(); if (txn == null) { mannPoolCache.returnPooledConnectionToPool(conn); } else { // do nothing. } } catch (Exception se) { se.printStackTrace(); } try { mannPoolCache.expirePooledConnection(conn); // mannPoolCache.destroyPooledConnection(conn); } catch (Exception ex) { String exception = "FacetsJCAConnectionManagerImpl::connectionErrorOccurred: Exception occurred due to " + ex.getMessage(); if (logger.isDebugEnabled()) { logger.debug(exception, ex); } } } } /** * Callback for Connection Closed. * * @param event ConnectionEvent Object. */ @Override public void connectionClosed(ConnectionEvent event) { if (isActive) { ManagedConnection conn = (ManagedConnection) event.getSource(); TransactionManagerImpl transManager = TransactionManagerImpl.getTransactionManager(); try { Transaction txn = transManager.getTransaction(); if (txn == null) { mannPoolCache.returnPooledConnectionToPool(conn); } } catch (Exception se) { String exception = "FacetsJCAConnectionManagerImpl::connectionClosed: Exception occurred due to " + se.getMessage(); if (logger.isDebugEnabled()) { logger.debug(exception, se); } } } } /* * Local Transactions are not supported by Gemfire cache. */ @Override public void localTransactionCommitted(ConnectionEvent arg0) { // do nothing. } /* * Local Transactions are not supported by Gemfire cache. 
*/ @Override public void localTransactionRolledback(ConnectionEvent arg0) { // do nothing. } /* * Local Transactions are not supported by Gemfire cache. */ @Override public void localTransactionStarted(ConnectionEvent arg0) { // do nothing } public void clearUp() { isActive = false; mannPoolCache.clearUp(); } /* * (non-Javadoc) * * @see javax.transaction.Synchronization#afterCompletion(int) */ @Override public void afterCompletion(int arg0) { // DELIST THE XARESOURCE FROM THE LIST. RETURN ALL THE CONNECTIONS TO THE // POOL. java.util.List lsConn = (ArrayList) xalistThreadLocal.get(); Iterator itr = lsConn.iterator(); while (itr.hasNext()) { ManagedConnection conn = (ManagedConnection) itr.next(); mannPoolCache.returnPooledConnectionToPool(conn); } lsConn.clear(); // return all the connections to pool. } /* * (non-Javadoc) * * @see javax.transaction.Synchronization#beforeCompletion() */ @Override public void beforeCompletion() { // TODO Auto-generated method stub } }
// Generated by delegate_gen on Sat Aug 10 14:39:58 PDT 2013 package info.bytecraft.api; public abstract class PlayerDelegate { private org.bukkit.entity.Player delegate; protected PlayerDelegate(org.bukkit.entity.Player d) { this.delegate = d; } public void setDelegate(org.bukkit.entity.Player v) { this.delegate = v; } public org.bukkit.entity.Player getDelegate() { return delegate; } private void checkState() { if (delegate == null) { throw new IllegalStateException( "Can't be used when delegate isn't set."); } } public java.net.InetSocketAddress getAddress() { checkState(); return delegate.getAddress(); } public java.lang.String getDisplayName() { checkState(); return delegate.getDisplayName(); } public void setLevel(int p0) { checkState(); delegate.setLevel(p0); } public boolean isSprinting() { checkState(); return delegate.isSprinting(); } public void giveExp(int p0) { checkState(); delegate.giveExp(p0); } public void loadData() { checkState(); delegate.loadData(); } @SuppressWarnings("deprecation") public boolean isOnGround() { checkState(); return delegate.isOnGround(); } public float getWalkSpeed() { checkState(); return delegate.getWalkSpeed(); } public boolean isFlying() { checkState(); return delegate.isFlying(); } public float getFlySpeed() { checkState(); return delegate.getFlySpeed(); } public void sendMap(org.bukkit.map.MapView p0) { checkState(); delegate.sendMap(p0); } public boolean isSneaking() { checkState(); return delegate.isSneaking(); } public void saveData() { checkState(); delegate.saveData(); } public void setSprinting(boolean p0) { checkState(); delegate.setSprinting(p0); } public <T extends java.lang.Object> void playEffect(org.bukkit.Location p0, org.bukkit.Effect p1, T p2) { checkState(); delegate.playEffect(p0, p1, p2); } public void playEffect(org.bukkit.Location p0, org.bukkit.Effect p1, int p2) { checkState(); delegate.playEffect(p0, p1, p2); } public boolean canSee(org.bukkit.entity.Player p0) { checkState(); return 
delegate.canSee(p0); } public void setWalkSpeed(float p0) { checkState(); delegate.setWalkSpeed(p0); } public int getFoodLevel() { checkState(); return delegate.getFoodLevel(); } public void kickPlayer(java.lang.String p0) { checkState(); delegate.kickPlayer(p0); } public void setExp(float p0) { checkState(); delegate.setExp(p0); } public int getLevel() { checkState(); return delegate.getLevel(); } public void playSound(org.bukkit.Location p0, org.bukkit.Sound p1, float p2, float p3) { checkState(); delegate.playSound(p0, p1, p2, p3); } public void playSound(org.bukkit.Location p0, java.lang.String p1, float p2, float p3) { checkState(); delegate.playSound(p0, p1, p2, p3); } public void chat(java.lang.String p0) { checkState(); delegate.chat(p0); } public void setSneaking(boolean p0) { checkState(); delegate.setSneaking(p0); } public void playNote(org.bukkit.Location p0, byte p1, byte p2) { checkState(); delegate.playNote(p0, p1, p2); } public void playNote(org.bukkit.Location p0, org.bukkit.Instrument p1, org.bukkit.Note p2) { checkState(); delegate.playNote(p0, p1, p2); } public void hidePlayer(org.bukkit.entity.Player p0) { checkState(); delegate.hidePlayer(p0); } public float getExp() { checkState(); return delegate.getExp(); } public void setFoodLevel(int p0) { checkState(); delegate.setFoodLevel(p0); } public void setFlySpeed(float p0) { checkState(); delegate.setFlySpeed(p0); } public void setFlying(boolean p0) { checkState(); delegate.setFlying(p0); } public void showPlayer(org.bukkit.entity.Player p0) { checkState(); delegate.showPlayer(p0); } public void setDisplayName(java.lang.String p0) { checkState(); delegate.setDisplayName(p0); } public java.lang.String getPlayerListName() { checkState(); return delegate.getPlayerListName(); } public void setPlayerListName(java.lang.String p0) { checkState(); delegate.setPlayerListName(p0); } public void setCompassTarget(org.bukkit.Location p0) { checkState(); delegate.setCompassTarget(p0); } public 
org.bukkit.Location getCompassTarget() { checkState(); return delegate.getCompassTarget(); } public void sendRawMessage(java.lang.String p0) { checkState(); delegate.sendRawMessage(p0); } public boolean performCommand(java.lang.String p0) { checkState(); return delegate.performCommand(p0); } public void setSleepingIgnored(boolean p0) { checkState(); delegate.setSleepingIgnored(p0); } public boolean isSleepingIgnored() { checkState(); return delegate.isSleepingIgnored(); } public void sendBlockChange(org.bukkit.Location p0, org.bukkit.Material p1, byte p2) { checkState(); delegate.sendBlockChange(p0, p1, p2); } public void sendBlockChange(org.bukkit.Location p0, int p1, byte p2) { checkState(); delegate.sendBlockChange(p0, p1, p2); } public boolean sendChunkChange(org.bukkit.Location p0, int p1, int p2, int p3, byte[] p4) { checkState(); return delegate.sendChunkChange(p0, p1, p2, p3, p4); } @SuppressWarnings("deprecation") public void updateInventory() { checkState(); delegate.updateInventory(); } public void awardAchievement(org.bukkit.Achievement p0) { checkState(); delegate.awardAchievement(p0); } public void incrementStatistic(org.bukkit.Statistic p0) { checkState(); delegate.incrementStatistic(p0); } public void incrementStatistic(org.bukkit.Statistic p0, int p1) { checkState(); delegate.incrementStatistic(p0, p1); } public void incrementStatistic(org.bukkit.Statistic p0, org.bukkit.Material p1, int p2) { checkState(); delegate.incrementStatistic(p0, p1, p2); } public void incrementStatistic(org.bukkit.Statistic p0, org.bukkit.Material p1) { checkState(); delegate.incrementStatistic(p0, p1); } public void setPlayerTime(long p0, boolean p1) { checkState(); delegate.setPlayerTime(p0, p1); } public long getPlayerTime() { checkState(); return delegate.getPlayerTime(); } public long getPlayerTimeOffset() { checkState(); return delegate.getPlayerTimeOffset(); } public boolean isPlayerTimeRelative() { checkState(); return delegate.isPlayerTimeRelative(); } public 
void resetPlayerTime() { checkState(); delegate.resetPlayerTime(); } public void setPlayerWeather(org.bukkit.WeatherType p0) { checkState(); delegate.setPlayerWeather(p0); } public org.bukkit.WeatherType getPlayerWeather() { checkState(); return delegate.getPlayerWeather(); } public void resetPlayerWeather() { checkState(); delegate.resetPlayerWeather(); } public void giveExpLevels(int p0) { checkState(); delegate.giveExpLevels(p0); } public int getTotalExperience() { checkState(); return delegate.getTotalExperience(); } public void setTotalExperience(int p0) { checkState(); delegate.setTotalExperience(p0); } public float getExhaustion() { checkState(); return delegate.getExhaustion(); } public void setExhaustion(float p0) { checkState(); delegate.setExhaustion(p0); } public float getSaturation() { checkState(); return delegate.getSaturation(); } public void setSaturation(float p0) { checkState(); delegate.setSaturation(p0); } public org.bukkit.Location getBedSpawnLocation() { checkState(); return delegate.getBedSpawnLocation(); } public void setBedSpawnLocation(org.bukkit.Location p0) { checkState(); delegate.setBedSpawnLocation(p0); } public void setBedSpawnLocation(org.bukkit.Location p0, boolean p1) { checkState(); delegate.setBedSpawnLocation(p0, p1); } public boolean getAllowFlight() { checkState(); return delegate.getAllowFlight(); } public void setAllowFlight(boolean p0) { checkState(); delegate.setAllowFlight(p0); } public void setTexturePack(java.lang.String p0) { checkState(); delegate.setTexturePack(p0); } public org.bukkit.scoreboard.Scoreboard getScoreboard() { checkState(); return delegate.getScoreboard(); } public void setScoreboard(org.bukkit.scoreboard.Scoreboard p0) { checkState(); delegate.setScoreboard(p0); } public boolean isHealthScaled() { checkState(); return delegate.isHealthScaled(); } public void setHealthScaled(boolean p0) { checkState(); delegate.setHealthScaled(p0); } public void setHealthScale(double p0) { checkState(); 
delegate.setHealthScale(p0); } public double getHealthScale() { checkState(); return delegate.getHealthScale(); } public java.lang.String getName() { checkState(); return delegate.getName(); } public org.bukkit.inventory.PlayerInventory getInventory() { checkState(); return delegate.getInventory(); } public boolean isSleeping() { checkState(); return delegate.isSleeping(); } public org.bukkit.GameMode getGameMode() { checkState(); return delegate.getGameMode(); } public void setGameMode(org.bukkit.GameMode p0) { checkState(); delegate.setGameMode(p0); } public boolean isBlocking() { checkState(); return delegate.isBlocking(); } public org.bukkit.inventory.Inventory getEnderChest() { checkState(); return delegate.getEnderChest(); } public boolean setWindowProperty(org.bukkit.inventory.InventoryView.Property p0, int p1) { checkState(); return delegate.setWindowProperty(p0, p1); } public org.bukkit.inventory.InventoryView getOpenInventory() { checkState(); return delegate.getOpenInventory(); } public void openInventory(org.bukkit.inventory.InventoryView p0) { checkState(); delegate.openInventory(p0); } public org.bukkit.inventory.InventoryView openInventory(org.bukkit.inventory.Inventory p0) { checkState(); return delegate.openInventory(p0); } public org.bukkit.inventory.InventoryView openWorkbench(org.bukkit.Location p0, boolean p1) { checkState(); return delegate.openWorkbench(p0, p1); } public org.bukkit.inventory.InventoryView openEnchanting(org.bukkit.Location p0, boolean p1) { checkState(); return delegate.openEnchanting(p0, p1); } public void closeInventory() { checkState(); delegate.closeInventory(); } public org.bukkit.inventory.ItemStack getItemInHand() { checkState(); return delegate.getItemInHand(); } public void setItemInHand(org.bukkit.inventory.ItemStack p0) { checkState(); delegate.setItemInHand(p0); } public org.bukkit.inventory.ItemStack getItemOnCursor() { checkState(); return delegate.getItemOnCursor(); } public void 
setItemOnCursor(org.bukkit.inventory.ItemStack p0) { checkState(); delegate.setItemOnCursor(p0); } public int getSleepTicks() { checkState(); return delegate.getSleepTicks(); } public int getExpToLevel() { checkState(); return delegate.getExpToLevel(); } public double getEyeHeight() { checkState(); return delegate.getEyeHeight(); } public double getEyeHeight(boolean p0) { checkState(); return delegate.getEyeHeight(p0); } @SuppressWarnings("deprecation") public org.bukkit.entity.Egg throwEgg() { checkState(); return delegate.throwEgg(); } @SuppressWarnings("deprecation") public org.bukkit.entity.Arrow shootArrow() { checkState(); return delegate.shootArrow(); } public org.bukkit.entity.Player getKiller() { checkState(); return delegate.getKiller(); } public org.bukkit.inventory.EntityEquipment getEquipment() { checkState(); return delegate.getEquipment(); } public boolean isLeashed() { checkState(); return delegate.isLeashed(); } public org.bukkit.Location getEyeLocation() { checkState(); return delegate.getEyeLocation(); } public java.util.List<org.bukkit.block.Block> getLineOfSight(java.util.HashSet<java.lang.Byte> p0, int p1) { checkState(); return delegate.getLineOfSight(p0, p1); } public org.bukkit.block.Block getTargetBlock(java.util.HashSet<java.lang.Byte> p0, int p1) { checkState(); return delegate.getTargetBlock(p0, p1); } public java.util.List<org.bukkit.block.Block> getLastTwoTargetBlocks(java.util.HashSet<java.lang.Byte> p0, int p1) { checkState(); return delegate.getLastTwoTargetBlocks(p0, p1); } @SuppressWarnings("deprecation") public org.bukkit.entity.Snowball throwSnowball() { checkState(); return delegate.throwSnowball(); } public <T extends org.bukkit.entity.Projectile> T launchProjectile(java.lang.Class<? 
extends T> p0) { checkState(); return delegate.launchProjectile(p0); } public int getRemainingAir() { checkState(); return delegate.getRemainingAir(); } public void setRemainingAir(int p0) { checkState(); delegate.setRemainingAir(p0); } public int getMaximumAir() { checkState(); return delegate.getMaximumAir(); } public void setMaximumAir(int p0) { checkState(); delegate.setMaximumAir(p0); } public int getMaximumNoDamageTicks() { checkState(); return delegate.getMaximumNoDamageTicks(); } public void setMaximumNoDamageTicks(int p0) { checkState(); delegate.setMaximumNoDamageTicks(p0); } public double getLastDamage() { checkState(); return delegate.getLastDamage(); } @SuppressWarnings("deprecation") public int _INVALID_getLastDamage() { checkState(); return delegate._INVALID_getLastDamage(); } public void setLastDamage(double p0) { checkState(); delegate.setLastDamage(p0); } @SuppressWarnings("deprecation") public void _INVALID_setLastDamage(int p0) { checkState(); delegate._INVALID_setLastDamage(p0); } public int getNoDamageTicks() { checkState(); return delegate.getNoDamageTicks(); } public void setNoDamageTicks(int p0) { checkState(); delegate.setNoDamageTicks(p0); } public boolean addPotionEffect(org.bukkit.potion.PotionEffect p0) { checkState(); return delegate.addPotionEffect(p0); } public boolean addPotionEffect(org.bukkit.potion.PotionEffect p0, boolean p1) { checkState(); return delegate.addPotionEffect(p0, p1); } public boolean addPotionEffects(java.util.Collection<org.bukkit.potion.PotionEffect> p0) { checkState(); return delegate.addPotionEffects(p0); } public boolean hasPotionEffect(org.bukkit.potion.PotionEffectType p0) { checkState(); return delegate.hasPotionEffect(p0); } public void removePotionEffect(org.bukkit.potion.PotionEffectType p0) { checkState(); delegate.removePotionEffect(p0); } public java.util.Collection<org.bukkit.potion.PotionEffect> getActivePotionEffects() { checkState(); return delegate.getActivePotionEffects(); } public boolean 
hasLineOfSight(org.bukkit.entity.Entity p0) { checkState(); return delegate.hasLineOfSight(p0); } public boolean getRemoveWhenFarAway() { checkState(); return delegate.getRemoveWhenFarAway(); } public void setRemoveWhenFarAway(boolean p0) { checkState(); delegate.setRemoveWhenFarAway(p0); } public void setCanPickupItems(boolean p0) { checkState(); delegate.setCanPickupItems(p0); } public boolean getCanPickupItems() { checkState(); return delegate.getCanPickupItems(); } public void setCustomName(java.lang.String p0) { checkState(); delegate.setCustomName(p0); } public java.lang.String getCustomName() { checkState(); return delegate.getCustomName(); } public void setCustomNameVisible(boolean p0) { checkState(); delegate.setCustomNameVisible(p0); } public boolean isCustomNameVisible() { checkState(); return delegate.isCustomNameVisible(); } public org.bukkit.entity.Entity getLeashHolder() { checkState(); return delegate.getLeashHolder(); } public boolean setLeashHolder(org.bukkit.entity.Entity p0) { checkState(); return delegate.setLeashHolder(p0); } public void remove() { checkState(); delegate.remove(); } public boolean isEmpty() { checkState(); return delegate.isEmpty(); } public org.bukkit.Location getLocation() { checkState(); return delegate.getLocation(); } public org.bukkit.Location getLocation(org.bukkit.Location p0) { checkState(); return delegate.getLocation(p0); } public org.bukkit.entity.EntityType getType() { checkState(); return delegate.getType(); } public boolean isValid() { checkState(); return delegate.isValid(); } public void playEffect(org.bukkit.EntityEffect p0) { checkState(); delegate.playEffect(p0); } public void setVelocity(org.bukkit.util.Vector p0) { checkState(); delegate.setVelocity(p0); } public org.bukkit.util.Vector getVelocity() { checkState(); return delegate.getVelocity(); } public org.bukkit.World getWorld() { checkState(); return delegate.getWorld(); } public boolean teleport(org.bukkit.entity.Entity p0, 
org.bukkit.event.player.PlayerTeleportEvent.TeleportCause p1) { checkState(); return delegate.teleport(p0, p1); } public boolean teleport(org.bukkit.Location p0, org.bukkit.event.player.PlayerTeleportEvent.TeleportCause p1) { checkState(); return delegate.teleport(p0, p1); } public boolean teleport(org.bukkit.Location p0) { checkState(); return delegate.teleport(p0); } public boolean teleport(org.bukkit.entity.Entity p0) { checkState(); return delegate.teleport(p0); } public int getEntityId() { checkState(); return delegate.getEntityId(); } public int getFireTicks() { checkState(); return delegate.getFireTicks(); } public void setFireTicks(int p0) { checkState(); delegate.setFireTicks(p0); } public boolean isDead() { checkState(); return delegate.isDead(); } public org.bukkit.Server getServer() { checkState(); return delegate.getServer(); } public org.bukkit.entity.Entity getPassenger() { checkState(); return delegate.getPassenger(); } public boolean setPassenger(org.bukkit.entity.Entity p0) { checkState(); return delegate.setPassenger(p0); } public boolean eject() { checkState(); return delegate.eject(); } public java.util.UUID getUniqueId() { checkState(); return delegate.getUniqueId(); } public boolean leaveVehicle() { checkState(); return delegate.leaveVehicle(); } public org.bukkit.entity.Entity getVehicle() { checkState(); return delegate.getVehicle(); } public java.util.List<org.bukkit.entity.Entity> getNearbyEntities(double p0, double p1, double p2) { checkState(); return delegate.getNearbyEntities(p0, p1, p2); } public int getMaxFireTicks() { checkState(); return delegate.getMaxFireTicks(); } public float getFallDistance() { checkState(); return delegate.getFallDistance(); } public void setFallDistance(float p0) { checkState(); delegate.setFallDistance(p0); } public void setLastDamageCause(org.bukkit.event.entity.EntityDamageEvent p0) { checkState(); delegate.setLastDamageCause(p0); } public org.bukkit.event.entity.EntityDamageEvent getLastDamageCause() { 
checkState(); return delegate.getLastDamageCause(); } public int getTicksLived() { checkState(); return delegate.getTicksLived(); } public void setTicksLived(int p0) { checkState(); delegate.setTicksLived(p0); } public boolean isInsideVehicle() { checkState(); return delegate.isInsideVehicle(); } public void setMetadata(java.lang.String p0, org.bukkit.metadata.MetadataValue p1) { checkState(); delegate.setMetadata(p0, p1); } public java.util.List<org.bukkit.metadata.MetadataValue> getMetadata(java.lang.String p0) { checkState(); return delegate.getMetadata(p0); } public boolean hasMetadata(java.lang.String p0) { checkState(); return delegate.hasMetadata(p0); } public void removeMetadata(java.lang.String p0, org.bukkit.plugin.Plugin p1) { checkState(); delegate.removeMetadata(p0, p1); } public void damage(double p0, org.bukkit.entity.Entity p1) { checkState(); delegate.damage(p0, p1); } public void damage(double p0) { checkState(); delegate.damage(p0); } public double getHealth() { checkState(); return delegate.getHealth(); } public void setHealth(double p0) { checkState(); delegate.setHealth(p0); } public double getMaxHealth() { checkState(); return delegate.getMaxHealth(); } public void setMaxHealth(double p0) { checkState(); delegate.setMaxHealth(p0); } @SuppressWarnings("deprecation") public void _INVALID_damage(int p0, org.bukkit.entity.Entity p1) { checkState(); delegate._INVALID_damage(p0, p1); } @SuppressWarnings("deprecation") public void _INVALID_damage(int p0) { checkState(); delegate._INVALID_damage(p0); } @SuppressWarnings("deprecation") public int _INVALID_getHealth() { checkState(); return delegate._INVALID_getHealth(); } @SuppressWarnings("deprecation") public void _INVALID_setHealth(int p0) { checkState(); delegate._INVALID_setHealth(p0); } @SuppressWarnings("deprecation") public int _INVALID_getMaxHealth() { checkState(); return delegate._INVALID_getMaxHealth(); } @SuppressWarnings("deprecation") public void _INVALID_setMaxHealth(int p0) { 
checkState(); delegate._INVALID_setMaxHealth(p0); } public void resetMaxHealth() { checkState(); delegate.resetMaxHealth(); } public boolean hasPermission(org.bukkit.permissions.Permission p0) { checkState(); return delegate.hasPermission(p0); } public boolean hasPermission(java.lang.String p0) { checkState(); return delegate.hasPermission(p0); } public boolean isPermissionSet(java.lang.String p0) { checkState(); return delegate.isPermissionSet(p0); } public boolean isPermissionSet(org.bukkit.permissions.Permission p0) { checkState(); return delegate.isPermissionSet(p0); } public org.bukkit.permissions.PermissionAttachment addAttachment(org.bukkit.plugin.Plugin p0, int p1) { checkState(); return delegate.addAttachment(p0, p1); } public org.bukkit.permissions.PermissionAttachment addAttachment(org.bukkit.plugin.Plugin p0, java.lang.String p1, boolean p2, int p3) { checkState(); return delegate.addAttachment(p0, p1, p2, p3); } public org.bukkit.permissions.PermissionAttachment addAttachment(org.bukkit.plugin.Plugin p0) { checkState(); return delegate.addAttachment(p0); } public org.bukkit.permissions.PermissionAttachment addAttachment(org.bukkit.plugin.Plugin p0, java.lang.String p1, boolean p2) { checkState(); return delegate.addAttachment(p0, p1, p2); } public void removeAttachment(org.bukkit.permissions.PermissionAttachment p0) { checkState(); delegate.removeAttachment(p0); } public void recalculatePermissions() { checkState(); delegate.recalculatePermissions(); } public java.util.Set<org.bukkit.permissions.PermissionAttachmentInfo> getEffectivePermissions() { checkState(); return delegate.getEffectivePermissions(); } public boolean isOp() { checkState(); return delegate.isOp(); } public void setOp(boolean p0) { checkState(); delegate.setOp(p0); } public boolean isConversing() { checkState(); return delegate.isConversing(); } public void acceptConversationInput(java.lang.String p0) { checkState(); delegate.acceptConversationInput(p0); } public boolean 
beginConversation(org.bukkit.conversations.Conversation p0) { checkState(); return delegate.beginConversation(p0); } public void abandonConversation(org.bukkit.conversations.Conversation p0) { checkState(); delegate.abandonConversation(p0); } public void abandonConversation(org.bukkit.conversations.Conversation p0, org.bukkit.conversations.ConversationAbandonedEvent p1) { checkState(); delegate.abandonConversation(p0, p1); } public void sendMessage(java.lang.String[] p0) { checkState(); delegate.sendMessage(p0); } public void sendMessage(java.lang.String p0) { checkState(); delegate.sendMessage(p0); } public boolean isOnline() { checkState(); return delegate.isOnline(); } public boolean isBanned() { checkState(); return delegate.isBanned(); } public void setBanned(boolean p0) { checkState(); delegate.setBanned(p0); } public org.bukkit.entity.Player getPlayer() { checkState(); return delegate.getPlayer(); } public boolean isWhitelisted() { checkState(); return delegate.isWhitelisted(); } public void setWhitelisted(boolean p0) { checkState(); delegate.setWhitelisted(p0); } public long getFirstPlayed() { checkState(); return delegate.getFirstPlayed(); } public long getLastPlayed() { checkState(); return delegate.getLastPlayed(); } public boolean hasPlayedBefore() { checkState(); return delegate.hasPlayedBefore(); } public java.util.Map<java.lang.String, java.lang.Object> serialize() { checkState(); return delegate.serialize(); } public void sendPluginMessage(org.bukkit.plugin.Plugin p0, java.lang.String p1, byte[] p2) { checkState(); delegate.sendPluginMessage(p0, p1, p2); } public java.util.Set<java.lang.String> getListeningPluginChannels() { checkState(); return delegate.getListeningPluginChannels(); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.util;

import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.impl.EditingSides;
import com.intellij.openapi.diff.impl.highlighting.FragmentSide;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.ScrollingModel;
import com.intellij.openapi.editor.event.VisibleAreaEvent;
import com.intellij.openapi.editor.event.VisibleAreaListener;
import com.intellij.openapi.util.Disposer;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.util.List;

/**
 * Keeps the vertical and horizontal scroll positions of two or three diff editors in sync.
 * <p>
 * {@link #install(EditingSides[])} attaches a {@link VisibleAreaListener} to each editor; when one
 * editor ("master") scrolls, its listeners scroll the other ("slave") editors so that corresponding
 * lines stay aligned. Line correspondence is looked up via
 * {@code EditingSides.getLineBlocks().transform(...)}.
 * <p>
 * NOTE(review): this class is not thread-safe; it is presumably confined to the EDT like most
 * IntelliJ editor code — confirm before using from other threads.
 */
public class SyncScrollSupport implements Disposable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.diff.impl.util.SyncScrollSupport");

  /** Re-entrancy guard: set while we programmatically scroll slaves, so their own
   *  visible-area events do not trigger another round of syncing. */
  private boolean myDuringVerticalScroll = false;

  /** All installed listeners; disposed and cleared on {@link #dispose()}. */
  @NotNull private final List<ScrollListener> myScrollers = ContainerUtil.createLockFreeCopyOnWriteList();

  /** When false, visible-area events are ignored (syncing is suspended). */
  private boolean myEnabled = true;

  /**
   * Wires scroll syncing across the editor chain described by {@code sideContainers}.
   * Each container contributes its SIDE2 editor; adjacent containers must share an editor
   * (container i's SIDE2 == container i+1's SIDE1), which yields n+1 editors for n containers.
   * Only 2-editor (one container) and 3-editor (two containers) setups are supported.
   * Re-installing first disposes any previously installed listeners.
   */
  public void install(EditingSides[] sideContainers) {
    Disposer.dispose(this);
    Editor[] editors = new Editor[sideContainers.length + 1];
    editors[0] = sideContainers[0].getEditor(FragmentSide.SIDE1);
    for (int i = 0; i < sideContainers.length; i++) {
      EditingSides sideContainer = sideContainers[i];
      // Sanity check that the containers really form a chain sharing editors.
      LOG.assertTrue(sideContainer.getEditor(FragmentSide.SIDE1) == editors[i]);
      editors[i + 1] = sideContainer.getEditor(FragmentSide.SIDE2);
    }
    if (editors.length == 3) install3(editors, sideContainers);
    else if (editors.length == 2) install2(editors, sideContainers);
    else LOG.error(String.valueOf(editors.length));
  }

  /** Detaches and disposes every installed scroll listener. */
  @Override
  public void dispose() {
    for (ScrollListener scrollListener : myScrollers) {
      Disposer.dispose(scrollListener);
    }
    myScrollers.clear();
  }

  /** Enables or disables syncing; disabling does not remove the listeners. */
  public void setEnabled(boolean enabled) {
    myEnabled = enabled;
  }

  public boolean isEnabled() {
    return myEnabled;
  }

  /** Two-editor setup: each editor drives the other through the single container. */
  private void install2(@NotNull Editor[] editors, @NotNull EditingSides[] sideContainers) {
    addSlavesScroller(editors[0], new ScrollingContext(FragmentSide.SIDE1, sideContainers[0], FragmentSide.SIDE1));
    addSlavesScroller(editors[1], new ScrollingContext(FragmentSide.SIDE2, sideContainers[0], FragmentSide.SIDE2));
  }

  /** Three-editor setup: each editor drives its two neighbours through both containers. */
  private void install3(@NotNull Editor[] editors, @NotNull EditingSides[] sideContainers) {
    addSlavesScroller(editors[0],
                      new ScrollingContext(FragmentSide.SIDE1, sideContainers[0], FragmentSide.SIDE2),
                      new ScrollingContext(FragmentSide.SIDE1, sideContainers[1], FragmentSide.SIDE1));
    addSlavesScroller(editors[1],
                      new ScrollingContext(FragmentSide.SIDE2, sideContainers[0], FragmentSide.SIDE1),
                      new ScrollingContext(FragmentSide.SIDE1, sideContainers[1], FragmentSide.SIDE1));
    addSlavesScroller(editors[2],
                      new ScrollingContext(FragmentSide.SIDE2, sideContainers[1], FragmentSide.SIDE2),
                      new ScrollingContext(FragmentSide.SIDE2, sideContainers[0], FragmentSide.SIDE1));
  }

  /** Registers one listener on {@code editor} that propagates its scrolling to all {@code contexts}. */
  private void addSlavesScroller(@NotNull Editor editor, @NotNull ScrollingContext... contexts) {
    ScrollListener scroller = new ScrollListener(contexts, editor);
    scroller.install();
    myScrollers.add(scroller);
  }

  /**
   * Listens for visible-area changes of one master editor and forwards them to the
   * slave editors described by the scrolling contexts.
   */
  private class ScrollListener implements VisibleAreaListener, Disposable {
    private ScrollingContext[] myScrollContexts;
    @NotNull private final Editor myEditor;

    ScrollListener(@NotNull ScrollingContext[] scrollContexts, @NotNull Editor editor) {
      myScrollContexts = scrollContexts;
      myEditor = editor;
    }

    public void install() {
      myEditor.getScrollingModel().addVisibleAreaListener(this);
    }

    @Override
    public void dispose() {
      myEditor.getScrollingModel().removeVisibleAreaListener(this);
      myScrollContexts = null;
    }

    @Override
    public void visibleAreaChanged(@NotNull VisibleAreaEvent e) {
      // Skip while disabled, and skip events caused by our own programmatic scrolling.
      if (!myEnabled || myDuringVerticalScroll) return;
      Rectangle newRectangle = e.getNewRectangle();
      Rectangle oldRectangle = e.getOldRectangle();
      if (newRectangle == null || oldRectangle == null) return;
      myDuringVerticalScroll = true;
      try {
        for (ScrollingContext context : myScrollContexts) {
          syncVerticalScroll(context, newRectangle, oldRectangle);
          syncHorizontalScroll(context, newRectangle, oldRectangle);
        }
      }
      finally {
        myDuringVerticalScroll = false;
      }
    }
  }

  /** Mirrors a horizontal scroll of the master onto the slave editor, 1:1 by pixel offset. */
  private static void syncHorizontalScroll(@NotNull ScrollingContext context,
                                           @NotNull Rectangle newRectangle,
                                           @NotNull Rectangle oldRectangle) {
    int newScrollOffset = newRectangle.x;
    if (newScrollOffset == oldRectangle.x) return; // no horizontal movement

    EditingSides sidesContainer = context.getSidesContainer();
    FragmentSide masterSide = context.getMasterSide();
    Editor slaveEditor = sidesContainer.getEditor(masterSide.otherSide());
    if (slaveEditor == null) return;

    doScrollHorizontally(slaveEditor.getScrollingModel(), newScrollOffset);
  }

  /**
   * Scrolls the slave editor so that the line corresponding to the master's anchor line
   * (one third down the master's viewport) lands at the same viewport position in the slave.
   * Falls back to mirroring the raw pixel delta when no corresponding line exists
   * ({@code transform} returns a negative value).
   */
  private static void syncVerticalScroll(@NotNull ScrollingContext context,
                                         @NotNull Rectangle newRectangle,
                                         @NotNull Rectangle oldRectangle) {
    if (newRectangle.y == oldRectangle.y) return; // no vertical movement

    EditingSides sidesContainer = context.getSidesContainer();
    FragmentSide masterSide = context.getMasterSide();
    FragmentSide masterDiffSide = context.getMasterDiffSide();

    Editor master = sidesContainer.getEditor(masterSide);
    Editor slave = sidesContainer.getEditor(masterSide.otherSide());
    if (master == null || slave == null) return;
    if (master.isDisposed() || slave.isDisposed()) return;

    int masterVerticalScrollOffset = master.getScrollingModel().getVerticalScrollOffset();
    int slaveVerticalScrollOffset = slave.getScrollingModel().getVerticalScrollOffset();

    Rectangle viewRect = master.getScrollingModel().getVisibleArea();
    // Anchor point: one third down the visible area (despite the name "middle").
    int middleY = viewRect.height / 3;

    if (master.getDocument().getTextLength() == 0) return; // empty document: nothing to align

    LogicalPosition masterPos = master.xyToLogicalPosition(new Point(viewRect.x, masterVerticalScrollOffset + middleY));
    int masterCenterLine = masterPos.line;
    // Map the master's anchor line to the corresponding line on the other side.
    int scrollToLine = sidesContainer.getLineBlocks().transform(masterDiffSide, masterCenterLine);
    int offset;
    if (scrollToLine < 0) {
      // No matching line: just apply the same pixel delta the master moved by.
      offset = slaveVerticalScrollOffset + newRectangle.y - oldRectangle.y;
    }
    else {
      // Keep the same sub-line pixel phase so the lines align exactly, not just roughly.
      int correction = (masterVerticalScrollOffset + middleY) % master.getLineHeight();
      Point point = slave.logicalPositionToXY(new LogicalPosition(scrollToLine, masterPos.column));
      offset = point.y - middleY + correction;
    }
    // Compensate for differing editor header heights (e.g. a search/notification panel on one side).
    int deltaHeaderOffset = getHeaderOffset(slave) - getHeaderOffset(master);
    doScrollVertically(slave.getScrollingModel(), offset + deltaHeaderOffset);
  }

  /** Height of the editor's header component, or 0 when there is none. */
  private static int getHeaderOffset(@NotNull final Editor editor) {
    final JComponent header = editor.getHeaderComponent();
    return header == null ? 0 : header.getHeight();
  }

  /** Scrolls without animation, guaranteeing animation is re-enabled even on failure. */
  private static void doScrollVertically(@NotNull ScrollingModel model, int offset) {
    model.disableAnimation();
    try {
      model.scrollVertically(offset);
    }
    finally {
      model.enableAnimation();
    }
  }

  /** Scrolls without animation, guaranteeing animation is re-enabled even on failure. */
  private static void doScrollHorizontally(@NotNull ScrollingModel model, int offset) {
    model.disableAnimation();
    try {
      model.scrollHorizontally(offset);
    }
    finally {
      model.enableAnimation();
    }
  }

  /**
   * Moves the caret to {@code logicalLine} and centers the view on it, without animation.
   */
  public static void scrollEditor(@NotNull Editor editor, int logicalLine) {
    editor.getCaretModel().moveToLogicalPosition(new LogicalPosition(logicalLine, 0));
    ScrollingModel scrollingModel = editor.getScrollingModel();
    scrollingModel.disableAnimation();
    // FIX: re-enable animation in a finally block, matching doScrollVertically/doScrollHorizontally.
    // Previously a throwing scrollToCaret() would leave animation permanently disabled.
    try {
      scrollingModel.scrollToCaret(ScrollType.CENTER);
    }
    finally {
      scrollingModel.enableAnimation();
    }
  }

  /**
   * Immutable description of one master→slave relationship:
   * which container links the pair, which side the master occupies in that container,
   * and which side to use when transforming line numbers through the diff's line blocks.
   */
  private static class ScrollingContext {
    @NotNull private final EditingSides mySidesContainer;
    @NotNull private final FragmentSide myMasterSide;
    @NotNull private final FragmentSide myMasterDiffSide;

    ScrollingContext(@NotNull FragmentSide masterSide,
                     @NotNull EditingSides sidesContainer,
                     @NotNull FragmentSide masterDiffSide) {
      mySidesContainer = sidesContainer;
      myMasterSide = masterSide;
      myMasterDiffSide = masterDiffSide;
    }

    @NotNull
    public EditingSides getSidesContainer() {
      return mySidesContainer;
    }

    @NotNull
    public FragmentSide getMasterSide() {
      return myMasterSide;
    }

    @NotNull
    public FragmentSide getMasterDiffSide() {
      return myMasterDiffSide;
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.query.continuous; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.query.*; import org.apache.ignite.cache.store.*; import org.apache.ignite.cluster.*; import org.apache.ignite.configuration.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.processors.continuous.*; import org.apache.ignite.internal.processors.datastructures.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.lang.*; import org.apache.ignite.marshaller.optimized.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.testframework.*; import org.apache.ignite.testframework.junits.common.*; import org.jetbrains.annotations.*; import org.jsr166.*; import javax.cache.*; import javax.cache.configuration.*; import javax.cache.event.*; import javax.cache.integration.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import static java.util.concurrent.TimeUnit.*; import 
static org.apache.ignite.cache.CacheAtomicityMode.*;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.cache.CacheRebalanceMode.*;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*;
import static org.apache.ignite.events.EventType.*;
import static org.apache.ignite.internal.processors.cache.query.CacheQueryType.*;

/**
 * Continuous queries tests.
 */
public abstract class GridCacheContinuousQueryAbstractSelfTest extends GridCommonAbstractTest {
    /** IP finder. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Latch timeout. */
    protected static final long LATCH_TIMEOUT = 5000;

    /** Name of the grid started in client mode without a cache configuration. */
    private static final String NO_CACHE_GRID_NAME = "noCacheGrid";

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        cfg.setPeerClassLoadingEnabled(peerClassLoadingEnabled());

        if (!gridName.equals(NO_CACHE_GRID_NAME)) {
            CacheConfiguration cacheCfg = defaultCacheConfiguration();

            cacheCfg.setCacheMode(cacheMode());
            cacheCfg.setAtomicityMode(atomicityMode());
            cacheCfg.setNearConfiguration(nearConfiguration());
            cacheCfg.setRebalanceMode(ASYNC);
            cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
            // Read/write-through store backs testLoadCache().
            cacheCfg.setCacheStoreFactory(new StoreFactory());
            cacheCfg.setReadThrough(true);
            cacheCfg.setWriteThrough(true);
            cacheCfg.setLoadPreviousValue(true);

            cfg.setCacheConfiguration(cacheCfg);
        }
        else
            cfg.setClientMode(true);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(disco);

        cfg.setMarshaller(new OptimizedMarshaller(false));

        return cfg;
    }

    /**
     * @return Peer class loading enabled flag.
     */
    protected boolean peerClassLoadingEnabled() {
        return true;
    }

    /**
     * @return Near cache configuration.
     */
    protected NearCacheConfiguration nearConfiguration() {
        return new NearCacheConfiguration();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGridsMultiThreaded(gridCount());
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        // Wait for the topology to stabilize at the expected node count.
        GridTestUtils.waitForCondition(new PA() {
            @Override public boolean apply() {
                for (int i = 0; i < gridCount(); i++) {
                    if (grid(i).cluster().nodes().size() != gridCount())
                        return false;
                }

                return true;
            }
        }, 3000);

        for (int i = 0; i < gridCount(); i++)
            assertEquals(gridCount(), grid(i).cluster().nodes().size());

        // Clear the cache on every node, retrying up to 5 times per node.
        for (int i = 0; i < gridCount(); i++) {
            for (int j = 0; j < 5; j++) {
                try {
                    IgniteCache<Object, Object> cache = grid(i).cache(null);

                    for (Cache.Entry<Object, Object> entry : cache.localEntries(new CachePeekMode[] {CachePeekMode.ALL}))
                        cache.remove(entry.getKey());

                    break;
                }
                catch (IgniteException e) {
                    if (j == 4)
                        throw new Exception("Failed to clear cache for grid: " + i, e);

                    U.warn(log, "Failed to clear cache for grid (will retry in 500 ms) [gridIdx=" + i +
                        ", err=" + e.getMessage() + ']');

                    U.sleep(500);
                }
            }
        }

        for (int i = 0; i < gridCount(); i++)
            assertEquals("Cache is not empty [entrySet=" + grid(i).cache(null).localEntries() +
                ", i=" + i + ']', 0, grid(i).cache(null).localSize());

        // Verify (via reflection on internal fields) that every continuous-query
        // registration created during the test has been cleaned up.
        for (int i = 0; i < gridCount(); i++) {
            GridContinuousProcessor proc = grid(i).context().continuous();

            assertEquals(String.valueOf(i), 2, ((Map)U.field(proc, "locInfos")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "rmtInfos")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "startFuts")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "waitForStartAck")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "stopFuts")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "waitForStopAck")).size());
            assertEquals(String.valueOf(i), 0, ((Map)U.field(proc, "pending")).size());

            CacheContinuousQueryManager mgr = grid(i).context().cache().internalCache().context().continuousQueries();

            assertEquals(0, ((Map)U.field(mgr, "lsnrs")).size());
        }
    }

    /**
     * @return Cache mode.
     */
    protected abstract CacheMode cacheMode();

    /**
     * @return Atomicity mode.
     */
    protected CacheAtomicityMode atomicityMode() {
        return TRANSACTIONAL;
    }

    /**
     * @return Grids count.
     */
    protected abstract int gridCount();

    /**
     * Checks that invalid page size / time interval arguments are rejected.
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    public void testIllegalArguments() throws Exception {
        final ContinuousQuery<Object, Object> q = new ContinuousQuery<>();

        GridTestUtils.assertThrows(
            log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    q.setPageSize(-1);

                    return null;
                }
            },
            IllegalArgumentException.class,
            null
        );

        GridTestUtils.assertThrows(log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    q.setPageSize(0);

                    return null;
                }
            },
            IllegalArgumentException.class,
            null
        );

        GridTestUtils.assertThrows(
            log,
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    q.setTimeInterval(-1);

                    return null;
                }
            },
            IllegalArgumentException.class,
            null
        );
    }

    /**
     * Checks that all updates (including removes, which arrive as null values)
     * are delivered to the local listener.
     *
     * @throws Exception If failed.
     */
    public void testAllEntries() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, List<Integer>> map = new HashMap<>();
        final CountDownLatch latch = new CountDownLatch(5);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    synchronized (map) {
                        List<Integer> vals = map.get(e.getKey());

                        if (vals == null) {
                            vals = new ArrayList<>();

                            map.put(e.getKey(), vals);
                        }

                        vals.add(e.getValue());
                    }

                    latch.countDown();
                }
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            cache.put(1, 1);
            cache.put(2, 2);
            cache.put(3, 3);

            cache.remove(2);

            cache.put(1, 10);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS);

            assertEquals(3, map.size());

            List<Integer> vals = map.get(1);

            assertNotNull(vals);
            assertEquals(2, vals.size());
            assertEquals(1, (int)vals.get(0));
            assertEquals(10, (int)vals.get(1));

            vals = map.get(2);

            assertNotNull(vals);
            assertEquals(2, vals.size());
            assertEquals(2, (int)vals.get(0));
            // Remove event carries a null new value.
            assertNull(vals.get(1));

            vals = map.get(3);

            assertNotNull(vals);
            assertEquals(1, vals.size());
            assertEquals(3, (int)vals.get(0));
        }
    }

    /**
     * Checks that an exception thrown from the remote filter does not break updates.
     *
     * @throws Exception If failed.
     */
    public void testFilterException() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                // No-op.
            }
        });

        qry.setRemoteFilter(new CacheEntryEventSerializableFilter<Integer, Integer>() {
            @Override public boolean evaluate(CacheEntryEvent<? extends Integer, ? extends Integer> evt) {
                throw new RuntimeException("Test error.");
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            for (int i = 0; i < 100; i++)
                cache.put(i, i);
        }
    }

    /**
     * Checks that only entries accepted by the remote filter (key > 2) are delivered.
     *
     * @throws Exception If failed.
     */
    public void testEntriesByFilter() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, List<Integer>> map = new HashMap<>();
        final CountDownLatch latch = new CountDownLatch(4);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    synchronized (map) {
                        List<Integer> vals = map.get(e.getKey());

                        if (vals == null) {
                            vals = new ArrayList<>();

                            map.put(e.getKey(), vals);
                        }

                        vals.add(e.getValue());
                    }

                    latch.countDown();
                }
            }
        });

        qry.setRemoteFilter(new CacheEntryEventSerializableFilter<Integer,Integer>() {
            @Override public boolean evaluate(CacheEntryEvent<? extends Integer,? extends Integer> evt) {
                return evt.getKey() > 2;
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            cache.put(1, 1);
            cache.put(2, 2);
            cache.put(3, 3);
            cache.put(4, 4);

            cache.remove(2);
            cache.remove(3);

            cache.put(1, 10);
            cache.put(4, 40);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS);

            assertEquals(2, map.size());

            List<Integer> vals = map.get(3);

            assertNotNull(vals);
            assertEquals(2, vals.size());
            assertEquals(3, (int)vals.get(0));
            assertNull(vals.get(1));

            vals = map.get(4);

            assertNotNull(vals);
            assertEquals(2, vals.size());
            assertEquals(4, (int)vals.get(0));
            assertEquals(40, (int)vals.get(1));
        }
    }

    /**
     * Checks that a local query only receives events for locally-mapped keys
     * (PARTITIONED mode only).
     *
     * @throws Exception If failed.
     */
    public void testLocalNodeOnly() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        if (grid(0).cache(null).getConfiguration(CacheConfiguration.class).getCacheMode() != PARTITIONED)
            return;

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, List<Integer>> map = new HashMap<>();
        final CountDownLatch latch = new CountDownLatch(1);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer,Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer,? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer,? extends Integer> e : evts) {
                    synchronized (map) {
                        List<Integer> vals = map.get(e.getKey());

                        if (vals == null) {
                            vals = new ArrayList<>();

                            map.put(e.getKey(), vals);
                        }

                        vals.add(e.getValue());
                    }

                    latch.countDown();
                }
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry.setLocal(true))) {
            // Find one key mapped to the local node and one mapped remotely.
            int locKey = -1;
            int rmtKey = -1;

            int key = 0;

            while (true) {
                ClusterNode n = grid(0).cluster().mapKeyToNode(null, key);

                assert n != null;

                if (n.equals(grid(0).localNode()))
                    locKey = key;
                else
                    rmtKey = key;

                key++;

                if (locKey >= 0 && rmtKey >= 0)
                    break;
            }

            cache.put(locKey, 1);
            cache.put(rmtKey, 2);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS);

            assertEquals(1, map.size());

            List<Integer> vals = map.get(locKey);

            assertNotNull(vals);
            assertEquals(1, vals.size());
            assertEquals(1, (int)vals.get(0));
        }
    }

    /**
     * Checks that notifications are buffered until a full page (5 entries) accumulates.
     *
     * @throws Exception If failed.
     */
    public void testBuffering() throws Exception {
        if (grid(0).cache(null).getConfiguration(CacheConfiguration.class).getCacheMode() != PARTITIONED)
            return;

        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, List<Integer>> map = new HashMap<>();
        final CountDownLatch latch = new CountDownLatch(5);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    synchronized (map) {
                        List<Integer> vals = map.get(e.getKey());

                        if (vals == null) {
                            vals = new ArrayList<>();

                            map.put(e.getKey(), vals);
                        }

                        vals.add(e.getValue());
                    }

                    latch.countDown();
                }
            }
        });

        qry.setPageSize(5);

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            // Collect 6 keys all mapped to the same remote node, so buffering
            // happens on a single node's backup queue.
            ClusterNode node = F.first(grid(0).cluster().forRemotes().nodes());

            Collection<Integer> keys = new HashSet<>();

            int key = 0;

            while (true) {
                ClusterNode n = grid(0).cluster().mapKeyToNode(null, key);

                assert n != null;

                if (n.equals(node))
                    keys.add(key);

                key++;

                if (keys.size() == 6)
                    break;
            }

            Iterator<Integer> it = keys.iterator();

            // 4 updates < page size of 5: nothing should be delivered yet.
            for (int i = 0; i < 4; i++)
                cache.put(it.next(), 0);

            assert !latch.await(2, SECONDS);

            // 2 more updates push the buffer past the page size.
            for (int i = 0; i < 2; i++)
                cache.put(it.next(), 0);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS);

            assertEquals(5, map.size());

            it = keys.iterator();

            for (int i = 0; i < 5; i++) {
                Integer k = it.next();

                List<Integer> vals = map.get(k);

                assertNotNull(vals);
                assertEquals(1, vals.size());
                assertEquals(0, (int)vals.get(0));
            }
        }
    }

    /**
     * Checks that buffered notifications are flushed after the time interval elapses
     * even when the page is not full.
     *
     * @throws Exception If failed.
     */
    public void testTimeInterval() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        if (cache.getConfiguration(CacheConfiguration.class).getCacheMode() != PARTITIONED)
            return;

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, List<Integer>> map = new HashMap<>();
        final CountDownLatch latch = new CountDownLatch(5);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    synchronized (map) {
                        List<Integer> vals = map.get(e.getKey());

                        if (vals == null) {
                            vals = new ArrayList<>();

                            map.put(e.getKey(), vals);
                        }

                        vals.add(e.getValue());
                    }

                    latch.countDown();
                }
            }
        });

        // Page size (10) is larger than the number of updates (5),
        // so delivery must be triggered by the 3-second time interval.
        qry.setPageSize(10);
        qry.setTimeInterval(3000);

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            ClusterNode node = F.first(grid(0).cluster().forRemotes().nodes());

            Collection<Integer> keys = new HashSet<>();

            int key = 0;

            while (true) {
                ClusterNode n = grid(0).cluster().mapKeyToNode(null, key);

                assert n != null;

                if (n.equals(node))
                    keys.add(key);

                key++;

                if (keys.size() == 5)
                    break;
            }

            for (Integer k : keys)
                cache.put(k, 0);

            assert !latch.await(2, SECONDS);
            assert latch.await(1000 + LATCH_TIMEOUT, MILLISECONDS);

            assertEquals(5, map.size());

            Iterator<Integer> it = keys.iterator();

            for (int i = 0; i < 5; i++) {
                Integer k = it.next();

                List<Integer> vals = map.get(k);

                assertNotNull(vals);
                assertEquals(1, vals.size());
                assertEquals(0, (int)vals.get(0));
            }
        }
    }

    /**
     * Checks that the initial scan query returns pre-existing entries matching
     * the predicate (k >= 5).
     *
     * @throws Exception If failed.
     */
    public void testInitialQuery() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        qry.setInitialQuery(new ScanQuery<>(new P2<Integer, Integer>() {
            @Override public boolean apply(Integer k, Integer v) {
                return k >= 5;
            }
        }));

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                // No updates are performed after the query starts.
                assert false;
            }
        });

        for (int i = 0; i < 10; i++)
            cache.put(i, i);

        try (QueryCursor<Cache.Entry<Integer, Integer>> cur = cache.query(qry)) {
            List<Cache.Entry<Integer, Integer>> res = cur.getAll();

            Collections.sort(res, new Comparator<Cache.Entry<Integer, Integer>>() {
                @Override public int compare(Cache.Entry<Integer, Integer> e1, Cache.Entry<Integer, Integer> e2) {
                    return e1.getKey().compareTo(e2.getKey());
                }
            });

            assertEquals(5, res.size());

            int exp = 5;

            for (Cache.Entry<Integer, Integer> e : res) {
                assertEquals(exp, e.getKey().intValue());
                assertEquals(exp, e.getValue().intValue());

                exp++;
            }
        }
    }

    /**
     * Checks the combination of an initial scan query with subsequent live updates.
     *
     * @throws Exception If failed.
     */
    public void testInitialQueryAndUpdates() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        qry.setInitialQuery(new ScanQuery<>(new P2<Integer, Integer>() {
            @Override public boolean apply(Integer k, Integer v) {
                return k >= 5;
            }
        }));

        final Map<Integer, Integer> map = new ConcurrentHashMap8<>();
        final CountDownLatch latch = new CountDownLatch(2);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    map.put(e.getKey(), e.getValue());

                    latch.countDown();
                }
            }
        });

        for (int i = 0; i < 10; i++)
            cache.put(i, i);

        try (QueryCursor<Cache.Entry<Integer, Integer>> cur = cache.query(qry)) {
            List<Cache.Entry<Integer, Integer>> res = cur.getAll();

            Collections.sort(res, new Comparator<Cache.Entry<Integer, Integer>>() {
                @Override public int compare(Cache.Entry<Integer, Integer> e1, Cache.Entry<Integer, Integer> e2) {
                    return e1.getKey().compareTo(e2.getKey());
                }
            });

            assertEquals(5, res.size());

            int exp = 5;

            for (Cache.Entry<Integer, Integer> e : res) {
                assertEquals(exp, e.getKey().intValue());
                assertEquals(exp, e.getValue().intValue());

                exp++;
            }

            cache.put(10, 10);
            cache.put(11, 11);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS) : latch.getCount();

            assertEquals(2, map.size());

            // NOTE(review): this loop starts at 11 and so only checks key 11 of the
            // two updates (10 and 11); likely intended to start at i = 10 — verify.
            for (int i = 11; i < 12; i++)
                assertEquals(i, (int)map.get(i));
        }
    }

    /**
     * Checks that entries loaded through the cache store are delivered to the listener.
     *
     * @throws Exception If failed.
     */
    public void testLoadCache() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Map<Integer, Integer> map = new ConcurrentHashMap8<>();
        final CountDownLatch latch = new CountDownLatch(10);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    map.put(e.getKey(), e.getValue());

                    latch.countDown();
                }
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            // TestStore.loadCache() supplies entries 0..9.
            cache.loadCache(null, 0);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS) : "Count: " + latch.getCount();

            assertEquals(10, map.size());

            for (int i = 0; i < 10; i++)
                assertEquals(i, (int)map.get(i));
        }
    }

    /**
     * Checks that updates for internal data-structure keys are not delivered
     * to user listeners (transactional caches only).
     *
     * @throws Exception If failed.
     */
    public void testInternalKey() throws Exception {
        if (atomicityMode() == ATOMIC)
            return;

        IgniteCache<Object, Object> cache = grid(0).cache(null);

        ContinuousQuery<Object, Object> qry = new ContinuousQuery<>();

        final Map<Object, Object> map = new ConcurrentHashMap8<>();
        final CountDownLatch latch = new CountDownLatch(2);

        qry.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> evts) {
                for (CacheEntryEvent<?, ?> e : evts) {
                    map.put(e.getKey(), e.getValue());

                    latch.countDown();
                }
            }
        });

        try (QueryCursor<Cache.Entry<Object, Object>> ignored = cache.query(qry)) {
            cache.put(new GridCacheInternalKeyImpl("test"), 1);

            cache.put(1, 1);
            cache.put(2, 2);

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS);

            // The internal key must be filtered out; only the two user keys arrive.
            assertEquals(2, map.size());

            assertEquals(1, (int)map.get(1));
            assertEquals(2, (int)map.get(2));
        }
    }

    /**
     * Checks that a continuous query keeps delivering events while a new node joins.
     *
     * @throws Exception If failed.
     */
    public void testNodeJoin() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final Collection<CacheEntryEvent<? extends Integer, ? extends Integer>> all = new ConcurrentLinkedDeque8<>();
        final CountDownLatch latch = new CountDownLatch(30);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> evt : evts)
                    all.add(evt);

                latch.countDown();
            }
        });

        try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
            cache.put(0, 0);

            startGrid("anotherGrid");

            for (int i = 1; i < 30; i++) {
                cache.put(i, i);
            }

            assert latch.await(LATCH_TIMEOUT, MILLISECONDS) : all;

            assertEquals(30, all.size());
        }
        finally {
            stopGrid("anotherGrid");
        }
    }

    /**
     * Checks that a query survives a cache-less (client) node joining and leaving.
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings("TryFinallyCanBeTryWithResources")
    public void testNodeJoinWithoutCache() throws Exception {
        IgniteCache<Integer, Integer> cache = grid(0).cache(null);

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        final CountDownLatch latch = new CountDownLatch(1);

        qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                latch.countDown();
            }
        });

        QueryCursor<Cache.Entry<Integer, Integer>> cur = cache.query(qry);

        try {
            try (Ignite ignite = startGrid(NO_CACHE_GRID_NAME)) {
                log.info("Started node without cache: " + ignite);
            }

            cache.put(1, 1);

            assertTrue(latch.await(5000, MILLISECONDS));
        }
        finally {
            cur.close();
        }
    }

    /**
     * Checks that query-read and query-executed events are fired with the
     * expected attributes for continuous queries.
     *
     * @throws Exception If failed.
     */
    public void testEvents() throws Exception {
        final AtomicInteger cnt = new AtomicInteger();
        final CountDownLatch latch = new CountDownLatch(50);
        final CountDownLatch execLatch = new CountDownLatch(cacheMode() == REPLICATED ? 1 : gridCount());

        IgnitePredicate<Event> lsnr = new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt instanceof CacheQueryReadEvent;

                CacheQueryReadEvent qe = (CacheQueryReadEvent)evt;

                assertEquals(CONTINUOUS, qe.queryType());
                assertNull(qe.cacheName());

                assertEquals(grid(0).localNode().id(), qe.subjectId());

                assertNull(qe.className());
                assertNull(qe.clause());
                assertNull(qe.scanQueryFilter());
                assertNotNull(qe.continuousQueryFilter());
                assertNull(qe.arguments());

                cnt.incrementAndGet();
                latch.countDown();

                return true;
            }
        };

        IgnitePredicate<Event> execLsnr = new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt instanceof CacheQueryExecutedEvent;

                CacheQueryExecutedEvent qe = (CacheQueryExecutedEvent)evt;

                assertEquals(CONTINUOUS, qe.queryType());
                assertNull(qe.cacheName());

                assertEquals(grid(0).localNode().id(), qe.subjectId());

                assertNull(qe.className());
                assertNull(qe.clause());
                assertNull(qe.scanQueryFilter());
                assertNotNull(qe.continuousQueryFilter());
                assertNull(qe.arguments());

                execLatch.countDown();

                return true;
            }
        };

        try {
            for (int i = 0; i < gridCount(); i++) {
                grid(i).events().localListen(lsnr, EVT_CACHE_QUERY_OBJECT_READ);
                grid(i).events().localListen(execLsnr, EVT_CACHE_QUERY_EXECUTED);
            }

            IgniteCache<Integer, Integer> cache = grid(0).cache(null);

            ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

            qry.setLocalListener(new CacheEntryUpdatedListener<Integer, Integer>() {
                @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts) {
                    // No-op.
                }
            });

            qry.setRemoteFilter(new CacheEntryEventSerializableFilter<Integer, Integer>() {
                @Override public boolean evaluate(CacheEntryEvent<? extends Integer, ? extends Integer> evt) {
                    return evt.getValue() >= 50;
                }
            });

            try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
                for (int i = 0; i < 100; i++)
                    cache.put(i, i);

                assert latch.await(LATCH_TIMEOUT, MILLISECONDS);
                assert execLatch.await(LATCH_TIMEOUT, MILLISECONDS);

                // The filter accepts values 50..99: exactly 50 read events.
                assertEquals(50, cnt.get());
            }
        }
        finally {
            for (int i = 0; i < gridCount(); i++) {
                grid(i).events().stopLocalListen(lsnr, EVT_CACHE_QUERY_OBJECT_READ);
                grid(i).events().stopLocalListen(execLsnr, EVT_CACHE_QUERY_EXECUTED);
            }
        }
    }

    /**
     * Factory producing the test cache store.
     */
    private static class StoreFactory implements Factory<CacheStore> {
        @Override public CacheStore create() {
            return new TestStore();
        }
    }

    /**
     * Store. Supplies entries 0..9 on loadCache(); all other operations are no-ops.
     */
    private static class TestStore extends CacheStoreAdapter<Object, Object> {
        /** {@inheritDoc} */
        @Override public void loadCache(IgniteBiInClosure<Object, Object> clo, Object... args) {
            for (int i = 0; i < 10; i++)
                clo.apply(i, i);
        }

        /** {@inheritDoc} */
        @Nullable @Override public Object load(Object key) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void write(javax.cache.Cache.Entry<?, ?> entry) throws CacheWriterException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void delete(Object key) throws CacheWriterException {
            // No-op.
        }
    }
}
package org.multibit.hd.ui.events.view;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import org.bitcoinj.core.Coin;
import org.multibit.hd.core.dto.RAGStatus;
import org.multibit.hd.core.error_reporting.ExceptionHandler;
import org.multibit.hd.ui.events.controller.ShowScreenEvent;
import org.multibit.hd.ui.models.AlertModel;
import org.multibit.hd.ui.views.ViewKey;
import org.multibit.hd.ui.views.components.wallet_detail.WalletDetail;
import org.multibit.hd.ui.views.screens.Screen;
import org.multibit.hd.ui.views.wizards.AbstractWizardModel;
import org.multibit.hd.ui.views.wizards.WizardButton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.swing.*;
import java.math.BigDecimal;
import java.util.Set;

/**
 * <p>Factory to provide the following to application API:</p>
 * <ul>
 * <li>Entry point to broadcast application events associated with the UI</li>
 * </ul>
 * <p>An application event is a high level event with specific semantics. Normally a
 * low level event (such as a mouse click) will initiate it.</p>
 *
 * <p>It is expected that ViewEvents will interact with Swing components and as such is
 * expected to execute on the EDT. This cannot be provided directly within the method
 * by wrapping since the semantics of the calling code may require synchronous execution
 * across many subscribers. One example is if the UI is required to "freeze" in order to
 * prevent the user from interacting with it during an atomic operation.</p>
 *
 * @since 0.0.1
 */
public class ViewEvents {

  private static final Logger log = LoggerFactory.getLogger(ViewEvents.class);

  /**
   * Guava event bus carrying all view events.
   * Do not post to this directly - use the fire*Event() methods so that
   * delivery always occurs on the EDT
   */
  private static final EventBus viewEventBus = new EventBus(ExceptionHandler.newSubscriberExceptionHandler());

  /**
   * Keep track of the Guava event bus subscribers for a clean shutdown
   */
  private static final Set<Object> viewEventBusSubscribers = Sets.newHashSet();

  /**
   * A time period used to slow down UI response (milliseconds)
   */
  public static final int SLOWDOWN_UPDATE_TIME = 4000;

  /**
   * Utilities have a private constructor
   */
  private ViewEvents() {
  }

  /**
   * <p>Subscribe to events. Repeating a subscribe will not affect the event bus.</p>
   * <p>This approach ensures all subscribers will be correctly removed during a shutdown or wizard hide event</p>
   *
   * @param subscriber The subscriber (use the Guava <code>@Subscribe</code> annotation to subscribe a method)
   */
  public static void subscribe(Object subscriber) {

    Preconditions.checkNotNull(subscriber, "'subscriber' must be present");

    if (viewEventBusSubscribers.add(subscriber)) {
      log.trace("Register: " + subscriber.getClass().getSimpleName());
      try {
        viewEventBus.register(subscriber);
      } catch (IllegalArgumentException e) {
        log.warn("Unexpected failure to register");
      }
    } else {
      log.warn("Subscriber already registered: " + subscriber.getClass().getSimpleName());
    }

  }

  /**
   * <p>Unsubscribe a known subscriber from events. Providing an unknown object will not affect the event bus.</p>
   * <p>This approach ensures all subscribers will be correctly removed during a shutdown or wizard hide event</p>
   *
   * @param subscriber The subscriber (use the Guava <code>@Subscribe</code> annotation to subscribe a method)
   */
  public static void unsubscribe(Object subscriber) {

    Preconditions.checkNotNull(subscriber, "'subscriber' must be present");

    if (viewEventBusSubscribers.contains(subscriber)) {
      log.trace("Unregister: " + subscriber.getClass().getSimpleName());
      try {
        viewEventBus.unregister(subscriber);
      } catch (IllegalArgumentException e) {
        log.warn("Unexpected failure to unregister");
      }
      viewEventBusSubscribers.remove(subscriber);
    } else {
      log.warn("Subscriber already unregistered: " + subscriber.getClass().getSimpleName());
    }

  }

  /**
   * <p>Unsubscribe all subscribers from events</p>
   * <p>This approach ensures all subscribers will be correctly removed during a shutdown or wizard hide event</p>
   */
  public static void unsubscribeAll() {

    // Snapshot the subscriber set since unsubscribe() mutates it during iteration
    Set<Object> allSubscribers = Sets.newHashSet();
    allSubscribers.addAll(viewEventBusSubscribers);
    for (Object subscriber : allSubscribers) {
      unsubscribe(subscriber);
    }
    allSubscribers.clear();
    log.info("All subscribers removed");

  }

  /**
   * <p>Post an event to the view event bus, guaranteeing delivery on the EDT.</p>
   * <p>If the caller is already on the EDT the post is synchronous; otherwise it is
   * deferred with <code>invokeLater</code>. Note that the event object itself is
   * constructed eagerly on the calling thread by the fire*Event() methods.</p>
   *
   * @param event The event to post
   */
  private static void postToEventBus(final Object event) {
    if (SwingUtilities.isEventDispatchThread()) {
      viewEventBus.post(event);
    } else {
      SwingUtilities.invokeLater(
        new Runnable() {
          @Override
          public void run() {
            viewEventBus.post(event);
          }
        });
    }
  }

  /**
   * <p>Broadcast a new "balance changed" event</p>
   *
   * @param coinBalance  The current balance in coins
   * @param localBalance The current balance in local currency
   * @param rateProvider The exchange rate provider (e.g. "Bitstamp")
   */
  public static void fireBalanceChangedEvent(
    final Coin coinBalance,
    final BigDecimal localBalance,
    final Optional<String> rateProvider
  ) {

    log.trace("Firing 'balance changed' event");
    postToEventBus(new BalanceChangedEvent(coinBalance, localBalance, rateProvider));

  }

  /**
   * <p>Broadcast a new "system status changed" event</p>
   *
   * @param localisedMessage The localised message to display alongside the severity
   * @param severity         The system status severity (normally in line with an alert)
   */
  public static void fireSystemStatusChangedEvent(final String localisedMessage, final RAGStatus severity) {

    log.trace("Firing 'system status changed' event");
    postToEventBus(new SystemStatusChangedEvent(localisedMessage, severity));

  }

  /**
   * <p>Broadcast a new "progress changed" event </p>
   *
   * @param localisedMessage The localised message to display alongside the progress bar
   * @param percent          The amount to display in percent
   */
  public static void fireProgressChangedEvent(final String localisedMessage, final int percent) {

    log.trace("Firing 'progress changed' event: '{}'", percent);
    postToEventBus(new ProgressChangedEvent(localisedMessage, percent));

  }

  /**
   * <p>Broadcast a new "alert added" event</p>
   *
   * @param alertModel The alert model for the new display
   */
  public static void fireAlertAddedEvent(final AlertModel alertModel) {

    log.trace("Firing 'alert added' event");
    postToEventBus(new AlertAddedEvent(alertModel));

  }

  /**
   * <p>Broadcast a new "switch wallet" event</p>
   */
  public static void fireSwitchWalletEvent() {

    log.debug("Firing 'switch wallet' event");
    postToEventBus(new SwitchWalletEvent());

  }

  /**
   * <p>Broadcast a new "alert removed" event</p>
   */
  public static void fireAlertRemovedEvent() {

    log.trace("Firing 'alert removed' event");
    postToEventBus(new AlertRemovedEvent());

  }

  /**
   * <p>Broadcast a new "wallet detail changed" event</p>
   *
   * @param walletDetail The wallet detail for the new display
   */
  public static void fireWalletDetailChangedEvent(final WalletDetail walletDetail) {

    log.trace("Firing 'walletDetailChanged' event");
    postToEventBus(new WalletDetailChangedEvent(walletDetail));

  }

  /**
   * <p>Broadcast a new "wizard button enabled" event</p>
   *
   * @param panelName    The panel name to which this applies
   * @param wizardButton The wizard button to which this applies
   * @param enabled      True if the button should be enabled
   */
  public static void fireWizardButtonEnabledEvent(
    final String panelName,
    final WizardButton wizardButton,
    final boolean enabled
  ) {

    log.trace("Firing 'wizard button enabled {}' event: {}", panelName, enabled);
    postToEventBus(new WizardButtonEnabledEvent(panelName, wizardButton, enabled));

  }

  /**
   * <p>Broadcast a new "wizard hide" event</p>
   *
   * @param panelName    The unique panel name to which this applies (use screen name for detail screens)
   * @param wizardModel  The wizard model containing all the user data
   * @param isExitCancel True if this hide event comes as a result of an exit or cancel
   */
  public static void fireWizardHideEvent(
    final String panelName,
    final AbstractWizardModel wizardModel,
    final boolean isExitCancel
  ) {

    log.trace("Firing 'wizard hide' event");
    postToEventBus(new WizardHideEvent(panelName, wizardModel, isExitCancel));

  }

  /**
   * <p>Broadcast a new "wizard popover hide" event</p>
   *
   * @param panelName    The unique panel name to which this applies (use screen name for detail screens)
   * @param isExitCancel True if this hide event comes as a result of an exit or cancel
   */
  public static void fireWizardPopoverHideEvent(final String panelName, final boolean isExitCancel) {

    log.trace("Firing 'wizard popover hide' event");
    postToEventBus(new WizardPopoverHideEvent(panelName, isExitCancel));

  }

  /**
   * <p>Broadcast a new "wizard deferred hide" event</p>
   *
   * @param panelName    The unique panel name to which this applies (use screen name for detail screens)
   * @param isExitCancel True if this deferred hide event comes as a result of an exit or cancel
   */
  public static void fireWizardDeferredHideEvent(final String panelName, final boolean isExitCancel) {

    log.trace("Firing 'wizard deferred hide' event");
    postToEventBus(new WizardDeferredHideEvent(panelName, isExitCancel));

  }

  /**
   * <p>Broadcast a new "component changed" event</p>
   *
   * @param panelName      The unique panel name to which this applies (use screen name for detail screens)
   * @param componentModel The component model containing the change (absent if the component has no model)
   */
  public static void fireComponentChangedEvent(final String panelName, final Optional componentModel) {

    log.trace("Firing 'component changed' event");
    postToEventBus(new ComponentChangedEvent(panelName, componentModel));

  }

  /**
   * <p>Broadcast a new "verification status changed" event</p>
   *
   * @param panelName The panel name to which this applies
   * @param status    True if the verification is OK
   */
  public static void fireVerificationStatusChangedEvent(final String panelName, final boolean status) {

    log.trace("Firing 'verification status changed' event: {}", status);
    postToEventBus(new VerificationStatusChangedEvent(panelName, status));

  }

  /**
   * <p>Broadcast a new "view changed" event</p>
   *
   * @param viewKey The view to which this applies
   * @param visible True if the view is "visible" (could be reduced height etc)
   */
  public static void fireViewChangedEvent(final ViewKey viewKey, final boolean visible) {

    log.trace("Firing 'view changed' event: {}", visible);
    postToEventBus(new ViewChangedEvent(viewKey, visible));

  }

  /**
   * <p>Broadcast a new "show detail screen" event</p>
   *
   * @param detailScreen The screen to show
   */
  public static void fireShowDetailScreenEvent(final Screen detailScreen) {

    log.trace("Firing 'show detail screen' event");
    postToEventBus(new ShowScreenEvent(detailScreen));

  }

}
package online.zhaopei.myproject.domain.ecssent;

import java.util.Date;

import online.zhaopei.myproject.domain.BaseDomain;

/**
 * Domain object for a tax message head record (package {@code ecssent} -
 * presumably a cross-border e-commerce customs tax declaration; confirm
 * against the message specification).
 *
 * <p>Plain mutable JavaBean: all state is exposed through getter/setter pairs.
 * NOTE(review): monetary amounts are held as {@code Double}; BigDecimal would
 * avoid binary floating-point rounding - confirm whether these values are
 * ever used in arithmetic before changing.
 */
public class TaxHead extends BaseDomain {

	/**
	 * serialVersionUID
	 */
	private static final long serialVersionUID = 1561700506262521785L;

	// Identity and linkage
	private String headGuid;       // unique id of this head record
	private Date returnTime;       // time the customs response was returned
	private String customsCode;    // customs office code
	private String invtNo;         // inventory number
	private String orderNo;        // order number
	private String logisticsNo;    // logistics (waybill) number
	private String taxNo;          // tax bill number

	// Tax amounts (NOTE: Double, see class comment)
	private Double taxTotal;       // total tax
	private Double customsTax;     // customs duty portion
	private Double valueAddedTax;  // VAT portion
	private Double consumptionTax; // consumption tax portion

	// Status and parties
	private String status;
	private String entDutyNo;
	private String note;
	private String idNumber;       // buyer identity number
	private String assureCode;     // guarantor code
	private String assureName;     // guarantor name
	private String ebcCode;        // e-commerce enterprise code
	private String ebcName;        // e-commerce enterprise name
	private String logisticsCode;  // logistics enterprise code
	private String logisticsName;  // logistics enterprise name
	private Integer delFlag;       // logical-delete flag
	private String msgGuid;        // originating message guid

	// Query/search helpers (used for date-range filtering, not persisted facts)
	private Integer sysDays;
	private String beginSysDate;
	private String endSysDate;
	private String sysDateStr;
	private Date sysDate;

	private String agentCode;      // declaring agent code
	private String agentName;      // declaring agent name

	public String getHeadGuid() {
		return headGuid;
	}

	public void setHeadGuid(String headGuid) {
		this.headGuid = headGuid;
	}

	public Date getReturnTime() {
		return returnTime;
	}

	public void setReturnTime(Date returnTime) {
		this.returnTime = returnTime;
	}

	public String getCustomsCode() {
		return customsCode;
	}

	public void setCustomsCode(String customsCode) {
		this.customsCode = customsCode;
	}

	public String getInvtNo() {
		return invtNo;
	}

	public void setInvtNo(String invtNo) {
		this.invtNo = invtNo;
	}

	public String getTaxNo() {
		return taxNo;
	}

	public void setTaxNo(String taxNo) {
		this.taxNo = taxNo;
	}

	public Double getTaxTotal() {
		return taxTotal;
	}

	public void setTaxTotal(Double taxTotal) {
		this.taxTotal = taxTotal;
	}

	public Double getCustomsTax() {
		return customsTax;
	}

	public void setCustomsTax(Double customsTax) {
		this.customsTax = customsTax;
	}

	public Double getValueAddedTax() {
		return valueAddedTax;
	}

	public void setValueAddedTax(Double valueAddedTax) {
		this.valueAddedTax = valueAddedTax;
	}

	public Double getConsumptionTax() {
		return consumptionTax;
	}

	public void setConsumptionTax(Double consumptionTax) {
		this.consumptionTax = consumptionTax;
	}

	public String getStatus() {
		return status;
	}

	public void setStatus(String status) {
		this.status = status;
	}

	public String getEntDutyNo() {
		return entDutyNo;
	}

	public void setEntDutyNo(String entDutyNo) {
		this.entDutyNo = entDutyNo;
	}

	public String getNote() {
		return note;
	}

	public void setNote(String note) {
		this.note = note;
	}

	public String getIdNumber() {
		return idNumber;
	}

	public void setIdNumber(String idNumber) {
		this.idNumber = idNumber;
	}

	public String getAssureCode() {
		return assureCode;
	}

	public void setAssureCode(String assureCode) {
		this.assureCode = assureCode;
	}

	public String getEbcCode() {
		return ebcCode;
	}

	public void setEbcCode(String ebcCode) {
		this.ebcCode = ebcCode;
	}

	public String getLogisticsCode() {
		return logisticsCode;
	}

	public void setLogisticsCode(String logisticsCode) {
		this.logisticsCode = logisticsCode;
	}

	public Integer getDelFlag() {
		return delFlag;
	}

	public void setDelFlag(Integer delFlag) {
		this.delFlag = delFlag;
	}

	public String getMsgGuid() {
		return msgGuid;
	}

	public void setMsgGuid(String msgGuid) {
		this.msgGuid = msgGuid;
	}

	public Integer getSysDays() {
		return sysDays;
	}

	public void setSysDays(Integer sysDays) {
		this.sysDays = sysDays;
	}

	public Date getSysDate() {
		return sysDate;
	}

	public void setSysDate(Date sysDate) {
		this.sysDate = sysDate;
	}

	public String getBeginSysDate() {
		return beginSysDate;
	}

	public void setBeginSysDate(String beginSysDate) {
		this.beginSysDate = beginSysDate;
	}

	public String getEndSysDate() {
		return endSysDate;
	}

	public void setEndSysDate(String endSysDate) {
		this.endSysDate = endSysDate;
	}

	public String getSysDateStr() {
		return sysDateStr;
	}

	public void setSysDateStr(String sysDateStr) {
		this.sysDateStr = sysDateStr;
	}

	public String getAgentCode() {
		return agentCode;
	}

	public void setAgentCode(String agentCode) {
		this.agentCode = agentCode;
	}

	public String getOrderNo() {
		return orderNo;
	}

	public void setOrderNo(String orderNo) {
		this.orderNo = orderNo;
	}

	public String getLogisticsNo() {
		return logisticsNo;
	}

	public void setLogisticsNo(String logisticsNo) {
		this.logisticsNo = logisticsNo;
	}

	public String getAssureName() {
		return assureName;
	}

	public void setAssureName(String assureName) {
		this.assureName = assureName;
	}

	public String getEbcName() {
		return ebcName;
	}

	public void setEbcName(String ebcName) {
		this.ebcName = ebcName;
	}

	public String getLogisticsName() {
		return logisticsName;
	}

	public void setLogisticsName(String logisticsName) {
		this.logisticsName = logisticsName;
	}

	public String getAgentName() {
		return agentName;
	}

	public void setAgentName(String agentName) {
		this.agentName = agentName;
	}
}
/* * ============================================================================= * Simplified BSD License, see http://www.opensource.org/licenses/ * ----------------------------------------------------------------------------- * Copyright (c) 2008-2009, Marco Terzer, Zurich, Switzerland * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Swiss Federal Institute of Technology Zurich * nor the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* ============================================================================= */ package ch.javasoft.xml.config; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.lang.reflect.Constructor; import java.net.URL; import java.util.logging.Level; import java.util.logging.Logger; import org.dom4j.Element; /** * Parses stream elements (or other enclosing elements with a type attribute) * of the following types: * <pre> <stream type="console"> <console type="err"/> </stream> <stream type="console"> <console type="out"/> </stream> <stream type="file"> <file name="{work-dir}/{-log[2]}"/> </stream> <stream type="logger"> <logger name="" level="INFO"/> </stream> * </pre> */ public class StreamConfigParser { private static final String LOG_PRINT_STREAM_CLASSNAME = "ch.javasoft.util.logging.LogPrintStream"; public static enum XmlElement implements XmlNode { stream, console, file, url, logger; public String getXmlName() { return name(); } } public static enum XmlAttribute implements XmlNode { name, type, value, level; public String getXmlName() {return name();} } public static enum XmlOutputStreamType { file, console, logger; public String xmlName() { return name(); } public static XmlOutputStreamType find(String typeAttributeValue) { for (XmlOutputStreamType type : values()) { if (type.xmlName().equals(typeAttributeValue)) return type; } return null; } } public static enum XmlInputStreamType { file, url; public String xmlName() { return name(); } public static XmlInputStreamType find(String typeAttributeValue) { for (XmlInputStreamType type : values()) { if (type.xmlName().equals(typeAttributeValue)) return type; } return null; } } public static enum ConsoleType { out, err; public static ConsoleType find(String typeAttributeValue) { for (ConsoleType type : ConsoleType.values()) { if 
(type.name().equalsIgnoreCase(typeAttributeValue)) return type; } return null; } public PrintStream getStream() { return this == err ? System.err : System.out; } } /** * Parses an output stream of any type, see class comments * (stream element can be named differently). */ public static OutputStream parseOutputStream(Element streamElement) throws XmlConfigException { String typeName = streamElement.attributeValue(XmlAttribute.type.getXmlName()); XmlOutputStreamType type = XmlOutputStreamType.find(typeName); if (type == null) { throw new XmlConfigException("unknown stream type '" + typeName + "'", streamElement); } switch(type) { case file: return parseFileOutputStream(streamElement); case console: return parseConsolePrintStream(streamElement); case logger: return parseLoggerStream(streamElement); default: //should not happen throw new XmlConfigException("internal error, unknown stream type " + type, streamElement); } } /** * Parses (stream element can be named differently): * <pre> <stream type="file"> <file name="{work-dir}/{-out[2]}"/> </stream> * </pre> */ public static FileOutputStream parseFileOutputStream(Element streamElement) throws XmlConfigException { Element elFile = XmlUtil.getRequiredSingleChildElement(streamElement, XmlElement.file); File file = FileConfigParser.parseFile(elFile); try { return new FileOutputStream(file); } catch (IOException ex) { throw new XmlConfigException("cannot open file output stream for file '" + file.getAbsolutePath() + "'", streamElement, ex); } } /** * Parses (stream element can be named differently): * <pre> <stream type="console"> <console type="out"/> </stream> * </pre> */ public static PrintStream parseConsolePrintStream(Element streamElement) throws XmlConfigException { Element elConsole = XmlUtil.getRequiredSingleChildElement(streamElement, XmlElement.console); String consoleName = elConsole.attributeValue(XmlAttribute.type.getXmlName()); ConsoleType type = ConsoleType.find(consoleName); if (type == null) { throw new 
XmlConfigException("Unknown console type '" + consoleName + "' for " + XmlElement.console.getXmlName() + " element", streamElement); } return type.getStream(); } /** * Parses (stream element can be named differently): * <pre> <stream type="logger"> <logger name="" level="INFO"/> </stream> * </pre> */ public static PrintStream parseLoggerStream(Element streamElement) throws XmlConfigException { Element elLogLevel = XmlUtil.getRequiredSingleChildElement(streamElement, XmlElement.logger); String loggerName = elLogLevel.attributeValue(XmlAttribute.name.getXmlName()); String levelName = elLogLevel.attributeValue(XmlAttribute.level.getXmlName()); Level logLevel; try { logLevel = Level.parse(levelName); } catch (Exception ex) { throw new XmlConfigException("invalid log level: " + levelName, elLogLevel); } Logger logger = Logger.getLogger(loggerName == null ? "" : loggerName); try { Class<?> streamClass = Class.forName(LOG_PRINT_STREAM_CLASSNAME); Constructor cons = streamClass.getConstructor(new Class[] {Logger.class, Level.class}); return (PrintStream)cons.newInstance(new Object[] {logger, logLevel}); } catch(Exception ex) { throw new XmlConfigException("cannot instantiate log print stream, e=" + ex, elLogLevel); } } /** * Parses (stream element can be named differently): * <pre> <stream type="file"> <file name="{work-dir}/{-out[2]}"/> </stream> <stream type="url"> <url name="http://..."/> </stream> <input type="file"> <file name="{work-dir}/{-out[2]}"/> </input> * </pre> */ public static InputStream parseInputStream(Element streamElement) throws XmlConfigException { String typeName = streamElement.attributeValue(XmlAttribute.type.getXmlName()); XmlInputStreamType type = XmlInputStreamType.find(typeName); if (type == null) { throw new XmlConfigException("unknown stream type '" + typeName + "'", streamElement); } switch(type) { case file: return parseFileInputStream(streamElement); case url: return parseURLInputStream(streamElement); default: //should not happen throw new 
XmlConfigException("internal error, unknown stream type " + type, streamElement); } } /** * Parses (stream element can be named differently): * <pre> <stream type="file"> <file name="{work-dir}/{-out[2]}"/> </stream> * </pre> */ public static FileInputStream parseFileInputStream(Element streamElement) throws XmlConfigException { Element elFile = XmlUtil.getRequiredSingleChildElement(streamElement, XmlElement.file); File file = FileConfigParser.parseFile(elFile); try { return new FileInputStream(file); } catch (IOException ex) { throw new XmlConfigException("cannot open file input stream for file '" + file.getAbsolutePath() + "'", streamElement, ex); } } /** * Parses (stream element can be named differently): * <pre> <stream type="url"> <url name="{work-dir}/{-out[2]}"/> </stream> * </pre> */ public static InputStream parseURLInputStream(Element streamElement) throws XmlConfigException { Element elURL = XmlUtil.getRequiredSingleChildElement(streamElement, XmlElement.url); URL url = URLConfigParser.parseURL(elURL); try { return url.openStream(); } catch (IOException ex) { throw new XmlConfigException("cannot open URL input stream url '" + url.toExternalForm() + "', e=" + ex, streamElement, ex); } } // no instances private StreamConfigParser() {} }
/* Copyright 2010, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

package com.google.refine.tests.exporters;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Properties;

import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.browsing.Engine;
import com.google.refine.exporters.HtmlTableExporter;
import com.google.refine.exporters.WriterExporter;
import com.google.refine.model.Cell;
import com.google.refine.model.Column;
import com.google.refine.model.ModelException;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
import com.google.refine.tests.ProjectManagerStub;
import com.google.refine.tests.RefineTest;

/**
 * TestNG tests for {@link HtmlTableExporter}: builds a small in-memory grid
 * and asserts the exact HTML document written to a {@link StringWriter}.
 */
public class HtmlExporterTests extends RefineTest {

    private static final String TEST_PROJECT_NAME = "html table exporter test project";

    @Override
    @BeforeTest
    public void init() {
        logger = LoggerFactory.getLogger(this.getClass());
    }

    //dependencies
    StringWriter writer;
    ProjectMetadata projectMetadata;
    Project project;
    Engine engine;
    Properties options;  // mocked; only stubbed per-test where needed

    //System Under Test
    WriterExporter SUT;

    // Fresh project, writer and mock options before every test method.
    @BeforeMethod
    public void SetUp(){
        SUT = new HtmlTableExporter();
        writer = new StringWriter();
        ProjectManager.singleton = new ProjectManagerStub();
        projectMetadata = new ProjectMetadata();
        project = new Project();
        projectMetadata.setName(TEST_PROJECT_NAME);
        ProjectManager.singleton.registerProject(project, projectMetadata);
        engine = new Engine(project);
        options = mock(Properties.class);
    }

    // Tear everything down so state cannot leak between test methods.
    @AfterMethod
    public void TearDown(){
        SUT = null;
        writer = null;
        ProjectManager.singleton.deleteProject(project.id);
        project = null;
        projectMetadata = null;
        engine = null;
        options = null;
    }

    @Test
    public void exportSimpleHtmlTable(){
        CreateGrid(2, 2);

        try {
            SUT.export(project, options, engine, writer);
        } catch (IOException e) {
            Assert.fail();
        }

        // Full-document comparison: header row plus two data rows.
        Assert.assertEquals(writer.toString(),
                "<html>\n" +
                "<head>\n" +
                "<title>" + TEST_PROJECT_NAME + "</title>\n" +
                "<meta charset=\"utf-8\" />\n" +
                "</head>\n" +
                "<body>\n" +
                "<table>\n" +
                "<tr><th>column0</th><th>column1</th></tr>\n" +
                "<tr><td>row0cell0</td><td>row0cell1</td></tr>\n" +
                "<tr><td>row1cell0</td><td>row1cell1</td></tr>\n" +
                "</table>\n" +
                "</body>\n" +
                "</html>\n");
    }

    // TODO: This test fails because the HTML table exporter
    // apparently doesn't honor the column header option. Should it?
    @Test(enabled=false)
    public void exportSimpleHtmlTableNoHeader(){
        CreateGrid(2, 2);
        when(options.getProperty("printColumnHeader")).thenReturn("false");

        try {
            SUT.export(project, options, engine, writer);
        } catch (IOException e) {
            Assert.fail();
        }

        Assert.assertEquals(writer.toString(),
                "<html>\n" +
                "<head>\n" +
                "<title>" + TEST_PROJECT_NAME + "</title>\n" +
                "<meta charset=\"utf-8\" />\n" +
                "</head>\n" +
                "<body>\n" +
                "<table>\n" +
                "<tr><td>row0cell0</td><td>row0cell1</td></tr>\n" +
                "<tr><td>row1cell0</td><td>row1cell1</td></tr>\n" +
                "</table>\n" +
                "</body>\n" +
                "</html>\n");
        verify(options,times(2)).getProperty("printColumnHeader");
    }

    @Test
    public void exportHtmlTableWithEmptyCells(){
        CreateGrid(3,3);

        // Null cells must render as empty <td></td>, not be skipped.
        project.rows.get(1).cells.set(1, null);
        project.rows.get(2).cells.set(0, null);
        try {
            SUT.export(project, options, engine, writer);
        } catch (IOException e) {
            Assert.fail();
        }

        Assert.assertEquals(writer.toString(),
                "<html>\n" +
                "<head>\n" +
                "<title>" + TEST_PROJECT_NAME + "</title>\n" +
                "<meta charset=\"utf-8\" />\n" +
                "</head>\n" +
                "<body>\n" +
                "<table>\n" +
                "<tr><th>column0</th><th>column1</th><th>column2</th></tr>\n" +
                "<tr><td>row0cell0</td><td>row0cell1</td><td>row0cell2</td></tr>\n" +
                "<tr><td>row1cell0</td><td></td><td>row1cell2</td></tr>\n" +
                "<tr><td></td><td>row2cell1</td><td>row2cell2</td></tr>\n" +
                "</table>\n" +
                "</body>\n" +
                "</html>\n");
    }

    //helper methods

    // Adds noOfColumns columns named "column0".."columnN" to the project model.
    protected void CreateColumns(int noOfColumns){
        for(int i = 0; i < noOfColumns; i++){
            try {
                project.columnModel.addColumn(i, new Column(i, "column" + i), true);
            } catch (ModelException e1) {
                Assert.fail("Could not create column");
            }
        }
    }

    // Fills the project with a grid of cells valued "row{i}cell{j}".
    protected void CreateGrid(int noOfRows, int noOfColumns){
        CreateColumns(noOfColumns);

        for(int i = 0; i < noOfRows; i++){
            Row row = new Row(noOfColumns);
            for(int j = 0; j < noOfColumns; j++){
                row.cells.add(new Cell("row" + i + "cell" + j, null));
            }
            project.rows.add(row);
        }
    }
}
/*
 * Copyright 2015 DECOIT GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.decoit.simu.cbor.ifmap.enums;

import org.junit.Test;
import static org.junit.Assert.*;

/**
 * Unit tests for {@code CBORTags.fromTagNumber(long)}: one mechanical test per
 * supported tag number, mapping the RFC 7049 standard tags (0-55799) and the
 * project's IF-MAP extension tags (42000-42003) to their enum constants, plus
 * one test asserting that an unknown number is rejected.
 *
 * @author Thomas Rix (rix@decoit.de)
 */
public class CBORTagsTest {
	@Test
	public void testFromTagNumber_DateTimeString() {
		long i = 0L;
		CBORTags expResult = CBORTags.DATE_TIME_STRING;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_DateTimeEpoch() {
		long i = 1L;
		CBORTags expResult = CBORTags.DATE_TIME_EPOCH;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_PositiveBignum() {
		long i = 2L;
		CBORTags expResult = CBORTags.POSITIVE_BIGNUM;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_NegativeBignum() {
		long i = 3L;
		CBORTags expResult = CBORTags.NEGATIVE_BIGNUM;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_DecimalFraction() {
		long i = 4L;
		CBORTags expResult = CBORTags.DECIMAL_FRACTION;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Bigfloat() {
		long i = 5L;
		CBORTags expResult = CBORTags.BIGFLOAT;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Base64UrlEncoding() {
		long i = 21L;
		CBORTags expResult = CBORTags.BASE64_URL_ENCODING;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Base64Encoding() {
		long i = 22L;
		CBORTags expResult = CBORTags.BASE64_ENCODING;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Base16Encoding() {
		long i = 23L;
		CBORTags expResult = CBORTags.BASE16_ENCODING;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_EncodedCbor() {
		long i = 24L;
		CBORTags expResult = CBORTags.ENCODED_CBOR_DATA_ITEM;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Uri() {
		long i = 32L;
		CBORTags expResult = CBORTags.URI;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Base64Url() {
		long i = 33L;
		CBORTags expResult = CBORTags.BASE64_URL;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_Base64() {
		long i = 34L;
		CBORTags expResult = CBORTags.BASE64;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_RegEx() {
		long i = 35L;
		CBORTags expResult = CBORTags.REGULAR_EXPRESSION;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_MimeMessage() {
		long i = 36L;
		CBORTags expResult = CBORTags.MIME_MESSAGE;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_ExtendedIdentifier() {
		long i = 42000L;
		CBORTags expResult = CBORTags.IF_MAP_EXTENDED_IDENTIFIER;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_IPv4() {
		long i = 42001L;
		CBORTags expResult = CBORTags.IPV4_ADDRESS;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_IPv6() {
		long i = 42002L;
		CBORTags expResult = CBORTags.IPV6_ADDRESS;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_MacAddress() {
		long i = 42003L;
		CBORTags expResult = CBORTags.MAC_ADDRESS;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	@Test
	public void testFromTagNumber_SelfDescribeCbor() {
		long i = 55799L;
		CBORTags expResult = CBORTags.SELF_DESCRIBE_CBOR;
		CBORTags result = CBORTags.fromTagNumber(i);
		assertEquals(expResult, result);
	}

	// The assignment's result is intentionally unused: the test only cares
	// that the call throws IllegalArgumentException for an unknown number.
	@Test(expected = IllegalArgumentException.class)
	public void testFromTagNumber_UnknownTagNumber() {
		long i = -1L;
		CBORTags result = CBORTags.fromTagNumber(i);
	}
}
package com.precisionguessworks.frc; import edu.wpi.first.wpilibj.AnalogChannel; import edu.wpi.first.wpilibj.CANJaguar; import edu.wpi.first.wpilibj.PIDController; import edu.wpi.first.wpilibj.PIDOutput; import edu.wpi.first.wpilibj.PIDSource; import edu.wpi.first.wpilibj.can.CANTimeoutException; public class Arm { public static final int kBaseValue = 200; public static final double kPUp = 0.004; public static final double kIUp = 0.00007; public static final double kDUp = .00002; public static final double kPDown = 0.001; public static final double kIDown = .000013; public static final double kDDown = .00002; public static final double kPStable = 0.006; public static final double kIStable = 0.000075; public static final double kDStable = .000022; public static final int kMaxSetpointDelta = 10; // units of (points*10^2)/sec private final AnalogChannel armPotentiometer; private final ArmOutput armOutput; private final PIDController controller; private int lastPosition = 0; private int targetPosition = 0; private int position = 0; private int prevPosition = 0; public Arm(AnalogChannel armPotentiometer, CANJaguar topMotor, CANJaguar bottomMotor) { this.armPotentiometer = armPotentiometer; this.armOutput = new ArmOutput(topMotor, bottomMotor); this.controller = new PIDController(kPUp, kIUp, kDUp, armPotentiometer, armOutput); this.controller.setOutputRange(-0.5, 0.5); this.controller.setInputRange(150, 900); this.controller.enable(); this.targetPosition = this.getCurrentPosition(); } public int getCurrentPosition() { return this.armPotentiometer.getValue(); } public int getTargetPosition() { return (int) this.targetPosition; } public int getSetpoint() { return (int) this.controller.getSetpoint(); } public void holdPosition() { System.out.println("hold position: " + this.getCurrentPosition()); this.setRawPosition(this.getCurrentPosition()); } public void schedule() { position = this.getTargetPosition(); lastPosition = this.getRawPosition(); // If we aren't moving and 
not too far off, bump up the power! if(Math.abs(prevPosition - lastPosition) < 3 && Math.abs(lastPosition - position) < 40) { if(lastPosition > 500) { this.controller.setPID(kPStable / 3, kIStable, kDStable); } else { this.controller.setPID(kPStable, kIStable, kDStable); } this.controller.setOutputRange(-0.5, .5); // System.out.println("using stable pid."); } else if (position > lastPosition) { //GOING UP this.controller.setPID(kPUp, kIUp, kDUp); this.controller.setOutputRange(-0.5, .5); // System.out.println("up pid"); } else { //GOING DOWN this.controller.setPID(kPDown, kIDown, kDDown); this.controller.setOutputRange(-0.45, 0.45); // System.out.println("down pid"); } // System.out.println("scheduler: " + position + ", " + lastPosition); if(position - lastPosition > 0) { // INCREASING if(position - lastPosition > kMaxSetpointDelta) { // System.out.println("Would be scheduled to " + (lastPosition + kMaxSetpointDelta)); // System.out.println(" instead of: " + position); position = lastPosition + kMaxSetpointDelta; } } else if (lastPosition - position > 0) { // DECREASING if(lastPosition - position > kMaxSetpointDelta) { // System.out.println("Would be scheduled to " + (lastPosition - kMaxSetpointDelta)); // System.out.println(" instead of: " + position); position = lastPosition - kMaxSetpointDelta; } } this.setRawPosition(position); prevPosition = lastPosition; } private int time; public void setRawPosition(int position) { this.controller.setSetpoint(position); } public int getRawPosition() { return (int)this.controller.getSetpoint(); } public void setPosition(int position) { int newPosition = kBaseValue + position; // System.out.println("target position: " + newPosition); if (this.targetPosition > newPosition) { // Going DOWN this.controller.setPID(kPDown, kIDown, kDDown); this.controller.setOutputRange(-0.45, 0.45); System.out.println("down pid"); } else { // Going UP this.controller.setPID(kPUp, kIUp, kDUp); this.controller.setOutputRange(-0.5, .5); 
System.out.println("up pid"); } this.targetPosition = newPosition; } public double getCurrentSpeed() { return this.armOutput.getCurrentSpeed(); } public void resetSpeedLimiter() { this.armOutput.resetPrevSpeed(); } protected void resetPIDController() { this.controller.reset(); this.holdPosition(); this.controller.enable(); } protected void resetPIDInternals() { this.controller.reset(); this.controller.enable(); } public void manualDrive(double output) { this.armOutput.drive(output); } protected static double limit(double num) { if (num > 1.0) { return 1.0; } if (num < -1.0) { return -1.0; } return num; } public class ArmPID extends PIDController { public ArmPID(double kP, double kI, double kD, PIDSource pidSource, PIDOutput pidOutput) { super(kP, kI, kD, pidSource, pidOutput); } // public double getTotalError() { // return this.m_totalError; // } } public class ArmOutput implements PIDOutput { public static final double maxDelta = .05; private CANJaguar topMotor; private CANJaguar bottomMotor; private double prevSpeed = 0; public ArmOutput(CANJaguar topMotor, CANJaguar bottomMotor) { this.topMotor = topMotor; this.bottomMotor = bottomMotor; } public void resetPrevSpeed() { this.prevSpeed = 0; } public double getCurrentSpeed() { return this.prevSpeed; } public void drive(double output) { byte syncGroup = (byte) 64; if (output > prevSpeed && Math.abs(output - prevSpeed) > maxDelta) { output = prevSpeed + maxDelta; } else if (output < prevSpeed && Math.abs(prevSpeed - output) > maxDelta) { output = prevSpeed - maxDelta; } double top = Arm.limit(output); double bottom = -Arm.limit(output); this.prevSpeed = output; try { this.topMotor.setX(top, syncGroup); this.bottomMotor.setX(bottom, syncGroup); CANJaguar.updateSyncGroup(syncGroup); } catch (CANTimeoutException ex) { } } public void pidWrite(double output) { this.drive(output); } } }
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.dao;

import com.thoughtworks.go.domain.NotificationFilter;
import com.thoughtworks.go.domain.StageEvent;
import com.thoughtworks.go.domain.User;
import com.thoughtworks.go.server.cache.GoCache;
import org.hamcrest.Matchers;
import org.hibernate.SessionFactory;
import org.hibernate.stat.SecondLevelCacheStatistics;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.StopWatch;

import java.util.Arrays;
import java.util.HashSet;

import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Integration tests for the caching behaviour of {@link UserSqlMapDao}: Hibernate's
 * second-level cache of {@code User} entities and their {@code notificationFilters}
 * collection, plus the hand-rolled {@code GoCache} entry holding the enabled-user count.
 * The invariant under test: writes (save/enable/disable) must invalidate the enabled-user
 * count; pure reads must not.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = {
        "classpath:/applicationContext-global.xml",
        "classpath:/applicationContext-dataLocalAccess.xml",
        "classpath:/testPropertyConfigurer.xml",
        "classpath:/spring-all-servlet.xml",
})
public class UserSqlMapDaoCachingTest {
    @Autowired
    private UserSqlMapDao userDao;
    @Autowired
    private DatabaseAccessHelper dbHelper;
    @Autowired
    private GoCache goCache;
    @Autowired
    private SessionFactory sessionFactory;

    @BeforeEach
    public void setup() throws Exception {
        // Start each test with clean Hibernate statistics and a fresh database.
        sessionFactory.getStatistics().clear();
        dbHelper.onSetUp();
    }

    @AfterEach
    public void teardown() throws Exception {
        dbHelper.onTearDown();
        sessionFactory.getStatistics().clear();
    }

    // findUser() should populate both the User entity cache and the
    // notificationFilters collection cache with one entry for that user.
    @Test
    public void shouldCacheUserOnFind() {
        User first = new User("first");
        // NOTE: "pipline" is the (misspelled) test fixture pipeline name — kept verbatim.
        first.addNotificationFilter(new NotificationFilter("pipline", "stage1", StageEvent.Fails, true));
        first.addNotificationFilter(new NotificationFilter("pipline", "stage2", StageEvent.Fails, true));

        // Snapshot cache sizes before the write so assertions are relative, not absolute.
        int originalUserCacheSize = sessionFactory.getStatistics().getSecondLevelCacheStatistics(User.class.getCanonicalName()).getEntries().size();
        int originalNotificationsCacheSize = sessionFactory.getStatistics().getSecondLevelCacheStatistics(User.class.getCanonicalName() + ".notificationFilters").getEntries().size();
        userDao.saveOrUpdate(first);
        long userId = userDao.findUser("first").getId();

        assertThat(sessionFactory.getStatistics().getSecondLevelCacheStatistics(User.class.getCanonicalName()).getEntries().size(), is(originalUserCacheSize + 1));
        SecondLevelCacheStatistics notificationFilterCollectionCache = sessionFactory.getStatistics().getSecondLevelCacheStatistics(User.class.getCanonicalName() + ".notificationFilters");
        assertThat(notificationFilterCollectionCache.getEntries().size(), is(originalNotificationsCacheSize + 1));
        // The collection cache is keyed by the owning user's id.
        assertThat(notificationFilterCollectionCache.getEntries().get(userId), is(Matchers.notNullValue()));
    }

    // Saving a user changes the enabled-user population, so the count must be evicted.
    @Test
    public void shouldRemoveEnabledUserCountFromCacheWhenAUserIsSaved() throws Exception {
        makeSureThatCacheIsInitialized();

        userDao.saveOrUpdate(new User("some-random-user"));

        assertThatEnabledUserCacheHasBeenCleared();
    }

    // Disabling a user must evict the cached enabled-user count.
    @Test
    public void shouldRemoveEnabledUserCountFromCacheWhenAUserIsDisabled() throws Exception {
        userDao.saveOrUpdate(new User("some-random-user"));
        makeSureThatCacheIsInitialized();

        userDao.disableUsers(Arrays.asList("some-random-user"));

        assertThatEnabledUserCacheHasBeenCleared();
    }

    // Enabling a user must evict the cached enabled-user count.
    @Test
    public void shouldRemoveEnabledUserCountFromCacheWhenAUserIsEnabled() throws Exception {
        userDao.saveOrUpdate(new User("some-random-user"));
        makeSureThatCacheIsInitialized();

        userDao.enableUsers(Arrays.asList("some-random-user"));

        assertThatEnabledUserCacheHasBeenCleared();
    }

    // Read-only operations below must leave the cached count intact.
    @Test
    public void shouldNOTRemoveEnabledUserCountFromCacheWhenFindUserHappens() throws Exception {
        makeSureThatCacheIsInitialized();

        userDao.findUser("some-random-user");

        assertThatEnabledUserCacheExists();
    }

    @Test
    public void shouldNOTRemoveEnabledUserCountFromCacheWhenAllUsersAreLoaded() throws Exception {
        makeSureThatCacheIsInitialized();

        userDao.allUsers();

        assertThatEnabledUserCacheExists();
    }

    @Test
    public void shouldNOTRemoveEnabledUserCountFromCacheWhenEnabledUsersAreLoaded() throws Exception {
        makeSureThatCacheIsInitialized();

        userDao.enabledUsers();

        assertThatEnabledUserCacheExists();
    }

    @Test
    public void shouldNOTRemoveEnabledUserCountFromCacheWhenFindUsernamesForIds() throws Exception {
        userDao.saveOrUpdate(new User("some-random-user"));
        User user = userDao.findUser("some-random-user");

        HashSet<Long> userIds = new HashSet<>();
        userIds.add(user.getId());

        makeSureThatCacheIsInitialized();

        userDao.findUsernamesForIds(userIds);

        assertThatEnabledUserCacheExists();
    }

    @Test
    public void shouldNOTRemoveEnabledUserCountFromCacheWhenUserIsLoaded() throws Exception {
        userDao.saveOrUpdate(new User("some-random-user"));
        User user = userDao.findUser("some-random-user");

        makeSureThatCacheIsInitialized();

        userDao.load(user.getId());

        assertThatEnabledUserCacheExists();
    }

    // Hammers reads and cache-evicting writes concurrently; passes if no
    // exception/deadlock occurs within the timeout. StopWatch lines are for
    // local diagnosis only (prints deliberately commented out).
    @Test
    @Timeout(60)
    public void enabledUserCacheShouldBeThreadSafe() throws Exception {
        ThreadSafetyChecker threadSafetyChecker = new ThreadSafetyChecker(10000);

        threadSafetyChecker.addOperation(new ThreadSafetyChecker.Operation() {
            @Override
            public void execute(int runIndex) {
                StopWatch stopWatch = new StopWatch("enabledUserCount");
                stopWatch.start("enabledUserCount");
                userDao.enabledUserCount();
                stopWatch.stop();
//                System.out.println(stopWatch.shortSummary());
            }
        });

        threadSafetyChecker.addOperation(new ThreadSafetyChecker.Operation() {
            @Override
            public void execute(int runIndex) {
                StopWatch stopWatch = new StopWatch("saveOrUpdate");
                stopWatch.start("saveOrUpdate");
                // runIndex in the name keeps each inserted user unique across runs
                userDao.saveOrUpdate(new User("some-random-user " + runIndex));
                stopWatch.stop();
//                System.out.println(stopWatch.shortSummary());
            }
        });

        threadSafetyChecker.addOperation(new ThreadSafetyChecker.Operation() {
            @Override
            public void execute(int runIndex) {
                StopWatch stopWatch = new StopWatch("enableUsers");
                stopWatch.start("enableUsers");
                userDao.enableUsers(Arrays.asList("some-random-user " + runIndex));
                stopWatch.stop();
//                System.out.println(stopWatch.shortSummary());
            }
        });

        threadSafetyChecker.run(250);
    }

    /** Asserts the enabled-user count entry has been evicted from GoCache. */
    private void assertThatEnabledUserCacheHasBeenCleared() {
        assertThat(goCache.get(UserSqlMapDao.ENABLED_USER_COUNT_CACHE_KEY), is(nullValue()));
    }

    /** Asserts the enabled-user count entry is still present in GoCache. */
    private void assertThatEnabledUserCacheExists() {
        assertThat(goCache.get(UserSqlMapDao.ENABLED_USER_COUNT_CACHE_KEY), is(not(nullValue())));
    }

    /** Primes the enabled-user count cache entry and verifies it is populated. */
    private void makeSureThatCacheIsInitialized() {
        userDao.enabledUserCount();
        assertThatEnabledUserCacheExists();
    }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.examples.bigquery;

import com.google.cloud.Tuple;
import com.google.cloud.WriteChannel;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.CopyJobConfiguration;
import com.google.cloud.bigquery.Dataset;
import com.google.cloud.bigquery.DatasetId;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.ExternalTableDefinition;
import com.google.cloud.bigquery.ExtractJobConfiguration;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.QueryRequest;
import com.google.cloud.bigquery.QueryResponse;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.ViewDefinition;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import com.google.common.collect.ImmutableMap;

import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * An example of using Google BigQuery.
 *
 * <p>This example demonstrates a simple/typical BigQuery usage.
 *
 * <p>See the
 * <a href="https://github.com/GoogleCloudPlatform/google-cloud-java/blob/master/google-cloud-examples/README.md">
 * README</a> for compilation instructions. Run this code with
 * <pre>{@code target/appassembler/bin/BigQueryExample [<project_id>]
 * list datasets |
 * list tables <dataset> |
 * list jobs |
 * list data <dataset> <table> |
 * info dataset <dataset> |
 * info table <dataset> <table> |
 * info job <job> |
 * create dataset <dataset> |
 * create table <dataset> <table> (<fieldName>:<primitiveType>)+ |
 * create view <dataset> <table> <query> |
 * create external-table <dataset> <table> <format> (<fieldName>:<primitiveType>)+ <sourceUri> |
 * delete dataset <dataset> |
 * delete table <dataset> <table> |
 * cancel <job> |
 * copy <sourceDataset> <sourceTable> <destinationDataset> <destinationTable> |
 * load <dataset> <table> <format> <sourceUri>+ |
 * extract <dataset> <table> <format> <destinationUri>+ |
 * query <query> |
 * load-file <dataset> <table> <format> <filePath>}</pre>
 *
 * <p>The first parameter is an optional {@code project_id} (logged-in project will be used if not
 * supplied). Second parameter is a BigQuery operation and can be used to demonstrate its usage. For
 * operations that apply to more than one entity (`list`, `create`, `info` and `delete`) the third
 * parameter specifies the entity. {@code <primitiveType>} indicates that only primitive types are
 * supported by the {@code create table} and {@code create external-table} operations
 * ({@code string}, {@code float}, {@code integer}, {@code timestamp}, {@code boolean},
 * {@code bytes}). {@code <sourceUri>}, {@code <sourceUris>} and {@code <destinationUris>}
 * parameters are URIs to Google Cloud Storage blobs, in the form {@code gs://bucket/path}.
 * See each action's run method for the specific BigQuery interaction.
 */
public class BigQueryExample {

  // Number of bytes streamed per FileChannel.transferTo call in load-file.
  private static final int CHUNK_SIZE = 8 * 256 * 1024;
  private static final Map<String, BigQueryAction> CREATE_ACTIONS = new HashMap<>();
  private static final Map<String, BigQueryAction> INFO_ACTIONS = new HashMap<>();
  private static final Map<String, BigQueryAction> LIST_ACTIONS = new HashMap<>();
  private static final Map<String, BigQueryAction> DELETE_ACTIONS = new HashMap<>();
  private static final Map<String, BigQueryAction> ACTIONS = new HashMap<>();

  /**
   * Base class for an example action: {@link #parse} turns command-line arguments into a typed
   * argument object, {@link #run} performs the BigQuery interaction, and {@link #params} describes
   * the expected arguments for usage messages.
   */
  private abstract static class BigQueryAction<T> {

    abstract void run(BigQuery bigquery, T arg) throws Exception;

    abstract T parse(String... args) throws Exception;

    protected String params() {
      return "";
    }
  }

  /**
   * Dispatching action: the first argument names a sub-action ("dataset", "table", ...); the
   * remaining arguments are forwarded to that sub-action's parser.
   */
  private static class ParentAction extends BigQueryAction<Tuple<BigQueryAction, Object>> {

    private final Map<String, BigQueryAction> subActions;

    public ParentAction(Map<String, BigQueryAction> subActions) {
      this.subActions = ImmutableMap.copyOf(subActions);
    }

    @Override
    @SuppressWarnings("unchecked")
    void run(BigQuery bigquery, Tuple<BigQueryAction, Object> subaction) throws Exception {
      subaction.x().run(bigquery, subaction.y());
    }

    @Override
    Tuple<BigQueryAction, Object> parse(String... args) throws Exception {
      if (args.length >= 1) {
        BigQueryAction action = subActions.get(args[0]);
        if (action != null) {
          Object actionArguments = action.parse(Arrays.copyOfRange(args, 1, args.length));
          return Tuple.of(action, actionArguments);
        } else {
          throw new IllegalArgumentException("Unrecognized entity '" + args[0] + "'.");
        }
      }
      throw new IllegalArgumentException("Missing required entity.");
    }

    @Override
    public String params() {
      StringBuilder builder = new StringBuilder();
      for (Map.Entry<String, BigQueryAction> entry : subActions.entrySet()) {
        builder.append('\n').append(entry.getKey());
        String param = entry.getValue().params();
        if (param != null && !param.isEmpty()) {
          builder.append(' ').append(param);
        }
      }
      return builder.toString();
    }
  }

  /** Base class for actions that take no arguments. */
  private abstract static class NoArgsAction extends BigQueryAction<Void> {
    @Override
    Void parse(String... args) throws Exception {
      if (args.length == 0) {
        return null;
      }
      throw new IllegalArgumentException("This action takes no arguments.");
    }
  }

  /**
   * This class demonstrates how to list BigQuery Datasets.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets/list">Datasets: list
   *     </a>
   */
  private static class ListDatasetsAction extends NoArgsAction {
    @Override
    public void run(BigQuery bigquery, Void arg) {
      for (Dataset dataset : bigquery.listDatasets().iterateAll()) {
        System.out.println(dataset);
      }
    }
  }

  /** Base class for actions whose single argument is a dataset id. */
  private abstract static class DatasetAction extends BigQueryAction<DatasetId> {
    @Override
    DatasetId parse(String... args) throws Exception {
      String message;
      if (args.length == 1) {
        return DatasetId.of(args[0]);
      } else if (args.length > 1) {
        message = "Too many arguments.";
      } else {
        message = "Missing required dataset id.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    public String params() {
      return "<dataset>";
    }
  }

  /**
   * This class demonstrates how to list BigQuery Tables in a Dataset.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/list">Tables: list</a>
   */
  private static class ListTablesAction extends DatasetAction {
    @Override
    public void run(BigQuery bigquery, DatasetId datasetId) {
      for (Table table : bigquery.listTables(datasetId).iterateAll()) {
        System.out.println(table);
      }
    }
  }

  /**
   * This class demonstrates how to retrieve information on a BigQuery Dataset.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets/get">Datasets: get
   *     </a>
   */
  private static class DatasetInfoAction extends DatasetAction {
    @Override
    public void run(BigQuery bigquery, DatasetId datasetId) {
      System.out.println("Dataset info: " + bigquery.getDataset(datasetId));
    }
  }

  /**
   * This class demonstrates how to create a BigQuery Dataset.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert">Datasets:
   *     insert</a>
   */
  private static class CreateDatasetAction extends DatasetAction {
    @Override
    public void run(BigQuery bigquery, DatasetId datasetId) {
      bigquery.create(DatasetInfo.newBuilder(datasetId).build());
      System.out.println("Created dataset " + datasetId);
    }
  }

  /**
   * This class demonstrates how to delete a BigQuery Dataset.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets/delete">Datasets:
   *     delete</a>
   */
  private static class DeleteDatasetAction extends DatasetAction {
    @Override
    public void run(BigQuery bigquery, DatasetId datasetId) {
      if (bigquery.delete(datasetId)) {
        System.out.println("Dataset " + datasetId + " was deleted");
      } else {
        System.out.println("Dataset " + datasetId + " not found");
      }
    }
  }

  /** Base class for actions whose two arguments are a dataset id and a table id. */
  private abstract static class TableAction extends BigQueryAction<TableId> {
    @Override
    TableId parse(String... args) throws Exception {
      String message;
      if (args.length == 2) {
        return TableId.of(args[0], args[1]);
      } else if (args.length < 2) {
        message = "Missing required dataset and table id.";
      } else {
        message = "Too many arguments.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    public String params() {
      return "<dataset> <table>";
    }
  }

  /**
   * This class demonstrates how to retrieve information on a BigQuery Table.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/get">Tables: get</a>
   */
  private static class TableInfoAction extends TableAction {
    @Override
    public void run(BigQuery bigquery, TableId tableId) {
      System.out.println("Table info: " + bigquery.getTable(tableId));
    }
  }

  /**
   * This class demonstrates how to delete a BigQuery Table.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/delete">Tables: delete
   *     </a>
   */
  private static class DeleteTableAction extends TableAction {
    @Override
    public void run(BigQuery bigquery, TableId tableId) {
      if (bigquery.delete(tableId)) {
        System.out.println("Table " + tableId + " was deleted");
      } else {
        System.out.println("Table " + tableId + " not found");
      }
    }
  }

  /**
   * This class demonstrates how to list the rows in a BigQuery Table.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list">Tabledata:
   *     list</a>
   */
  private static class ListTableDataAction extends TableAction {
    @Override
    public void run(BigQuery bigquery, TableId tableId) {
      for (List<FieldValue> row : bigquery.listTableData(tableId).iterateAll()) {
        System.out.println(row);
      }
    }
  }

  /** Base class for actions whose single argument is a job id. */
  private abstract static class JobAction extends BigQueryAction<JobId> {
    @Override
    JobId parse(String... args) throws Exception {
      String message;
      if (args.length == 1) {
        return JobId.of(args[0]);
      } else if (args.length > 1) {
        message = "Too many arguments.";
      } else {
        // Fixed: previously said "Missing required query." (copy-paste from QueryAction).
        message = "Missing required job id.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    public String params() {
      return "<job>";
    }
  }

  /**
   * This class demonstrates how to list BigQuery Jobs.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/list">Jobs: list</a>
   */
  private static class ListJobsAction extends NoArgsAction {
    @Override
    public void run(BigQuery bigquery, Void arg) {
      for (Job job : bigquery.listJobs().iterateAll()) {
        System.out.println(job);
      }
    }
  }

  /**
   * This class demonstrates how to retrieve information on a BigQuery Job.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/get">Jobs: get</a>
   */
  private static class JobInfoAction extends JobAction {
    @Override
    public void run(BigQuery bigquery, JobId jobId) {
      System.out.println("Job info: " + bigquery.getJob(jobId));
    }
  }

  /**
   * This class demonstrates how to cancel a BigQuery Job.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/cancel">Jobs: cancel</a>
   */
  private static class CancelJobAction extends JobAction {
    @Override
    public void run(BigQuery bigquery, JobId jobId) {
      if (bigquery.cancel(jobId)) {
        System.out.println("Requested cancel for job " + jobId);
      } else {
        System.out.println("Job " + jobId + " not found");
      }
    }
  }

  /**
   * Base class for table-creating actions; subclasses only differ in how they parse arguments
   * into a {@link TableInfo}.
   */
  private abstract static class CreateTableAction extends BigQueryAction<TableInfo> {
    @Override
    void run(BigQuery bigquery, TableInfo table) throws Exception {
      Table createTable = bigquery.create(table);
      System.out.println("Created table:");
      System.out.println(createTable.toString());
    }

    /**
     * Parses {@code args[start..end)} as {@code <fieldName>:<primitiveType>} pairs into a Schema.
     *
     * @throws IllegalArgumentException on a malformed pair or unsupported type
     */
    static Schema parseSchema(String[] args, int start, int end) {
      Schema.Builder builder = Schema.newBuilder();
      for (int i = start; i < end; i++) {
        String[] fieldsArray = args[i].split(":");
        if (fieldsArray.length != 2) {
          throw new IllegalArgumentException("Unrecognized field definition '" + args[i] + "'.");
        }
        String fieldName = fieldsArray[0];
        String typeString = fieldsArray[1].toLowerCase();
        Field.Type fieldType;
        switch (typeString) {
          case "string":
            fieldType = Field.Type.string();
            break;
          case "integer":
            fieldType = Field.Type.integer();
            break;
          case "timestamp":
            fieldType = Field.Type.timestamp();
            break;
          case "float":
            fieldType = Field.Type.floatingPoint();
            break;
          case "boolean":
            fieldType = Field.Type.bool();
            break;
          case "bytes":
            fieldType = Field.Type.bytes();
            break;
          default:
            throw new IllegalArgumentException("Unrecognized field type '" + typeString + "'.");
        }
        builder.addField(Field.of(fieldName, fieldType));
      }
      return builder.build();
    }
  }

  /**
   * This class demonstrates how to create a simple BigQuery Table (i.e. a table created from a
   * {@link StandardTableDefinition}).
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/insert">Tables: insert
   *     </a>
   */
  private static class CreateSimpleTableAction extends CreateTableAction {
    @Override
    TableInfo parse(String... args) throws Exception {
      if (args.length >= 3) {
        String dataset = args[0];
        String table = args[1];
        TableId tableId = TableId.of(dataset, table);
        return TableInfo.of(tableId, StandardTableDefinition.of(parseSchema(args, 2, args.length)));
      }
      throw new IllegalArgumentException("Missing required arguments.");
    }

    @Override
    protected String params() {
      return "<dataset> <table> (<fieldName>:<primitiveType>)+";
    }
  }

  /**
   * This class demonstrates how to create a BigQuery External Table (i.e. a table created from a
   * {@link ExternalTableDefinition}).
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/insert">Tables: insert
   *     </a>
   */
  private static class CreateExternalTableAction extends CreateTableAction {
    @Override
    TableInfo parse(String... args) throws Exception {
      if (args.length >= 5) {
        String dataset = args[0];
        String table = args[1];
        TableId tableId = TableId.of(dataset, table);
        // Last argument is the source URI; args[3..length-1) are the schema fields.
        ExternalTableDefinition externalTableDefinition =
            ExternalTableDefinition.of(args[args.length - 1],
                parseSchema(args, 3, args.length - 1),
                FormatOptions.of(args[2]));
        return TableInfo.of(tableId, externalTableDefinition);
      }
      throw new IllegalArgumentException("Missing required arguments.");
    }

    @Override
    protected String params() {
      return "<dataset> <table> <format> (<fieldName>:<primitiveType>)+ <sourceUri>";
    }
  }

  /**
   * This class demonstrates how to create a BigQuery View Table (i.e. a table created from a
   * {@link ViewDefinition}).
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables/insert">Tables: insert
   *     </a>
   */
  private static class CreateViewAction extends CreateTableAction {
    @Override
    TableInfo parse(String... args) throws Exception {
      String message;
      if (args.length == 3) {
        String dataset = args[0];
        String table = args[1];
        String query = args[2];
        TableId tableId = TableId.of(dataset, table);
        return TableInfo.of(tableId, ViewDefinition.of(query));
      } else if (args.length < 3) {
        message = "Missing required dataset id, table id or query.";
      } else {
        message = "Too many arguments.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    protected String params() {
      return "<dataset> <table> <query>";
    }
  }

  /** Base class for actions that submit a job and block until it completes, reporting the outcome. */
  private abstract static class JobRunAction extends BigQueryAction<JobInfo> {
    @Override
    void run(BigQuery bigquery, JobInfo job) throws Exception {
      System.out.println("Creating job");
      Job startedJob = bigquery.create(job);
      while (!startedJob.isDone()) {
        System.out.println("Waiting for job " + startedJob.getJobId().getJob() + " to complete");
        Thread.sleep(1000L);
      }
      startedJob = startedJob.reload();
      if (startedJob.getStatus().getError() == null) {
        System.out.println("Job " + startedJob.getJobId().getJob() + " succeeded");
      } else {
        System.out.println("Job " + startedJob.getJobId().getJob() + " failed");
        System.out.println("Error: " + startedJob.getStatus().getError());
      }
    }
  }

  /**
   * This class demonstrates how to create a BigQuery Load Job and wait for it to complete.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert">Jobs: insert</a>
   */
  private static class LoadAction extends JobRunAction {
    @Override
    JobInfo parse(String... args) throws Exception {
      if (args.length >= 4) {
        String dataset = args[0];
        String table = args[1];
        String format = args[2];
        TableId tableId = TableId.of(dataset, table);
        LoadJobConfiguration configuration = LoadJobConfiguration.of(
            tableId, Arrays.asList(args).subList(3, args.length), FormatOptions.of(format));
        return JobInfo.of(configuration);
      }
      throw new IllegalArgumentException("Missing required arguments.");
    }

    @Override
    protected String params() {
      return "<dataset> <table> <format> <sourceUri>+";
    }
  }

  /**
   * This class demonstrates how to create a BigQuery Extract Job and wait for it to complete.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert">Jobs: insert</a>
   */
  private static class ExtractAction extends JobRunAction {
    @Override
    JobInfo parse(String... args) throws Exception {
      if (args.length >= 4) {
        String dataset = args[0];
        String table = args[1];
        String format = args[2];
        TableId tableId = TableId.of(dataset, table);
        ExtractJobConfiguration configuration = ExtractJobConfiguration.of(
            tableId, Arrays.asList(args).subList(3, args.length), format);
        return JobInfo.of(configuration);
      }
      throw new IllegalArgumentException("Missing required arguments.");
    }

    @Override
    protected String params() {
      return "<dataset> <table> <format> <destinationUri>+";
    }
  }

  /**
   * This class demonstrates how to create a BigQuery Copy Job and wait for it to complete.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert">Jobs: insert</a>
   */
  private static class CopyAction extends JobRunAction {
    @Override
    JobInfo parse(String... args) throws Exception {
      String message;
      if (args.length == 4) {
        TableId sourceTableId = TableId.of(args[0], args[1]);
        TableId destinationTableId = TableId.of(args[2], args[3]);
        return JobInfo.of(CopyJobConfiguration.of(destinationTableId, sourceTableId));
      } else if (args.length < 4) {
        // Fixed: the bound was previously '< 3', so exactly 3 arguments was
        // misreported as "Too many arguments."
        message = "Missing required source or destination table.";
      } else {
        message = "Too many arguments.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    protected String params() {
      return "<sourceDataset> <sourceTable> <destinationDataset> <destinationTable>";
    }
  }

  /**
   * This class demonstrates how to run a BigQuery SQL Query and wait for associated job to
   * complete. Results or errors are shown.
   *
   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/query">Jobs: query</a>
   */
  private static class QueryAction extends BigQueryAction<QueryRequest> {
    @Override
    void run(BigQuery bigquery, QueryRequest queryRequest) throws Exception {
      System.out.println("Running query");
      QueryResponse queryResponse = bigquery.query(queryRequest);
      while (!queryResponse.jobCompleted()) {
        System.out.println("Waiting for query job " + queryResponse.getJobId() + " to complete");
        Thread.sleep(1000L);
        queryResponse = bigquery.getQueryResults(queryResponse.getJobId());
      }
      if (!queryResponse.hasErrors()) {
        System.out.println("Query succeeded. Results:");
        for (List<FieldValue> row : queryResponse.getResult().iterateAll()) {
          System.out.println(row);
        }
      } else {
        System.out.println("Query completed with errors. Errors:");
        for (BigQueryError err : queryResponse.getExecutionErrors()) {
          System.out.println(err);
        }
      }
    }

    @Override
    QueryRequest parse(String... args) throws Exception {
      String message;
      if (args.length == 1) {
        return QueryRequest.of(args[0]);
      } else if (args.length > 1) {
        message = "Too many arguments.";
      } else {
        message = "Missing required query.";
      }
      throw new IllegalArgumentException(message);
    }

    @Override
    protected String params() {
      return "<query>";
    }
  }

  /**
   * This class demonstrates how to load data into a BigQuery Table from a local file.
   *
   * @see <a href="https://cloud.google.com/bigquery/loading-data-post-request#resumable">Resumable
   *     Upload</a>
   */
  private static class LoadFileAction
      extends BigQueryAction<Tuple<WriteChannelConfiguration, String>> {
    @Override
    void run(BigQuery bigquery, Tuple<WriteChannelConfiguration, String> configuration)
        throws Exception {
      System.out.println("Running insert");
      // Both channels are in try-with-resources: previously the WriteChannel was
      // closed manually and leaked if transferTo threw. Closing the WriteChannel
      // is also what finalizes the resumable upload on success.
      try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()));
          WriteChannel writeChannel = bigquery.writer(configuration.x())) {
        long position = 0;
        long written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel);
        while (written > 0) {
          position += written;
          written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel);
        }
      }
    }

    @Override
    Tuple<WriteChannelConfiguration, String> parse(String... args) throws Exception {
      if (args.length == 4) {
        String dataset = args[0];
        String table = args[1];
        String format = args[2];
        TableId tableId = TableId.of(dataset, table);
        WriteChannelConfiguration configuration =
            WriteChannelConfiguration.of(tableId, FormatOptions.of(format));
        return Tuple.of(configuration, args[3]);
      }
      throw new IllegalArgumentException("Missing required arguments.");
    }

    @Override
    protected String params() {
      return "<dataset> <table> <format> <filePath>";
    }
  }

  static {
    CREATE_ACTIONS.put("dataset", new CreateDatasetAction());
    CREATE_ACTIONS.put("table", new CreateSimpleTableAction());
    CREATE_ACTIONS.put("view", new CreateViewAction());
    CREATE_ACTIONS.put("external-table", new CreateExternalTableAction());
    INFO_ACTIONS.put("dataset", new DatasetInfoAction());
    INFO_ACTIONS.put("table", new TableInfoAction());
    INFO_ACTIONS.put("job", new JobInfoAction());
    LIST_ACTIONS.put("datasets", new ListDatasetsAction());
    LIST_ACTIONS.put("tables", new ListTablesAction());
    LIST_ACTIONS.put("jobs", new ListJobsAction());
    LIST_ACTIONS.put("data", new ListTableDataAction());
    DELETE_ACTIONS.put("dataset", new DeleteDatasetAction());
    DELETE_ACTIONS.put("table", new DeleteTableAction());
    ACTIONS.put("create", new ParentAction(CREATE_ACTIONS));
    ACTIONS.put("info", new ParentAction(INFO_ACTIONS));
    ACTIONS.put("list", new ParentAction(LIST_ACTIONS));
    ACTIONS.put("delete", new ParentAction(DELETE_ACTIONS));
    ACTIONS.put("cancel", new CancelJobAction());
    ACTIONS.put("load", new LoadAction());
    ACTIONS.put("extract", new ExtractAction());
    ACTIONS.put("copy", new CopyAction());
    ACTIONS.put("query", new QueryAction());
    ACTIONS.put("load-file", new LoadFileAction());
  }

  /** Prints the usage line plus every registered action with its parameter description. */
  private static void printUsage() {
    StringBuilder actionAndParams = new StringBuilder();
    for (Map.Entry<String, BigQueryAction> entry : ACTIONS.entrySet()) {
      actionAndParams.append("\n\t").append(entry.getKey());
      String param = entry.getValue().params();
      if (param != null && !param.isEmpty()) {
        actionAndParams.append(' ').append(param.replace("\n", "\n\t\t"));
      }
    }
    System.out.printf("Usage: %s [<project_id>] operation [entity] <args>*%s%n",
        BigQueryExample.class.getSimpleName(), actionAndParams);
  }

  /**
   * Entry point: an optional project id, then an action name, then action-specific arguments.
   * The first argument is taken as a project id only when it is not itself a known action name.
   */
  @SuppressWarnings("unchecked")
  public static void main(String... args) throws Exception {
    if (args.length < 1) {
      System.out.println("Missing required project id and action");
      printUsage();
      return;
    }
    BigQueryOptions.Builder optionsBuilder = BigQueryOptions.newBuilder();
    BigQueryAction action;
    String actionName;
    if (args.length >= 2 && !ACTIONS.containsKey(args[0])) {
      actionName = args[1];
      optionsBuilder.setProjectId(args[0]);
      action = ACTIONS.get(args[1]);
      args = Arrays.copyOfRange(args, 2, args.length);
    } else {
      actionName = args[0];
      action = ACTIONS.get(args[0]);
      args = Arrays.copyOfRange(args, 1, args.length);
    }
    if (action == null) {
      System.out.println("Unrecognized action.");
      printUsage();
      return;
    }
    BigQuery bigquery = optionsBuilder.build().getService();
    Object arg;
    try {
      arg = action.parse(args);
    } catch (IllegalArgumentException ex) {
      System.out.printf("Invalid input for action '%s'. %s%n", actionName, ex.getMessage());
      System.out.printf("Expected: %s%n", action.params());
      return;
    } catch (Exception ex) {
      System.out.println("Failed to parse arguments.");
      ex.printStackTrace();
      return;
    }
    action.run(bigquery, arg);
  }
}
/* See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * Esri Inc. licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.esri.gpt.catalog.harvest.repository;

import com.esri.gpt.catalog.arcims.ImsMetadataAdminDao;
import com.esri.gpt.catalog.context.CatalogIndexException;
import com.esri.gpt.framework.context.RequestContext;
import com.esri.gpt.framework.sql.ManagedConnection;
import com.esri.gpt.framework.util.UuidUtil;
import com.esri.gpt.framework.util.Val;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;

/**
 * Repository delete request.
 * <p>
 * Deletes the selected harvest repositories: their pending jobs, completed
 * jobs and harvesting history rows, plus the catalog record itself, within a
 * single JDBC transaction.
 */
public class HrDeleteRequest extends HrRequest {

// class variables =============================================================

// instance variables ==========================================================
/** Array of uids of harvest repositories to delete. */
private String[] _uuids = new String[]{};

// constructors ================================================================
/**
 * Create instance of the request.
 * @param requestContext request context
 * @param uuids uuids of records to delete
 */
public HrDeleteRequest(RequestContext requestContext, String[] uuids) {
  super(requestContext, new HrCriteria(), new HrResult());
  setUuids(uuids);
}

// properties ==================================================================
/**
 * Gets uuids of records to delete.
 * @return uuids of records to delete
 */
public String[] getUuids() {
  return _uuids;
}

/**
 * Sets uuids of records to delete.
 * Invalid entries (anything UuidUtil does not recognize as a UUID) are
 * silently dropped, so downstream SQL only ever sees well-formed ids.
 * @param uuids uuids of records to delete
 */
public void setUuids(String[] uuids) {
  ArrayList<String> validUuids = new ArrayList<String>();
  if (uuids != null) {
    for (String uuid: uuids) {
      uuid = Val.chkStr(uuid);
      if (UuidUtil.isUuid(uuid)) {
        validUuids.add(uuid);
      }
    }
  }
  _uuids = validUuids.toArray(new String[validUuids.size()]);
}

// methods =====================================================================
/**
 * Executes request.
 * <p>
 * For every valid UUID, deletes the repository's job, completed-job and
 * history rows (parameterized statements, executed once per UUID) and the
 * catalog record, all within one transaction. The number of modified records
 * is stored on the action result. Any failure rolls the transaction back.
 * @throws java.sql.SQLException if request execution fails
 */
public void execute() throws SQLException {
  Connection con = null;
  boolean autoCommit = true;
  if (getUuids().length > 0) {

    // initialize
    PreparedStatement stJobsDelete = null;
    PreparedStatement stCompletedJobsDelete = null;
    PreparedStatement stHistoryDelete = null;

    ImsMetadataAdminDao adminDao = new ImsMetadataAdminDao(getRequestContext());

    try {
      // One parameterized delete per table; each is executed once per UUID.
      // (A dead, unused string-concatenated UUID list was removed here — the
      // code already uses PreparedStatement placeholders exclusively.)
      String sbJobsDeleteSql =
          "delete from " + getHarvestingJobTableName() + " where HARVEST_ID = ?";
      String sbCompletedJobsDeleteSql =
          "delete from " + getHarvestingJobsCompletedTableName() + " where HARVEST_ID = ?";
      String sbHistoryDeleteSql =
          "delete from " + getHarvestingHistoryTableName() + " where HARVEST_ID = ?";

      // establish the connection
      ManagedConnection mc = returnConnection();
      con = mc.getJdbcConnection();
      // Remember the connection's auto-commit mode so it can be restored in finally.
      autoCommit = con.getAutoCommit();
      con.setAutoCommit(false);

      stJobsDelete = con.prepareStatement(sbJobsDeleteSql);
      stCompletedJobsDelete = con.prepareStatement(sbCompletedJobsDeleteSql);
      stHistoryDelete = con.prepareStatement(sbHistoryDeleteSql);

      PreparedStatement[] stmts = new PreparedStatement[]{
        stJobsDelete,
        stCompletedJobsDelete,
        stHistoryDelete,
      };

      logExpression(stJobsDelete.toString());
      logExpression(stCompletedJobsDelete.toString());
      logExpression(stHistoryDelete.toString());

      int nRowCount = 0;
      for (String uuid: getUuids()) {
        nRowCount += executeForOne(adminDao, stmts, uuid);
      }
      getActionResult().setNumberOfRecordsModified(nRowCount);

      con.commit();
    } catch (SQLException ex) {
      if (con != null) {
        con.rollback();
      }
      throw ex;
    } catch (Exception ex) {
      if (con != null) {
        con.rollback();
      }
      // Preserve the underlying failure as the cause (it was previously dropped,
      // making index/delete errors undiagnosable from the stack trace).
      SQLException sqlEx = new SQLException("Error deleting record.");
      sqlEx.initCause(ex);
      throw sqlEx;
    } finally {
      closeStatement(stJobsDelete);
      closeStatement(stHistoryDelete);
      closeStatement(stCompletedJobsDelete);
      if (con != null) {
        con.setAutoCommit(autoCommit);
      }
    }
  }
}

/**
 * Executes all prepared statements for one UUID and removes the catalog record.
 * @param adminDao admin dao
 * @param stmts array of statements
 * @param uuid UUID
 * @return number of records affected
 * @throws java.sql.SQLException if statement can not be executed
 */
private int executeForOne(ImsMetadataAdminDao adminDao,
  PreparedStatement[] stmts, String uuid)
  throws SQLException, CatalogIndexException {
  int nRowCount = 0;
  for (PreparedStatement st: stmts) {
    st.setString(1, uuid);
    // NOTE(review): '=' keeps only the last statement's count (history rows),
    // matching the original behavior; use '+=' if a total row count is intended.
    nRowCount = st.executeUpdate();
  }
  adminDao.deleteRecord(uuid);
  return nRowCount;
}

/**
 * Reads all records designated to be deleted.
 * NOTE(review): not referenced anywhere within this class's visible code;
 * retained in case callers or subclasses elsewhere rely on it — confirm before removing.
 * @return array of records to delete.
 * @throws SQLException if reading records fails
 */
private Map<String,HrRecord> readRecords() throws SQLException {
  TreeMap<String,HrRecord> records = new TreeMap<String,HrRecord>();
  for (String uuid: getUuids()) {
    HrSelectRequest request = new HrSelectRequest(getRequestContext(), uuid);
    request.execute();
    for (HrRecord hrRecord : request.getQueryResult().getRecords()) {
      records.put(hrRecord.getUuid(), hrRecord);
    }
  }
  return records;
}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.application.options.codeStyle.arrangement.component;

import com.intellij.application.options.codeStyle.arrangement.ArrangementConstants;
import com.intellij.application.options.codeStyle.arrangement.action.ArrangementRemoveConditionAction;
import com.intellij.application.options.codeStyle.arrangement.animation.ArrangementAnimationPanel;
import com.intellij.application.options.codeStyle.arrangement.color.ArrangementColorsProvider;
import com.intellij.application.options.codeStyle.arrangement.util.InsetsPanel;
import com.intellij.openapi.actionSystem.impl.ActionButton;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementAtomMatchCondition;
import com.intellij.psi.codeStyle.arrangement.std.*;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.RoundedLineBorder;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.ui.GridBag;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.Set;

/**
 * {@link ArrangementUiComponent} for {@link ArrangementAtomMatchCondition} representation.
 * <p/>
 * Renders a single atom condition as a rounded "chip": centered text, a rounded
 * (optionally dashed) border, and an optional close button shown on hover.
 * <p/>
 * Not thread-safe.
 *
 * @author Denis Zhdanov
 * @since 8/8/12 10:06 AM
 */
public class ArrangementAtomMatchConditionComponent implements ArrangementUiComponent {

  // Dashed border for free-text (reg-exp) conditions; plain rounded border otherwise.
  @NotNull private static final BorderStrategy TEXT_BORDER_STRATEGY       = new NameBorderStrategy();
  @NotNull private static final BorderStrategy PREDEFINED_BORDER_STRATEGY = new PredefinedConditionBorderStrategy();

  // Text control whose min/max/preferred size are pinned to myTextControlSize
  // (when set) so that chips of the same token type line up in columns.
  @NotNull private final SimpleColoredComponent myTextControl = new SimpleColoredComponent() {
    @NotNull
    @Override
    public Dimension getMinimumSize() {
      return getPreferredSize();
    }

    @Override
    public Dimension getMaximumSize() {
      return getPreferredSize();
    }

    @NotNull
    @Override
    public Dimension getPreferredSize() {
      return myTextControlSize == null ? super.getPreferredSize() : myTextControlSize;
    }

    @Override
    public String toString() {
      return "text component for " + myText;
    }
  };

  @NotNull private final Set<ArrangementSettingsToken> myAvailableTokens = ContainerUtilRt.newHashSet();

  @NotNull private final BorderStrategy                myBorderStrategy;
  @NotNull private final String                        myText;
  @NotNull private final ArrangementColorsProvider     myColorsProvider;
  @NotNull private final RoundedLineBorder             myBorder;
  @NotNull private final ArrangementAtomMatchCondition myCondition;
  @NotNull private final ArrangementAnimationPanel     myAnimationPanel;

  @Nullable private final ActionButton                                         myCloseButton;
  @Nullable private final Rectangle                                            myCloseButtonBounds;
  @Nullable private final Consumer<ArrangementAtomMatchConditionComponent>     myCloseCallback;

  @NotNull private Color myBackgroundColor;

  @Nullable private final Dimension myTextControlSize;
  // Updated lazily during paint (see the animation panel's paint() override).
  @Nullable private Rectangle       myScreenBounds;
  @Nullable private Listener        myListener;

  private boolean myInverted = false;
  private boolean myEnabled  = true;
  private boolean mySelected;
  // Refreshed during paint from the current pointer location.
  private boolean myCloseButtonHovered;

  // cached value for inverted atom condition, e.g. condition: 'static', opposite: 'not static'
  @Nullable private ArrangementAtomMatchCondition myOppositeCondition;
  @Nullable private String                        myInvertedText;

  /**
   * Builds the chip UI for the given condition.
   *
   * @param manager        provides per-token preferred widths so same-type chips align
   * @param colorsProvider supplies text/border colors for selected/unselected states
   * @param condition      the atom condition this component represents
   * @param closeCallback  invoked when the close button is clicked; {@code null} means no close button
   */
  public ArrangementAtomMatchConditionComponent(@NotNull ArrangementStandardSettingsManager manager,
                                                @NotNull ArrangementColorsProvider colorsProvider,
                                                @NotNull ArrangementAtomMatchCondition condition,
                                                @Nullable Consumer<ArrangementAtomMatchConditionComponent> closeCallback)
  {
    myColorsProvider = colorsProvider;
    myCondition = condition;
    myAvailableTokens.add(condition.getType());
    myCloseCallback = closeCallback;
    ArrangementSettingsToken type = condition.getType();
    // Reg-exp conditions get the dashed "name" border; everything else the plain one.
    if (StdArrangementTokenType.REG_EXP.is(type)) {
      myBorderStrategy = TEXT_BORDER_STRATEGY;
    }
    else {
      myBorderStrategy = PREDEFINED_BORDER_STRATEGY;
    }
    // Choose the display text: token name for boolean/self-valued conditions,
    // "name value" for reg-exps, raw value otherwise.
    if (type.equals(condition.getValue()) || condition.getValue() instanceof Boolean) {
      myText = type.getRepresentationValue();
    }
    else if (StdArrangementTokenType.REG_EXP.is(type)) {
      myText = String.format("%s %s", type.getRepresentationValue().toLowerCase(), condition.getValue());
    }
    else {
      myText = condition.getValue().toString();
    }

    myTextControl.setTextAlign(SwingConstants.CENTER);
    myTextControl.append(myText, SimpleTextAttributes.fromTextAttributes(colorsProvider.getTextAttributes(type, false)));
    myTextControl.setOpaque(false);
    // Fix the width for predefined tokens so chips of the same type align.
    int maxWidth = manager.getWidth(type);
    if (!StdArrangementTokenType.REG_EXP.is(type) && maxWidth > 0) {
      myTextControlSize = new Dimension(maxWidth, myTextControl.getPreferredSize().height);
    }
    else {
      myTextControlSize = myTextControl.getPreferredSize();
    }

    final ArrangementRemoveConditionAction action = new ArrangementRemoveConditionAction();
    Icon buttonIcon = action.getTemplatePresentation().getIcon();
    Dimension buttonSize = new Dimension(buttonIcon.getIconWidth(), buttonIcon.getIconHeight());
    if (closeCallback == null) {
      myCloseButton = null;
      myCloseButtonBounds = null;
    }
    else {
      myCloseButton = new ActionButton(action,
                                       action.getTemplatePresentation().clone(),
                                       ArrangementConstants.MATCHING_RULES_CONTROL_PLACE,
                                       buttonSize)
      {
        @Override
        protected Icon getIcon() {
          // Swap to the hovered icon based on the flag refreshed during paint.
          return myCloseButtonHovered ? action.getTemplatePresentation().getHoveredIcon() : action.getTemplatePresentation().getIcon();
        }
      };
      myCloseButtonBounds = new Rectangle(0, 0, buttonIcon.getIconWidth(), buttonIcon.getIconHeight());
    }

    JPanel insetsPanel = new JPanel(new GridBagLayout()) {
      @Override
      public String toString() {
        return "insets panel for " + myText;
      }
    };
    GridBagConstraints constraints = new GridBag().anchor(GridBagConstraints.WEST).weightx(1)
      .insets(0, 0, 0, myCloseButton == null ? ArrangementConstants.BORDER_ARC_SIZE : 0);
    insetsPanel.add(myTextControl, constraints);
    insetsPanel.setBorder(IdeBorderFactory.createEmptyBorder(0, ArrangementConstants.HORIZONTAL_PADDING, 0, 0));
    insetsPanel.setOpaque(false);

    JPanel roundBorderPanel = new JPanel(new GridBagLayout()) {
      @Override
      public void paint(Graphics g) {
        // Refresh close-button hover state from the live pointer position,
        // then fill the rounded background before painting children.
        Rectangle buttonBounds = getCloseButtonScreenBounds();
        if (buttonBounds != null) {
          final PointerInfo info = MouseInfo.getPointerInfo();
          myCloseButtonHovered = info != null && buttonBounds.contains(info.getLocation());
        }
        Rectangle bounds = getBounds();
        g.setColor(myBackgroundColor);
        g.fillRoundRect(0, 0, bounds.width, bounds.height, ArrangementConstants.BORDER_ARC_SIZE, ArrangementConstants.BORDER_ARC_SIZE);
        super.paint(g);
      }

      @Override
      public String toString() {
        return "round border panel for " + myText;
      }

      @Override
      protected void paintBorder(Graphics g) {
        myBorderStrategy.setup((Graphics2D)g);
        super.paintBorder(g);
      }
    };
    roundBorderPanel.add(insetsPanel, new GridBag().anchor(GridBagConstraints.WEST));
    if (myCloseButton != null) {
      roundBorderPanel.add(new InsetsPanel(myCloseButton), new GridBag().anchor(GridBagConstraints.EAST));
    }
    myBorder = myBorderStrategy.create();
    roundBorderPanel.setBorder(myBorder);
    roundBorderPanel.setOpaque(false);

    myAnimationPanel = new ArrangementAnimationPanel(roundBorderPanel, false, true) {
      @Override
      public void paint(Graphics g) {
        // Capture current screen bounds (used for close-button hit testing),
        // and dim the whole component when disabled.
        Point point = UIUtil.getLocationOnScreen(this);
        if (point != null) {
          Rectangle bounds = myAnimationPanel.getBounds();
          myScreenBounds = new Rectangle(point.x, point.y, bounds.width, bounds.height);
        }
        if (!myEnabled && g instanceof Graphics2D) {
          ((Graphics2D)g).setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.3f));
        }
        super.paint(g);
      }
    };

    setSelected(false);
    if (myCloseButton != null) {
      myCloseButton.setVisible(false);
    }
    setData(myCondition.getValue());
  }

  /**
   * Returns the condition represented by this component, taking the current
   * inverted state into account: for an inverted boolean condition a cached
   * opposite condition is returned instead of the original.
   */
  @NotNull
  @Override
  public ArrangementAtomMatchCondition getMatchCondition() {
    if (Boolean.valueOf(myInverted).equals(myCondition.getValue())) {
      if (myOppositeCondition == null) {
        myOppositeCondition = new ArrangementAtomMatchCondition(myCondition.getType(), !myInverted);
      }
      return myOppositeCondition;
    }
    return myCondition;
  }

  /**
   * Updates the inverted flag from the given boolean value; only applies when
   * the underlying token is invertible. Non-boolean data is ignored.
   */
  @Override
  public void setData(@NotNull Object data) {
    if (data instanceof Boolean && myCondition.getType() instanceof InvertibleArrangementSettingsToken) {
      myInverted = !((Boolean)data);
      updateComponentText(mySelected);
    }
  }

  @NotNull
  @Override
  public JComponent getUiComponent() {
    return myAnimationPanel;
  }

  @Nullable
  @Override
  public Rectangle getScreenBounds() {
    return myScreenBounds;
  }

  /**
   * Instructs current component that it should {@link #getUiComponent() draw} itself according to the given 'selected' state.
   *
   * @param selected    flag that indicates if current component should be drawn as 'selected'
   */
  @Override
  public void setSelected(boolean selected) {
    boolean notifyListener = selected != mySelected;
    mySelected = selected;
    TextAttributes attributes = updateComponentText(selected);
    myBorder.setColor(myColorsProvider.getBorderColor(selected));
    myBackgroundColor = attributes.getBackgroundColor();
    if (notifyListener && myListener != null) {
      myListener.stateChanged();
    }
  }

  // Re-renders the text with attributes for the given selection state and
  // returns those attributes (caller also uses the background color).
  @NotNull
  private TextAttributes updateComponentText(boolean selected) {
    myTextControl.clear();
    TextAttributes attributes = myColorsProvider.getTextAttributes(myCondition.getType(), selected);
    myTextControl.append(getComponentText(), SimpleTextAttributes.fromTextAttributes(attributes));
    return attributes;
  }

  // Text for the current state; the inverted representation is computed lazily.
  private String getComponentText() {
    if (myInverted) {
      if (StringUtil.isEmpty(myInvertedText)) {
        final ArrangementSettingsToken token = myCondition.getType();
        assert token instanceof InvertibleArrangementSettingsToken;
        myInvertedText = ((InvertibleArrangementSettingsToken)token).getInvertedRepresentationValue();
      }
      return myInvertedText;
    }
    return myText;
  }

  @Override
  public boolean isEnabled() {
    return myEnabled;
  }

  /**
   * Instructs current component that it should {@link #getUiComponent() draw} itself according to the given 'enabled' state.
   *
   * @param enabled    flag that indicates if current component should be drawn as 'enabled'
   */
  @Override
  public void setEnabled(boolean enabled) {
    myEnabled = enabled;
    if (!enabled) {
      setSelected(false);
    }
  }

  /**
   * Shows the close button on first movement over the component and returns the
   * region to repaint when the hover state over the button changes; {@code null}
   * when nothing needs repainting.
   */
  @Nullable
  @Override
  public Rectangle onMouseMove(@NotNull MouseEvent event) {
    Rectangle buttonBounds = getCloseButtonScreenBounds();
    if (buttonBounds == null) {
      return null;
    }
    if (myCloseButton != null && !myCloseButton.isVisible()) {
      myCloseButton.setVisible(true);
      return buttonBounds;
    }
    boolean mouseOverButton = buttonBounds.contains(event.getLocationOnScreen());
    // XOR: repaint only when the hover state actually flipped.
    return (mouseOverButton ^ myCloseButtonHovered) ? buttonBounds : null;
  }

  /** Fires the close callback when the release happens over the close button. */
  @Override
  public void onMouseRelease(@NotNull MouseEvent event) {
    Rectangle buttonBounds = getCloseButtonScreenBounds();
    if (buttonBounds != null && myCloseCallback != null && buttonBounds.contains(event.getLocationOnScreen())) {
      myCloseCallback.consume(this);
      event.consume();
    }
  }

  @Override
  public Rectangle onMouseEntered(@NotNull MouseEvent e) {
    if (myCloseButton != null) {
      myCloseButton.setVisible(true);
      return getCloseButtonScreenBounds();
    }
    return null;
  }

  @Nullable
  @Override
  public Rectangle onMouseExited() {
    if (myCloseButton == null) {
      return null;
    }
    myCloseButton.setVisible(false);
    return getCloseButtonScreenBounds();
  }

  // Close-button bounds in screen coordinates, or null when there is no button
  // or the component has not been painted yet (myScreenBounds unset).
  @Nullable
  private Rectangle getCloseButtonScreenBounds() {
    if (myCloseButton == null || myScreenBounds == null) {
      return null;
    }
    Rectangle buttonBounds = SwingUtilities.convertRectangle(myCloseButton.getParent(), myCloseButtonBounds, myAnimationPanel);
    buttonBounds.x += myScreenBounds.x;
    buttonBounds.y += myScreenBounds.y;
    return buttonBounds;
  }

  @NotNull
  public ArrangementAnimationPanel getAnimationPanel() {
    return myAnimationPanel;
  }

  @Override
  public String toString() {
    return getComponentText();
  }

  @NotNull
  @Override
  public ArrangementSettingsToken getToken() {
    return myCondition.getType();
  }

  @NotNull
  @Override
  public Set<ArrangementSettingsToken> getAvailableTokens() {
    return myAvailableTokens;
  }

  /** Token choice is not supported for an atom condition component. */
  @Override
  public void chooseToken(@NotNull ArrangementSettingsToken data) throws IllegalArgumentException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isSelected() {
    return mySelected;
  }

  /** Resets to the default, non-inverted, unselected presentation. */
  @Override
  public void reset() {
    setSelected(false);
    setData(true);
  }

  @Override
  public int getBaselineToUse(int width, int height) {
    return -1;
  }

  @SuppressWarnings("NullableProblems")
  @Override
  public void setListener(@NotNull Listener listener) {
    myListener = listener;
  }

  /**
   * Click on an already-selected invertible chip toggles its inverted state;
   * for non-invertible (or already inverted) chips it just deselects.
   */
  @Override
  public void handleMouseClickOnSelected() {
    if (myInverted || !(myCondition.getType() instanceof InvertibleArrangementSettingsToken)) {
      setSelected(false);
    }
    setData(myInverted);
  }

  @Override
  public boolean alwaysCanBeActive() {
    return myInverted;
  }

  // Strategy deciding how the chip border is created and stroked.
  private interface BorderStrategy {
    RoundedLineBorder create();
    void setup(@NotNull Graphics2D g);
  }

  private static class PredefinedConditionBorderStrategy implements BorderStrategy {
    @Override
    public RoundedLineBorder create() {
      return IdeBorderFactory.createRoundedBorder(ArrangementConstants.BORDER_ARC_SIZE);
    }

    @Override
    public void setup(@NotNull Graphics2D g) {
      // Plain solid border — no special stroke required.
    }
  }

  private static class NameBorderStrategy implements BorderStrategy {

    // Dashed stroke (5 px on / 5 px off) to visually mark free-text conditions.
    @NotNull private final BasicStroke myStroke = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 1, new float[]{5, 5}, 0);

    @Override
    public RoundedLineBorder create() {
      return IdeBorderFactory.createRoundedBorder(ArrangementConstants.BORDER_ARC_SIZE, 2);
    }

    @Override
    public void setup(@NotNull Graphics2D g) {
      g.setStroke(myStroke);
    }
  }
}
/* * Copyright 2018 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.datacollector.usagestats; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.streamsets.datacollector.bundles.BundleType; import com.streamsets.datacollector.bundles.SupportBundle; import com.streamsets.datacollector.bundles.SupportBundleManager; import com.streamsets.datacollector.config.PipelineConfiguration; import com.streamsets.datacollector.io.DataStore; import com.streamsets.datacollector.json.ObjectMapperFactory; import com.streamsets.datacollector.main.BuildInfo; import com.streamsets.datacollector.main.RuntimeInfo; import com.streamsets.datacollector.task.AbstractTask; import com.streamsets.datacollector.util.Configuration; import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; public class StatsCollectorTask extends AbstractTask implements StatsCollector { private static final Logger LOG = 
LoggerFactory.getLogger(StatsCollectorTask.class); static final String ROLL_FREQUENCY_CONFIG = "stats.rollFrequency.days"; private static final int ROLL_FREQUENCY_DEFAULT = 7; private static final int REPORT_STATS_FAILED_COUNT_LIMIT = 5; private static final int REPORT_PERIOD = 60; private static final int EXTENDED_REPORT_STATS_FAILED_COUNT_LIMIT = 3; static final String OPT_FILE = "opt-stats.json"; static final String STATS_FILE = "stats.json"; static final String STATS_ACTIVE_KEY = "stats.active"; static final String STATS_LAST_REPORT_KEY = "stats.lastReport"; private final BuildInfo buildInfo; private final RuntimeInfo runtimeInfo; private final long rollFrequencyMillis; private final SafeScheduledExecutorService executorService; private final SupportBundleManager bundleManager; private final File optFile; private final File statsFile; private boolean opted; private volatile boolean active; private long lastReport; private ScheduledFuture future; private volatile StatsInfo statsInfo; private int reportStatsFailedCount; private int extendedReportStatsFailedCount; public StatsCollectorTask( BuildInfo buildInfo, RuntimeInfo runtimeInfo, Configuration config, SafeScheduledExecutorService executorService, SupportBundleManager bundleManager ) { super("StatsCollector"); this.buildInfo = buildInfo; this.runtimeInfo = runtimeInfo; rollFrequencyMillis = TimeUnit.DAYS.toMillis(config.get(ROLL_FREQUENCY_CONFIG, ROLL_FREQUENCY_DEFAULT)); this.executorService = executorService; this.bundleManager = bundleManager; optFile = new File(runtimeInfo.getDataDir(), OPT_FILE); statsFile = new File(runtimeInfo.getDataDir(), STATS_FILE); reportStatsFailedCount = 0; extendedReportStatsFailedCount = 0; } @VisibleForTesting protected BuildInfo getBuildInfo() { return buildInfo; } @VisibleForTesting protected RuntimeInfo getRuntimeInfo() { return runtimeInfo; } @VisibleForTesting protected SupportBundleManager getBundleManager() { return bundleManager; } @VisibleForTesting protected File 
getOptFile() { return optFile; } @VisibleForTesting protected File getStatsFile() { return statsFile; } @VisibleForTesting protected long getRollFrequencyMillis() { return rollFrequencyMillis; } @Override public StatsInfo getStatsInfo() { return statsInfo; } @Override protected void initTask() { super.initTask(); statsInfo = new StatsInfo(); if (runtimeInfo.isClusterSlave()) { opted = true; active = false; LOG.debug("Cluster slave, stats collection is disabled"); } else { opted = optFile.exists(); if (opted) { try (InputStream is = new FileInputStream(optFile)) { Map map = ObjectMapperFactory.get().readValue(is, Map.class); if (map == null) { opted = false; active = false; LOG.warn("Stats collection opt-in not properly set, switching off and re-opting"); } else { if (map.containsKey(STATS_ACTIVE_KEY)) { opted = true; active = (Boolean) map.get(STATS_ACTIVE_KEY); } if (active) { if (map.containsKey(STATS_LAST_REPORT_KEY)) { lastReport = (Long) map.get(STATS_LAST_REPORT_KEY); } } } } catch (IOException ex) { opted = false; active = false; LOG.warn("Stats collection opt-in error, switching off and re-opting. Error: {}", ex.getMessage(), ex); } } if (active) { if (statsFile.exists()) { DataStore ds = new DataStore(statsFile); try { try (InputStream is = ds.getInputStream()) { StatsInfo data = ObjectMapperFactory.get().readValue(is, StatsInfo.class); if (data == null) { opted = false; active = false; LOG.warn("Stats collection data is missing, switching off and re-opting"); } else { statsInfo = data; LOG.debug("Stats collection loaded"); } } } catch (IOException ex) { opted = false; active = false; LOG.warn("Stats collection data is invalid, switching off and re-opting. Error: {}", ex.getMessage(), ex); } } } if (!opted) { try { if (optFile.exists()) { if (optFile.delete()) { LOG.error("Could not delete opt-in status file. Stats Collection is disabled"); } } } catch (Exception ex) { LOG.error( "Could not delete opt-in status file. Stats Collection is disabled. 
Error: {}", ex.getMessage(), ex ); } } if (!active) { try { if (statsFile.exists()) { if (statsFile.delete()) { LOG.error("Could not delete stats collected data file. Stats Collection is disabled."); } } } catch (Exception ex) { LOG.error( "Could not delete stats collected data file. Stats Collection is disabled. Error: {}", ex.getMessage(), ex ); } } } if (!getRuntimeInfo().isClusterSlave()) { LOG.info("Stats Collection, opted '{}, active '{}'", opted, active); } // when disabled all persistency/reporting done by the Runnable is a No Op. getStatsInfo().startSystem(); getRunnable().run(); future = executorService.scheduleAtFixedRate(getRunnable(), REPORT_PERIOD, REPORT_PERIOD, TimeUnit.SECONDS); } Runnable getRunnable() { return () -> { if (active) { if (getStatsInfo().rollIfNeeded(getBuildInfo(), getRuntimeInfo(), getRollFrequencyMillis())) { LOG.debug("Stats collection data rolled"); } if (!getStatsInfo().getCollectedStats().isEmpty()) { LOG.debug("Reporting"); if (reportStats(getStatsInfo().getCollectedStats())) { LOG.debug("Reported"); reportStatsFailedCount = 0; extendedReportStatsFailedCount = 0; getStatsInfo().getCollectedStats().clear(); } else { reportStatsFailedCount++; LOG.debug("Reporting has failed {} time(s) in a row", reportStatsFailedCount); if (reportStatsFailedCount > REPORT_STATS_FAILED_COUNT_LIMIT) { reportStatsFailedCount = 0; extendedReportStatsFailedCount++; if (extendedReportStatsFailedCount > EXTENDED_REPORT_STATS_FAILED_COUNT_LIMIT) { LOG.warn("Reporting has failed too many times and will be switched off", reportStatsFailedCount); extendedReportStatsFailedCount = 0; future.cancel(false); future = executorService.scheduleAtFixedRate( getRunnable(), REPORT_PERIOD, REPORT_PERIOD, TimeUnit.SECONDS ); setActive(false); } else { int delay = (int)Math.pow(2, extendedReportStatsFailedCount - 1); LOG.warn("Reporting will back off for {} day(s)", delay); future.cancel(false); future = executorService.scheduleAtFixedRate( getRunnable(), delay * 60 * 
60 * 24, REPORT_PERIOD, TimeUnit.SECONDS ); } } } } saveStats(); } }; } protected boolean reportStats(List<StatsBean> stats) { try { getBundleManager().uploadNewBundleFromInstances( Collections.singletonList(new StatsGenerator(stats)), BundleType.STATS ); return true; } catch (IOException ex) { LOG.warn("Reporting failed. Error: {}", ex.getMessage(), ex); return false; } } protected void saveStats() { DataStore ds = new DataStore(statsFile); try { try (OutputStream os = ds.getOutputStream()) { ObjectMapperFactory.get().writeValue(os, getStatsInfo().snapshot()); ds.commit(os); LOG.debug("Saved stats collections"); } } catch (IOException ex) { opted = false; active = false; LOG.warn("Could not save stats collection, Disabling and re-opting. Error: {}", ex.getMessage(), ex); } finally { ds.release(); } } @Override protected void stopTask() { if (getFuture() != null) { getFuture().cancel(false); } getStatsInfo().stopSystem(); getRunnable().run(); super.stopTask(); } @Override public boolean isOpted() { return opted; } @Override public boolean isActive() { return active; } @Override public void setActive(boolean active) { if (!isOpted() || isActive() != active) { LOG.info("Setting stats collection to '{}'", active); try (OutputStream os = new FileOutputStream(optFile)) { ObjectMapperFactory.get().writeValue( os, ImmutableMap.of(STATS_ACTIVE_KEY, active, STATS_LAST_REPORT_KEY, System.currentTimeMillis()) ); this.active = active; opted = true; } catch (IOException ex) { this.active = false; opted = false; LOG.warn("Could not change stats collection state, Disabling and re-opting. 
Error: {}", ex.getMessage(), ex); } getStatsInfo().reset(); saveStats(); } } @Override public void startPipeline(PipelineConfiguration pipeline) { getStatsInfo().startPipeline(pipeline); } @Override public void stopPipeline(PipelineConfiguration pipeline) { getStatsInfo().stopPipeline(pipeline); } @Override public void incrementRecordCount(long count) { getStatsInfo().incrementRecordCount(count); } public ScheduledFuture getFuture() { return future; } }
/** * Created by IntelliJ IDEA. * User: Lennart * Date: 9-feb-2004 * Time: 18:01:22 */ package com.compomics.mslims.util.fileio; import com.compomics.mslims.util.workers.LoadUltraflexXMLWorker; import org.apache.log4j.Logger; import com.compomics.mslims.util.mascot.MascotIdentifiedSpectrum; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import org.xmlpull.v1.XmlPullParserFactory; import java.io.*; import java.math.BigDecimal; import java.util.Iterator; import java.util.SortedSet; import java.util.TreeSet; /* * CVS information: * * $Revision: 1.4 $ * $Date: 2004/06/30 08:46:23 $ */ /** * This class represents a Bruker Ultraflex XML spectrum file. * * @author Lennart Martens * @version $Id: UltraflexXMLFile.java,v 1.4 2004/06/30 08:46:23 lennart Exp $ */ public class UltraflexXMLFile extends SpectrumFileAncestor { // Class specific log4j logger for UltraflexXMLFile instances. private static Logger logger = Logger.getLogger(UltraflexXMLFile.class); /** * The Factory for the XmlPullParser. Note that it is static. */ private static XmlPullParserFactory iFactory = null; /** * This constructor creates an in-memory representation of the specified Ultraflex XML file. * * @param aFilename String with the filename for the file. * @throws IOException whenever the XML file could not be read. */ public UltraflexXMLFile(String aFilename) throws IOException { this(new File(aFilename)); } /** * This constructor creates an in-memory representation of the specified Ultraflex XML file. * * @param aFile File with the XML file. * @throws IOException whenever the XML file could not be read. 
*/ public UltraflexXMLFile(File aFile) throws IOException { if (iFactory == null) { try { iFactory = XmlPullParserFactory.newInstance(System.getProperty(XmlPullParserFactory.PROPERTY_NAME), null); iFactory.setNamespaceAware(true); } catch (XmlPullParserException xppe) { logger.error(xppe.getMessage(), xppe); throw new IOException("Unable to create XMLPullParserFactory: " + xppe.getMessage() + "!"); } } if (!aFile.exists()) { throw new IOException("XML file '" + aFile.getCanonicalPath() + "' was not found!"); } else { StringBuffer lsb = new StringBuffer(); BufferedReader br = new BufferedReader(new FileReader(aFile)); String line = null; while ((line = br.readLine()) != null) { lsb.append(line + "\n"); } br.close(); this.parseFromString(lsb.toString()); // Get the precursor peak mass from the pathname. // It is located in the folder called '*.LIFT.LIFT', // where '*' is the mass. Note that the mass will // contain a decimal point. this.iPrecursorMz = this.extractPrecursorMZFromPath(aFile); // Charge for MALDI systems is '+1'. this.iCharge = 1; // Set the intensity arbitrarily to '1.0'. this.iIntensity = 1.0; this.iFilename = aFile.getName(); } } /** * Compares this object with the specified object for order. Returns a negative integer, zero, or a positive * integer as this object is less than, equal to, or greater than the specified object.<p> * <p/> * In the foregoing description, the notation <tt>sgn(</tt><i>expression</i><tt>)</tt> designates the mathematical * <i>signum</i> function, which is defined to return one of <tt>-1</tt>, <tt>0</tt>, or <tt>1</tt> according to * whether the value of <i>expression</i> is negative, zero or positive. * <p/> * The implementor must ensure <tt>sgn(x.compareTo(y)) == -sgn(y.compareTo(x))</tt> for all <tt>x</tt> and * <tt>y</tt>. 
(This implies that <tt>x.compareTo(y)</tt> must throw an exception iff <tt>y.compareTo(x)</tt> * throws an exception.)<p> * <p/> * The implementor must also ensure that the relation is transitive: <tt>(x.compareTo(y)&gt;0 &amp;&amp; * y.compareTo(z)&gt;0)</tt> implies <tt>x.compareTo(z)&gt;0</tt>.<p> * <p/> * Finally, the implementer must ensure that <tt>x.compareTo(y)==0</tt> implies that <tt>sgn(x.compareTo(z)) == * sgn(y.compareTo(z))</tt>, for all <tt>z</tt>.<p> * <p/> * It is strongly recommended, but <i>not</i> strictly required that <tt>(x.compareTo(y)==0) == (x.equals(y))</tt>. * Generally speaking, any class that implements the <tt>Comparable</tt> interface and violates this condition * should clearly indicate this fact. The recommended language is "Note: this class has a natural ordering that is * inconsistent with equals." * * @param o the Object to be compared. * @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than * the specified object. * @throws ClassCastException if the specified object's type prevents it from being compared to this Object. */ public int compareTo(Object o) { int compared = 0; UltraflexXMLFile file = (UltraflexXMLFile) o; // See if the run numbers differ. double delta = this.iPrecursorMz - file.iPrecursorMz; if (delta < 0.0) { compared = -1; } else if (delta > 0.0) { compared = 1; } return compared; } /** * This method checks whether the MascotIdentifiedSpectrum corresponds to this spectrum. The precise method for * comparison is up to the individual implementations. * * @param aMIS MascotIdentifiedSpectrum to compare to. * @return boolean which indicates whether these objects correspond. 
*/ public boolean corresponds(MascotIdentifiedSpectrum aMIS) { boolean result = false; if ((aMIS.getPrecursorMZ() == this.iPrecursorMz) && (aMIS.getChargeState() == this.iCharge) ) { result = true; } return result; } /** * This method allows the caller to write the spectrum file to the specified folder using its current filename. * * @param aParentDir File with the parent directory to put the file in. * @throws IOException whenever the write process failed. */ public void writeToFile(File aParentDir) throws IOException { if (!aParentDir.exists() && !aParentDir.isDirectory()) { throw new IOException("Parent '" + aParentDir.getCanonicalPath() + "' does not exist or is not a directory!"); } File output = new File(aParentDir, this.iFilename); FileOutputStream fos = new FileOutputStream(output); this.writeToStream(fos); fos.flush(); fos.close(); } /** * This method allows to write the spectrum file to the specified OutputStream. * * @param aOut OutputStream to write the file to. This Stream will <b>NOT</b> be closed by this method. * @throws IOException when the write operation fails. */ public void writeToStream(OutputStream aOut) throws IOException { String content = this.getMGFFormat(); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(aOut)); bw.write(content); bw.flush(); bw.close(); } /** * This method returns this peaklist in MGF format. * * @return String with the MGF format for this spectrum. */ public String getMGFFormat() { StringBuffer content = new StringBuffer(); // First start with the 'BEGIN IONS'. content.append("BEGIN IONS\n"); // Next up: the title. We just use the original (PKL) filename here. content.append("TITLE=" + this.iFilename + "\n"); // Now the parent mass and intensity. content.append("PEPMASS=" + this.iPrecursorMz + " " + this.iIntensity + "\n"); // Now the charge. Note the extra processing for MGF notation (eg. 1+). content.append("CHARGE=" + Math.abs(this.iCharge) + (this.iCharge > 0 ? 
"+" : "-") + "\n"); // Now all the peaks. SortedSet ss = new TreeSet(this.iPeaks.keySet()); Iterator it = ss.iterator(); while (it.hasNext()) { Double tempKey = (Double) it.next(); BigDecimal lDouble = new BigDecimal(tempKey.doubleValue()).setScale(4, BigDecimal.ROUND_HALF_UP); content.append(lDouble.toString() + " " + new BigDecimal(((Double) this.iPeaks.get(tempKey)).doubleValue()).setScale(4, BigDecimal.ROUND_HALF_UP).toString() + "\n"); } // Finally, 'END IONS'. content.append("END IONS\n"); return content.toString(); } /** * This method parses the XML file from the content. * * @param aContents String with the XML file contents. * @throws IOException whenever the XML format could not be parsed. */ private void parseFromString(String aContents) throws IOException { try { StringReader reader = new StringReader(aContents); XmlPullParser xpp = iFactory.newPullParser(); xpp.setInput(reader); int eventType = xpp.getEventType(); boolean validated = false; Double lastKey = null; while (eventType != XmlPullParser.END_DOCUMENT) { switch (eventType) { case XmlPullParser.START_DOCUMENT: eventType = xpp.next(); break; case XmlPullParser.START_TAG: String start = xpp.getName(); if (start.equals("pklist")) { validated = true; eventType = xpp.next(); } else if (start.equals("pk")) { lastKey = processPeak(xpp); eventType = xpp.getEventType(); } break; case XmlPullParser.END_TAG: String name = xpp.getName(); if (name.equals("pklist")) { // Precursor mass is taken from the pathname now, // and this is done in the constructor. /* // We're through. // Delete the last key from the peaklist // since this is the precursor, and // init it as such. if(lastKey != null) { Double intensity = (Double)this.iPeaks.remove(lastKey); // Always M+H for MALDI. 
this.iCharge = 1; this.iIntensity = intensity.doubleValue(); this.iPrecursorMz = lastKey.doubleValue(); } */ } eventType = xpp.next(); if (eventType == XmlPullParser.END_DOCUMENT) { } break; case XmlPullParser.TEXT: eventType = xpp.next(); break; default: eventType = xpp.next(); break; } } if (!validated) { throw new IOException("No root tag '<pklist>' found in the XML document!"); } } catch (XmlPullParserException xppe) { throw new IOException(xppe.getMessage()); } } /** * This method reads the data for a single peak. * * @param aXpp XmlPullParser to read the data from. * @return Double with the last key that was added to the peak list. This peak is the precursor. * @throws IOException when the XML parsing failed. * @throws XmlPullParserException when the XML pull parser encountered an error. */ private Double processPeak(XmlPullParser aXpp) throws IOException, XmlPullParserException { int eventType = aXpp.next(); boolean lbContinue = true; double mass = 0.0; double intensity = 0.0; while (lbContinue) { switch (eventType) { case XmlPullParser.START_TAG: String start = aXpp.getName(); if (start.equals("absi")) { intensity = this.getIntensity(aXpp); } else if (start.equals("mass")) { mass = this.getMass(aXpp); } eventType = aXpp.next(); break; case XmlPullParser.END_TAG: if (aXpp.getName().equals("pk")) { lbContinue = false; } eventType = aXpp.next(); break; default: eventType = aXpp.next(); break; } } // Add the peak to the peaklist. this.iPeaks.put(new Double(mass), new Double(intensity)); // Return the key. return new Double(mass); } /** * This method parses the intensity in the next tag. * * @param aXpp XMLPullParser to read the next tag and its value from. * @return double with the intensity of this peak. * @throws IOException whenever the XML parsing failed. * @throws XmlPullParserException whenever the XML parsing failed. 
*/ private double getIntensity(XmlPullParser aXpp) throws IOException, XmlPullParserException { double result = 0.0; int eventType = aXpp.next(); if (eventType == XmlPullParser.TEXT) { result = Double.parseDouble(aXpp.getText().trim()); } else { throw new IOException("Expected text value after <absi> tag, but didn't find it!"); } return result; } /** * This method parses the mass (M+H) in the next tag. * * @param aXpp XMLPullParser to read the next tag and its value from. * @return double with the mass (M+H) of this peak. * @throws IOException whenever the XML parsing failed. * @throws XmlPullParserException whenever the XML parsing failed. */ private double getMass(XmlPullParser aXpp) throws IOException, XmlPullParserException { double result = 0.0; int eventType = aXpp.next(); if (eventType == XmlPullParser.TEXT) { result = Double.parseDouble(aXpp.getText().trim()); } else { throw new IOException("Expected text value after <mass> tag, but didn't find it!"); } return result; } /** * This method extracts the precursor mass from the pathname, based on the fact that the foldername with the mass * ends with '.LIFT.LIFT'. * * @param aFile File with the link to the original xml file. * @return double with the precursor M/Z * @throws IOException when the pathname could not be parsed. */ private double extractPrecursorMZFromPath(File aFile) throws IOException { double result = 0.0; // Find the mass... File temp = aFile.getParentFile(); boolean lbContinue = true; while (lbContinue && temp != null) { // Find the '.LIFT.*' signature. 
if(LoadUltraflexXMLWorker.isLiftFolder(temp)){ String mass = temp.getName().toLowerCase().substring(0, temp.getName().toLowerCase().indexOf(".lift")); try { result = Double.parseDouble(mass); lbContinue = false; } catch (NumberFormatException nfe) { throw new IOException("Unable to parse precursor mass from folder '" + temp.getName() + "' in file path " + aFile.getCanonicalPath() + "!"); } } else { temp = temp.getParentFile(); } } return result; } }
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.executor.impl.wih; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.util.List; import java.util.Properties; import javax.persistence.EntityManagerFactory; import org.jbpm.executor.ExecutorServiceFactory; import org.jbpm.executor.impl.ExecutorServiceImpl; import org.jbpm.executor.test.CountDownAsyncJobListener; import org.jbpm.process.audit.JPAAuditLogService; import org.jbpm.runtime.manager.impl.jpa.EntityManagerFactoryManager; import org.jbpm.services.task.identity.JBossUserGroupCallbackImpl; import org.jbpm.test.util.AbstractExecutorBaseTest; import org.jbpm.test.util.ExecutorTestUtil; import org.jbpm.test.util.PoolingDataSource; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kie.api.executor.CommandContext; import org.kie.api.executor.ExecutorService; import org.kie.api.io.ResourceType; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.manager.RuntimeEngine; import org.kie.api.runtime.manager.RuntimeEnvironment; import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder; import org.kie.api.runtime.manager.RuntimeManager; import org.kie.api.runtime.manager.RuntimeManagerFactory; import org.kie.api.runtime.manager.audit.NodeInstanceLog; import org.kie.api.runtime.manager.audit.ProcessInstanceLog; import org.kie.api.runtime.process.ProcessInstance; 
import org.kie.api.task.UserGroupCallback; import org.kie.api.task.model.TaskSummary; import org.kie.internal.io.ResourceFactory; import org.kie.internal.runtime.manager.RuntimeManagerRegistry; import org.kie.internal.runtime.manager.context.EmptyContext; public class SLATrackingCommandTest extends AbstractExecutorBaseTest { private PoolingDataSource pds; private UserGroupCallback userGroupCallback; private RuntimeManager manager; private ExecutorService executorService; private EntityManagerFactory emf = null; @Before public void setup() { ExecutorTestUtil.cleanupSingletonSessionId(); pds = ExecutorTestUtil.setupPoolingDataSource(); Properties properties= new Properties(); properties.setProperty("mary", "HR"); properties.setProperty("john", "HR"); userGroupCallback = new JBossUserGroupCallbackImpl(properties); executorService = buildExecutorService(); } @After public void teardown() { executorService.destroy(); if (manager != null) { RuntimeManagerRegistry.get().remove(manager.getIdentifier()); manager.close(); } if (emf != null) { emf.close(); } pds.close(); } protected CountDownAsyncJobListener configureListener(int threads) { CountDownAsyncJobListener countDownListener = new CountDownAsyncJobListener(threads); ((ExecutorServiceImpl) executorService).addAsyncJobListener(countDownListener); return countDownListener; } @Test public void testSLATrackingOnProcessInstance() throws Exception { CountDownAsyncJobListener countDownListener = configureListener(1); RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder() .userGroupCallback(userGroupCallback) .entityManagerFactory(emf) .addAsset(ResourceFactory.newClassPathResource("BPMN2-UserTaskWithSLA.bpmn2"), ResourceType.BPMN2) .addEnvironmentEntry("SLATimerMode", "false") .get(); manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment); assertNotNull(manager); RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get()); KieSession ksession = 
runtime.getKieSession(); assertNotNull(ksession); ProcessInstance processInstance = ksession.startProcess("UserTask"); assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState()); JPAAuditLogService logService = new JPAAuditLogService(emf); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_PENDING); scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_PENDING); // wait for due date of SLA to pass Thread.sleep(3000); countDownListener.reset(1); scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_VIOLATED); } @Test public void testSLATrackingOnUserTask() throws Exception { CountDownAsyncJobListener countDownListener = configureListener(1); RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder() .userGroupCallback(userGroupCallback) .entityManagerFactory(emf) .addAsset(ResourceFactory.newClassPathResource("BPMN2-UserTaskWithSLAOnTask.bpmn2"), ResourceType.BPMN2) .addEnvironmentEntry("SLATimerMode", "false") .get(); manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment); assertNotNull(manager); RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get()); KieSession ksession = runtime.getKieSession(); assertNotNull(ksession); ProcessInstance processInstance = ksession.startProcess("UserTask"); assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState()); List<TaskSummary> tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK"); assertEquals(1, tasks.size()); JPAAuditLogService logService = new JPAAuditLogService(emf); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_PENDING); 
scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_PENDING); // wait for due date of SLA to pass Thread.sleep(3000); countDownListener.reset(1); scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); runtime.getTaskService().start(tasks.get(0).getId(), "john"); runtime.getTaskService().complete(tasks.get(0).getId(), "john", null); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_VIOLATED); } @Test public void testSLATrackingOnProcessInstanceSLAMet() throws Exception { CountDownAsyncJobListener countDownListener = configureListener(1); RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder() .userGroupCallback(userGroupCallback) .entityManagerFactory(emf) .addAsset(ResourceFactory.newClassPathResource("BPMN2-UserTaskWithSLA.bpmn2"), ResourceType.BPMN2) .addEnvironmentEntry("SLATimerMode", "false") .get(); manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment); assertNotNull(manager); RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get()); KieSession ksession = runtime.getKieSession(); assertNotNull(ksession); ProcessInstance processInstance = ksession.startProcess("UserTask"); assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState()); List<TaskSummary> tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK"); assertEquals(1, tasks.size()); JPAAuditLogService logService = new JPAAuditLogService(emf); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_PENDING); scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_PENDING); runtime.getTaskService().start(tasks.get(0).getId(), 
"john"); runtime.getTaskService().complete(tasks.get(0).getId(), "john", null); assertProcessInstanceSLACompliance(logService, processInstance.getId(), ProcessInstance.SLA_MET); } @Test public void testSLATrackingOnUserTaskSLAMet() throws Exception { CountDownAsyncJobListener countDownListener = configureListener(1); RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder() .userGroupCallback(userGroupCallback) .entityManagerFactory(emf) .addAsset(ResourceFactory.newClassPathResource("BPMN2-UserTaskWithSLAOnTask.bpmn2"), ResourceType.BPMN2) .addEnvironmentEntry("SLATimerMode", "false") .get(); manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment); assertNotNull(manager); RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get()); KieSession ksession = runtime.getKieSession(); assertNotNull(ksession); ProcessInstance processInstance = ksession.startProcess("UserTask"); assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState()); List<TaskSummary> tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK"); assertEquals(1, tasks.size()); JPAAuditLogService logService = new JPAAuditLogService(emf); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_PENDING); scheduleSLATracking(manager.getIdentifier()); countDownListener.waitTillCompleted(); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_PENDING); runtime.getTaskService().start(tasks.get(0).getId(), "john"); runtime.getTaskService().complete(tasks.get(0).getId(), "john", null); assertNodeInstanceSLACompliance(logService, processInstance.getId(), "Hello", ProcessInstance.SLA_MET); } private ExecutorService buildExecutorService() { emf = EntityManagerFactoryManager.get().getOrCreate("org.jbpm.persistence.complete"); executorService = ExecutorServiceFactory.newExecutorService(emf); executorService.init(); return 
executorService; } private void scheduleSLATracking(String identifier) { CommandContext commandContext = new CommandContext(); commandContext.setData("EmfName", "org.jbpm.persistence.complete"); commandContext.setData("SingleRun", "true"); commandContext.setData("ForDeployment", identifier); executorService.scheduleRequest("org.jbpm.executor.commands.SLATrackingCommand", commandContext); } private void assertProcessInstanceSLACompliance(JPAAuditLogService logService, Long processInstanceId, int slaCompliance) { List<ProcessInstanceLog> logs = logService.processInstanceLogQuery() .processInstanceId(processInstanceId) .build() .getResultList(); assertEquals(1, logs.size()); ProcessInstanceLog log = logs.get(0); assertEquals(processInstanceId, log.getProcessInstanceId()); assertEquals(slaCompliance, ((org.jbpm.process.audit.ProcessInstanceLog)log).getSlaCompliance().intValue()); } private void assertNodeInstanceSLACompliance(JPAAuditLogService logService, Long processInstanceId, String name, int slaCompliance) { List<NodeInstanceLog> logs = logService.nodeInstanceLogQuery() .processInstanceId(processInstanceId).and() .nodeName(name) .build() .getResultList(); NodeInstanceLog log = logs.get(logs.size() - 1); assertEquals(processInstanceId, log.getProcessInstanceId()); assertEquals(slaCompliance, ((org.jbpm.process.audit.NodeInstanceLog)log).getSlaCompliance().intValue()); } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1beta1/cluster_service.proto package com.google.container.v1beta1; /** * * * <pre> * GetNodePoolRequest retrieves a node pool for a cluster. * </pre> * * Protobuf type {@code google.container.v1beta1.GetNodePoolRequest} */ public final class GetNodePoolRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1beta1.GetNodePoolRequest) GetNodePoolRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetNodePoolRequest.newBuilder() to construct. 
private GetNodePoolRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetNodePoolRequest() { projectId_ = ""; zone_ = ""; clusterId_ = ""; nodePoolId_ = ""; name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetNodePoolRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNodePoolRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); projectId_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); zone_ = s; break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); clusterId_ = s; break; } case 34: { java.lang.String s = input.readStringRequireUtf8(); nodePoolId_ = s; break; } case 50: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1beta1.ClusterServiceProto 
.internal_static_google_container_v1beta1_GetNodePoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_GetNodePoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1beta1.GetNodePoolRequest.class, com.google.container.v1beta1.GetNodePoolRequest.Builder.class); } public static final int PROJECT_ID_FIELD_NUMBER = 1; private volatile java.lang.Object projectId_; /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The projectId. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for projectId. 
*/ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ZONE_FIELD_NUMBER = 2; private volatile java.lang.Object zone_; /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @return The zone. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } } /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for zone. 
*/ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CLUSTER_ID_FIELD_NUMBER = 3; private volatile java.lang.Object clusterId_; /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The clusterId. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); clusterId_ = s; return s; } } /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for clusterId. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getClusterIdBytes() { java.lang.Object ref = clusterId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); clusterId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NODE_POOL_ID_FIELD_NUMBER = 4; private volatile java.lang.Object nodePoolId_; /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. 
* </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The nodePoolId. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getNodePoolId() { java.lang.Object ref = nodePoolId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nodePoolId_ = s; return s; } } /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for nodePoolId. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getNodePoolIdBytes() { java.lang.Object ref = nodePoolId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nodePoolId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAME_FIELD_NUMBER = 6; private volatile java.lang.Object name_; /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. 
Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(clusterId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, clusterId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nodePoolId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, nodePoolId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, name_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(clusterId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, clusterId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nodePoolId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, nodePoolId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, name_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1beta1.GetNodePoolRequest)) { return super.equals(obj); } com.google.container.v1beta1.GetNodePoolRequest other = (com.google.container.v1beta1.GetNodePoolRequest) obj; if (!getProjectId().equals(other.getProjectId())) return false; if (!getZone().equals(other.getZone())) return false; if (!getClusterId().equals(other.getClusterId())) return false; if (!getNodePoolId().equals(other.getNodePoolId())) return false; if (!getName().equals(other.getName())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); hash = (37 * hash) + ZONE_FIELD_NUMBER; hash = (53 * hash) + getZone().hashCode(); hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER; hash = (53 * hash) + getClusterId().hashCode(); hash = (37 * hash) + NODE_POOL_ID_FIELD_NUMBER; hash = (53 * hash) + getNodePoolId().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1beta1.GetNodePoolRequest parseDelimitedFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1beta1.GetNodePoolRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1beta1.GetNodePoolRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1beta1.GetNodePoolRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * GetNodePoolRequest retrieves a node pool for a cluster. 
* </pre> * * Protobuf type {@code google.container.v1beta1.GetNodePoolRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1beta1.GetNodePoolRequest) com.google.container.v1beta1.GetNodePoolRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_GetNodePoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_GetNodePoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1beta1.GetNodePoolRequest.class, com.google.container.v1beta1.GetNodePoolRequest.Builder.class); } // Construct using com.google.container.v1beta1.GetNodePoolRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); projectId_ = ""; zone_ = ""; clusterId_ = ""; nodePoolId_ = ""; name_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_GetNodePoolRequest_descriptor; } @java.lang.Override public com.google.container.v1beta1.GetNodePoolRequest getDefaultInstanceForType() { return com.google.container.v1beta1.GetNodePoolRequest.getDefaultInstance(); } @java.lang.Override public com.google.container.v1beta1.GetNodePoolRequest build() 
{ com.google.container.v1beta1.GetNodePoolRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1beta1.GetNodePoolRequest buildPartial() { com.google.container.v1beta1.GetNodePoolRequest result = new com.google.container.v1beta1.GetNodePoolRequest(this); result.projectId_ = projectId_; result.zone_ = zone_; result.clusterId_ = clusterId_; result.nodePoolId_ = nodePoolId_; result.name_ = name_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1beta1.GetNodePoolRequest) { return mergeFrom((com.google.container.v1beta1.GetNodePoolRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1beta1.GetNodePoolRequest other) { if (other == com.google.container.v1beta1.GetNodePoolRequest.getDefaultInstance()) return this; if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; onChanged(); } if 
(!other.getZone().isEmpty()) { zone_ = other.zone_; onChanged(); } if (!other.getClusterId().isEmpty()) { clusterId_ = other.clusterId_; onChanged(); } if (!other.getNodePoolId().isEmpty()) { nodePoolId_ = other.nodePoolId_; onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.container.v1beta1.GetNodePoolRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.container.v1beta1.GetNodePoolRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object projectId_ = ""; /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The projectId. */ @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). 
* This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for projectId. */ @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The projectId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. * </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The Google Developers Console [project ID or project * number](https://developers.google.com/console/help/new/#projectnumber). * This field has been deprecated and replaced by the name field. 
* </pre> * * <code>string project_id = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The bytes for projectId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; onChanged(); return this; } private java.lang.Object zone_ = ""; /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @return The zone. */ @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for zone. */ @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Deprecated. 
The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @param value The zone to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setZone(java.lang.String value) { if (value == null) { throw new NullPointerException(); } zone_ = value; onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearZone() { zone_ = getDefaultInstance().getZone(); onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) in which the * cluster resides. This field has been deprecated and replaced by the name * field. * </pre> * * <code>string zone = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for zone to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setZoneBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); zone_ = value; onChanged(); return this; } private java.lang.Object clusterId_ = ""; /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. 
* </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The clusterId. */ @java.lang.Deprecated public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); clusterId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for clusterId. */ @java.lang.Deprecated public com.google.protobuf.ByteString getClusterIdBytes() { java.lang.Object ref = clusterId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); clusterId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The clusterId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setClusterId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } clusterId_ = value; onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. 
*/ @java.lang.Deprecated public Builder clearClusterId() { clusterId_ = getDefaultInstance().getClusterId(); onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the cluster. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string cluster_id = 3 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The bytes for clusterId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setClusterIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); clusterId_ = value; onChanged(); return this; } private java.lang.Object nodePoolId_ = ""; /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The nodePoolId. */ @java.lang.Deprecated public java.lang.String getNodePoolId() { java.lang.Object ref = nodePoolId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nodePoolId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for nodePoolId. */ @java.lang.Deprecated public com.google.protobuf.ByteString getNodePoolIdBytes() { java.lang.Object ref = nodePoolId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nodePoolId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. 
Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The nodePoolId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setNodePoolId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nodePoolId_ = value; onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearNodePoolId() { nodePoolId_ = getDefaultInstance().getNodePoolId(); onChanged(); return this; } /** * * * <pre> * Required. Deprecated. The name of the node pool. * This field has been deprecated and replaced by the name field. * </pre> * * <code>string node_pool_id = 4 [deprecated = true, (.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The bytes for nodePoolId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setNodePoolIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nodePoolId_ = value; onChanged(); return this; } private java.lang.Object name_ = ""; /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @return The name. 
*/ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * * * <pre> * The name (project, location, cluster, node pool id) of the node pool to * get. Specified in the format * `projects/&#42;&#47;locations/&#42;&#47;clusters/&#42;&#47;nodePools/&#42;`. * </pre> * * <code>string name = 6;</code> * * @param value The bytes for name to set. 
* @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1beta1.GetNodePoolRequest) } // @@protoc_insertion_point(class_scope:google.container.v1beta1.GetNodePoolRequest) private static final com.google.container.v1beta1.GetNodePoolRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1beta1.GetNodePoolRequest(); } public static com.google.container.v1beta1.GetNodePoolRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetNodePoolRequest> PARSER = new com.google.protobuf.AbstractParser<GetNodePoolRequest>() { @java.lang.Override public GetNodePoolRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetNodePoolRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<GetNodePoolRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetNodePoolRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1beta1.GetNodePoolRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.hive.mapreduce; import static org.apache.phoenix.monitoring.MetricType.SCAN_BYTES; import java.io.IOException; import java.sql.SQLException; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapreduce.lib.db.DBWritable; import org.apache.hadoop.util.ReflectionUtils; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.coprocessor.BaseScannerRegionObserver; import org.apache.phoenix.hive.PhoenixRowKey; import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil; import org.apache.phoenix.iterate.ConcatResultIterator; import org.apache.phoenix.iterate.LookAheadResultIterator; import org.apache.phoenix.iterate.MapReduceParallelScanGrouper; import org.apache.phoenix.iterate.PeekingResultIterator; import 
org.apache.phoenix.iterate.ResultIterator;
import org.apache.phoenix.iterate.RoundRobinResultIterator;
import org.apache.phoenix.iterate.SequenceResultIterator;
import org.apache.phoenix.iterate.TableResultIterator;
import org.apache.phoenix.jdbc.PhoenixResultSet;
import org.apache.phoenix.monitoring.ReadMetricQueue;

import com.google.common.base.Throwables;
import com.google.common.collect.Lists;

/**
 * {@code RecordReader} implementation that iterates over the records of a Phoenix
 * {@link QueryPlan} for one {@link PhoenixInputSplit}. Keys are {@link PhoenixRowKey}s
 * for transactional tables and {@link NullWritable} otherwise; values are instances of
 * the configured {@code DBWritable} class. Not thread-safe: key/value instances are
 * reused across calls, as the old-style mapred API expects.
 */
@SuppressWarnings("rawtypes")
public class PhoenixRecordReader<T extends DBWritable> implements
        RecordReader<WritableComparable, T> {

    private static final Log LOG = LogFactory.getLog(PhoenixRecordReader.class);

    private final Configuration configuration;
    private final QueryPlan queryPlan;
    private WritableComparable key;                 // reusable key instance handed out by createKey()
    private T value = null;                         // reusable value instance handed out by createValue()
    private Class<T> inputClass;                    // concrete DBWritable type instantiated per reader
    private ResultIterator resultIterator = null;   // combined iterator over all scans of the split
    private PhoenixResultSet resultSet;             // result-set view that next() consumes
    private long readCount;                         // records returned so far; logged at close()
    private boolean isTransactional;                // true when the target table is transactional

    // Construction only captures configuration; scanners are opened later in initialize(InputSplit).
    public PhoenixRecordReader(Class<T> inputClass, final Configuration configuration, final
    QueryPlan queryPlan) throws IOException {
        this.inputClass = inputClass;
        this.configuration = configuration;
        this.queryPlan = queryPlan;
        isTransactional = PhoenixStorageHandlerUtil.isTransactionalTable(configuration);
    }

    // Builds one TableResultIterator per scan in the split, combines them (round-robin or
    // concatenated, as the plan dictates) and wraps the combined iterator in a PhoenixResultSet.
    // On SQLException the error is logged and rethrown unchecked via Throwables.propagate.
    public void initialize(InputSplit split) throws IOException {
        final PhoenixInputSplit pSplit = (PhoenixInputSplit) split;
        final List<Scan> scans = pSplit.getScans();

        if (LOG.isInfoEnabled()) {
            LOG.info("Target table : " + queryPlan.getTableRef().getTable().getPhysicalName());
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Scan count[" + scans.size() + "] : " + Bytes.toStringBinary(scans.get(0)
                    .getStartRow()) + " ~ " + Bytes.toStringBinary(scans.get(scans.size() - 1)
                    .getStopRow()));
            LOG.debug("First scan : " + scans.get(0) + " scanAttribute : " + scans.get(0)
                    .getAttributesMap());

            for (int i = 0, limit = scans.size(); i < limit; i++) {
                LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " +
                        Bytes.toStringBinary(scans.get(i).getAttribute(BaseScannerRegionObserver
                                .EXPECTED_UPPER_REGION_KEY)));
            }
        }

        try {
            List<PeekingResultIterator> iterators = Lists.newArrayListWithExpectedSize(scans.size
                    ());
            StatementContext ctx = queryPlan.getContext();
            ReadMetricQueue readMetrics = ctx.getReadMetricsQueue();
            String tableName = queryPlan.getTableRef().getTable().getPhysicalName().getString();
            // Lease-renew threshold keeps long-running scanners alive on the region servers.
            long renewScannerLeaseThreshold = queryPlan.getContext().getConnection()
                    .getQueryServices().getRenewLeaseThresholdMilliSeconds();
            for (Scan scan : scans) {
                // Splits were computed up-front, so per-scan region boundary checks are skipped.
                scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes
                        .toBytes(true));

                final TableResultIterator tableResultIterator = new TableResultIterator(queryPlan
                        .getContext().getConnection().getMutationState(), scan,
                        readMetrics.allotMetric(SCAN_BYTES, tableName),
                        renewScannerLeaseThreshold, queryPlan,
                        MapReduceParallelScanGrouper.getInstance());

                PeekingResultIterator peekingResultIterator = LookAheadResultIterator.wrap
                        (tableResultIterator);
                iterators.add(peekingResultIterator);
            }

            ResultIterator iterator = queryPlan.useRoundRobinIterator() ?
                    RoundRobinResultIterator.newIterator(iterators, queryPlan) :
                    ConcatResultIterator.newIterator(iterators);
            if (queryPlan.getContext().getSequenceManager().getSequenceCount() > 0) {
                iterator = new SequenceResultIterator(iterator, queryPlan.getContext()
                        .getSequenceManager());
            }
            this.resultIterator = iterator;
            // Clone the row projector as it's not thread safe and would be used
            // simultaneously by multiple threads otherwise.
            this.resultSet = new PhoenixResultSet(this.resultIterator, queryPlan.getProjector()
                    .cloneIfNecessary(), queryPlan.getContext());
        } catch (SQLException e) {
            LOG.error(String.format(" Error [%s] initializing PhoenixRecordReader. ", e
                    .getMessage()));
            Throwables.propagate(e);
        }
    }

    // Advances the result set; copies the current row into value and, for transactional
    // tables, the primary key into key. Returns false at end of data.
    @Override
    public boolean next(WritableComparable key, T value) throws IOException {
        try {
            if (!resultSet.next()) {
                return false;
            }
            value.readFields(resultSet);

            if (isTransactional) {
                // presumably value is always a PhoenixResultWritable for transactional
                // tables — the cast is unchecked here; TODO confirm against the input format
                ((PhoenixResultWritable) value).readPrimaryKey((PhoenixRowKey) key);
            }

            ++readCount;

            if (LOG.isTraceEnabled()) {
                LOG.trace("Result[" + readCount + "] : " + ((PhoenixResultWritable) value)
                        .getResultMap());
            }

            return true;
        } catch (SQLException e) {
            LOG.error(String.format(" Error [%s] occurred while iterating over the resultset. ",
                    e.getMessage()));
            throw new RuntimeException(e);
        }
    }

    @Override
    public WritableComparable createKey() {
        // Transactional tables carry a real row key; otherwise the key is a placeholder.
        if (isTransactional) {
            key = new PhoenixRowKey();
        } else {
            key = NullWritable.get();
        }

        return key;
    }

    @Override
    public T createValue() {
        value = ReflectionUtils.newInstance(inputClass, this.configuration);
        return value;
    }

    // Byte-position reporting is not supported; always 0.
    @Override
    public long getPos() throws IOException {
        return 0;
    }

    @Override
    public void close() throws IOException {
        if (LOG.isInfoEnabled()) {
            LOG.info("Read Count : " + readCount);
        }

        if (resultIterator != null) {
            try {
                resultIterator.close();
            } catch (SQLException e) {
                LOG.error(" Error closing resultset.");
                throw new RuntimeException(e);
            }
        }
    }

    // Progress reporting is not supported; always 0.
    @Override
    public float getProgress() throws IOException {
        return 0;
    }
}
import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Scanner; import javax.swing.JOptionPane; import javax.swing.JToggleButton; public class DBwork implements ActionListener{ private static final String JDBC_DRIVER = "com.mysql.jdbc.Driver"; private static final String DB_URL = "jdbc:mysql://localhost/jmok?useUnicode=yes&amp;characterEncoding=UTF-8&amp;autoReconnect=true"; private static final String TABLE = "jmok"; private static final String TABLE2 = "move"; private static final String USER = "root";//"hh"; private static final String PASS = "1234";//"1111";CL private static String sql = null; private static String names = null; private static String username = null; private static String yourname = null; private static Connection conn = null; private static Statement stmt = null; private static ResultSet rs = null; private static int x, y, count, mcount; public static void init(){ x=0; y=0; count=0; } public static void set19(){ x=19; y=19; } public static String checkName(){ try{ while(true){ username = JOptionPane.showInputDialog(null, Jmok.Icon, "Login", JOptionPane.PLAIN_MESSAGE); if(username==null||username.length()==0){ if(Jmok.isStarted==0) System.exit(0); } else if(username.length()>=20){ //name -> VARCHAR(20) JOptionPane.showMessageDialog(null, "The Username should be less than 15 characters", "Try Again", JOptionPane.ERROR_MESSAGE); continue; } else if(username.equals("jen")){ JOptionPane.showMessageDialog(null, "The Username is already Taken...", "Try Again", JOptionPane.ERROR_MESSAGE); continue; } Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "SELECT * FROM "+TABLE+" WHERE name = '"+username+"'"; //Execute a query rs = stmt.executeQuery(sql); 
if(rs.next() == false) break; else JOptionPane.showMessageDialog(null, "The Username is already Taken...", "Try Again", JOptionPane.ERROR_MESSAGE); } }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); Jmok.isOffline = 1; //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ JOptionPane.showMessageDialog(null, "Cannot find mysql driver...", "Driver Error", JOptionPane.ERROR_MESSAGE); //Handle errors for Class.forName JOptionPane.showMessageDialog(null, "Download JDBC driver from \nhttps://dev.mysql.com/downloads/connector/j/", "Install Driver", JOptionPane.ERROR_MESSAGE); ed.printStackTrace(); Jmok.isOffline = 1; } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } return username; } public static boolean updateMove(){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "SELECT move FROM "+TABLE2; //Execute a query rs = stmt.executeQuery(sql); mcount=0; while(rs.next()){ Idea.moves[mcount] = rs.getInt("move"); System.out.println(Idea.moves[mcount]); mcount++; } if(mcount!=12){ return false; } }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); return false; }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); return false; } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } return true; } public static void findWhite(){ Jmok.opponents.clear(); //remove all 
Jmok.opponents.add("AI");//add AI by default try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "SELECT name FROM "+TABLE+" WHERE count = -1 AND remark = 'jen'"; //Execute a query rs = stmt.executeQuery(sql); while(rs.next()){ names = rs.getString("name"); Jmok.opponents.add(names); } }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } } public static String showBlack(){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "INSERT INTO "+TABLE+" VALUES('"+Jmok.name[Jmok.I]+"', "+(-1)+", "+x+", "+y+", '"+Jmok.name[Jmok.You]+"')"; stmt.executeUpdate(sql); }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } return username; } public static String findBlack(){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "INSERT INTO "+TABLE+" 
VALUES('"+Jmok.name[Jmok.I]+"', "+(-1)+", "+x+", "+y+", 'jen')"; stmt.executeUpdate(sql); while(true){ sql = "SELECT name FROM " + TABLE + " WHERE count = -1 AND remark = '"+Jmok.name[Jmok.I]+"'"; rs = stmt.executeQuery(sql); if(rs.next() == true) { yourname=rs.getString("name"); break; } Thread.sleep(500); //polling } }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } cleanDB(Jmok.name[Jmok.I], -1); cleanDB(yourname, -1); return yourname; } public static void Resign(){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "INSERT INTO "+TABLE+" VALUES('"+Jmok.name[Jmok.I]+"', "+count+", 19, 0, 'resign')"; stmt.executeUpdate(sql); }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } } public static void cleanDB(String erase){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "DELETE FROM "+TABLE+" WHERE name = '"+erase+"'"; stmt.executeUpdate(sql); //Execute a query 
System.out.println(erase+" -> Clean DB : Successful!"); }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } } public static void cleanDB(String erase, int c){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "DELETE FROM "+TABLE+" WHERE name = '"+erase+"' AND count = "+c; stmt.executeUpdate(sql); //Execute a query System.out.println(erase+" -> Clean DB("+c+") : Successful!"); }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); } } } public static void useDB(){ try{ Class.forName("com.mysql.jdbc.Driver"); //Register JDBC driver conn = DriverManager.getConnection(DB_URL,USER,PASS); //Open a connection stmt = conn.createStatement(); sql = "INSERT INTO "+TABLE+" VALUES('"+Jmok.name[Jmok.I]+"', "+count+", "+x+", "+y+", null)"; stmt.executeUpdate(sql); if(Jmok.isGo==0) Idea.scanBoard(); if(Jmok.end==0){ while(true){ sql = "SELECT x, y FROM " + TABLE + " WHERE count >= " + count + " AND name = '"+Jmok.name[Jmok.You]+"'"; rs = stmt.executeQuery(sql); if(rs.next() == true) { x = rs.getInt("x"); y = 
rs.getInt("y"); if(x==19){ if(y==19){ Jmok.JMOK.setVisible(false); Jmok.clock.t.stop(); Jmok.Win(6);//OK } else if(y==0){ Jmok.JMOK.setVisible(false); Jmok.clock.t.stop(); DBwork.cleanDB(Jmok.name[Jmok.You]); Jmok.Win(7);//OK } } break; } Thread.sleep(500); //polling } Jmok.clara[x][y].setIcon(Jmok.Jmoks[Jmok.You]); Jmok.clara[x][y].setRolloverIcon(null); if(Jmok.board[x+1][y+1]!=0){ Jmok.JMOK.setVisible(false); Jmok.clock.t.stop(); Jmok.Win(4); } else Jmok.board[x+1][y+1]=Jmok.You; } rs.close(); }catch(ArrayIndexOutOfBoundsException ae){ //Nothing to do }catch(SQLException se){ JOptionPane.showMessageDialog(null, "Cannot connect to mysql server...", "mySQL Error", JOptionPane.ERROR_MESSAGE); //Handle errors for JDBC se.printStackTrace(); }catch(Exception ed){ //Handle errors for Class.forName and ArrayOutOfBoundsException ed.printStackTrace(); } finally{ //finally block used to close resources try{ if(stmt!=null) stmt.close(); }catch(SQLException se2){} //nothing to do try{ if(conn!=null) conn.close(); }catch(SQLException se){ se.printStackTrace(); //Jmok.myTurn=1; } count++; } } public void actionPerformed(ActionEvent e){ JToggleButton b = (JToggleButton) e.getSource(); x = (int)b.getClientProperty("column"); y = (int)b.getClientProperty("row"); count++; //before the query Jmok.movecount.setText(Integer.toString(count)); if(Jmok.board[x+1][y+1]!=0){ Jmok.board[0][0]=4; Jmok.end=1; } else{ Jmok.board[x+1][y+1]=Jmok.I; Jmok.clara[x][y].setIcon(Jmok.Jmoks[Jmok.I]); } if(Jmok.name[Jmok.You].equals("AI")){ Idea.aIntelligence(); Jmok.clara[Idea.getX()-1][Idea.getY()-1].setIcon(Jmok.Jmoks[Jmok.You]); Jmok.clara[Idea.getX()-1][Idea.getY()-1].setRolloverIcon(null); } else{ useDB(); } if(Jmok.end==0&&Jmok.isGo==0) Idea.scanBoard(); } }
/* The following code was generated by JFlex 1.5.1 */
/* NOTE(review): machine-generated scanner — do not hand-edit the packed DFA
   tables below; regenerate from ClassicTokenizerImpl.jflex instead. */

package org.apache.lucene.analysis.standard;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
WARNING: if you change ClassicTokenizerImpl.jflex and need to regenerate
         the tokenizer, only use the trunk version of JFlex 1.5 at the moment!
*/

import java.io.Reader;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

@SuppressWarnings("fallthrough")
/**
 * This class implements the classic lucene StandardTokenizer up until 3.0
 */
class ClassicTokenizerImpl {

  /** This character denotes the end of file */
  public static final int YYEOF = -1;

  /** initial size of the lookahead buffer */
  private static final int ZZ_BUFFERSIZE = 4096;

  /** lexical states */
  public static final int YYINITIAL = 0;

  /**
   * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
   * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
   *                  at the beginning of a line
   * l is of the form l = 2*k, k a non negative integer
   */
  private static final int ZZ_LEXSTATE[] = {
     0, 0
  };

  /**
   * Translates characters to character classes
   */
  private static final String ZZ_CMAP_PACKED =
    "\46\0\1\5\1\3\4\0\1\11\1\7\1\4\1\11\12\2\6\0"+
    "\1\6\32\12\4\0\1\10\1\0\32\12\57\0\1\12\12\0\1\12"+
    "\4\0\1\12\5\0\27\12\1\0\37\12\1\0\u0128\12\2\0\22\12"+
    "\34\0\136\12\2\0\11\12\2\0\7\12\16\0\2\12\16\0\5\12"+
    "\11\0\1\12\213\0\1\12\13\0\1\12\1\0\3\12\1\0\1\12"+
    "\1\0\24\12\1\0\54\12\1\0\10\12\2\0\32\12\14\0\202\12"+
    "\12\0\71\12\2\0\2\12\2\0\2\12\3\0\46\12\2\0\2\12"+
    "\67\0\46\12\2\0\1\12\7\0\47\12\110\0\33\12\5\0\3\12"+
    "\56\0\32\12\5\0\13\12\25\0\12\2\7\0\143\12\1\0\1\12"+
    "\17\0\2\12\11\0\12\2\3\12\23\0\1\12\1\0\33\12\123\0"+
    "\46\12\u015f\0\65\12\3\0\1\12\22\0\1\12\7\0\12\12\4\0"+
    "\12\2\25\0\10\12\2\0\2\12\2\0\26\12\1\0\7\12\1\0"+
    "\1\12\3\0\4\12\42\0\2\12\1\0\3\12\4\0\12\2\2\12"+
    "\23\0\6\12\4\0\2\12\2\0\26\12\1\0\7\12\1\0\2\12"+
    "\1\0\2\12\1\0\2\12\37\0\4\12\1\0\1\12\7\0\12\2"+
    "\2\0\3\12\20\0\7\12\1\0\1\12\1\0\3\12\1\0\26\12"+
    "\1\0\7\12\1\0\2\12\1\0\5\12\3\0\1\12\22\0\1\12"+
    "\17\0\1\12\5\0\12\2\25\0\10\12\2\0\2\12\2\0\26\12"+
    "\1\0\7\12\1\0\2\12\2\0\4\12\3\0\1\12\36\0\2\12"+
    "\1\0\3\12\4\0\12\2\25\0\6\12\3\0\3\12\1\0\4\12"+
    "\3\0\2\12\1\0\1\12\1\0\2\12\3\0\2\12\3\0\3\12"+
    "\3\0\10\12\1\0\3\12\55\0\11\2\25\0\10\12\1\0\3\12"+
    "\1\0\27\12\1\0\12\12\1\0\5\12\46\0\2\12\4\0\12\2"+
    "\25\0\10\12\1\0\3\12\1\0\27\12\1\0\12\12\1\0\5\12"+
    "\44\0\1\12\1\0\2\12\4\0\12\2\25\0\10\12\1\0\3\12"+
    "\1\0\27\12\1\0\20\12\46\0\2\12\4\0\12\2\25\0\22\12"+
    "\3\0\30\12\1\0\11\12\1\0\1\12\2\0\7\12\71\0\1\1"+
    "\60\12\1\1\2\12\14\1\7\12\11\1\12\2\47\0\2\12\1\0"+
    "\1\12\2\0\2\12\1\0\1\12\2\0\1\12\6\0\4\12\1\0"+
    "\7\12\1\0\3\12\1\0\1\12\1\0\1\12\2\0\2\12\1\0"+
    "\4\12\1\0\2\12\11\0\1\12\2\0\5\12\1\0\1\12\11\0"+
    "\12\2\2\0\2\12\42\0\1\12\37\0\12\2\26\0\10\12\1\0"+
    "\42\12\35\0\4\12\164\0\42\12\1\0\5\12\1\0\2\12\25\0"+
    "\12\2\6\0\6\12\112\0\46\12\12\0\47\12\11\0\132\12\5\0"+
    "\104\12\5\0\122\12\6\0\7\12\1\0\77\12\1\0\1\12\1\0"+
    "\4\12\2\0\7\12\1\0\1\12\1\0\4\12\2\0\47\12\1\0"+
    "\1\12\1\0\4\12\2\0\37\12\1\0\1\12\1\0\4\12\2\0"+
    "\7\12\1\0\1\12\1\0\4\12\2\0\7\12\1\0\7\12\1\0"+
    "\27\12\1\0\37\12\1\0\1\12\1\0\4\12\2\0\7\12\1\0"+
    "\47\12\1\0\23\12\16\0\11\2\56\0\125\12\14\0\u026c\12\2\0"+
    "\10\12\12\0\32\12\5\0\113\12\225\0\64\12\54\0\12\2\46\0"+
    "\12\2\6\0\130\12\10\0\51\12\u0557\0\234\12\4\0\132\12\6\0"+
    "\26\12\2\0\6\12\2\0\46\12\2\0\6\12\2\0\10\12\1\0"+
    "\1\12\1\0\1\12\1\0\1\12\1\0\37\12\2\0\65\12\1\0"+
    "\7\12\1\0\1\12\3\0\3\12\1\0\7\12\3\0\4\12\2\0"+
    "\6\12\4\0\15\12\5\0\3\12\1\0\7\12\202\0\1\12\202\0"+
    "\1\12\4\0\1\12\2\0\12\12\1\0\1\12\3\0\5\12\6\0"+
    "\1\12\1\0\1\12\1\0\1\12\1\0\4\12\1\0\3\12\1\0"+
    "\7\12\u0ecb\0\2\12\52\0\5\12\12\0\1\13\124\13\10\13\2\13"+
    "\2\13\132\13\1\13\3\13\6\13\50\13\3\13\1\0\136\12\21\0"+
    "\30\12\70\0\20\13\u0100\0\200\13\200\0\u19b6\13\12\13\100\0\u51a6\13"+
    "\132\13\u048d\12\u0773\0\u2ba4\12\u215c\0\u012e\13\322\13\7\12\14\0\5\12"+
    "\5\0\1\12\1\0\12\12\1\0\15\12\1\0\5\12\1\0\1\12"+
    "\1\0\2\12\1\0\2\12\1\0\154\12\41\0\u016b\12\22\0\100\12"+
    "\2\0\66\12\50\0\14\12\164\0\3\12\1\0\1\12\1\0\207\12"+
    "\23\0\12\2\7\0\32\12\6\0\32\12\12\0\1\13\72\13\37\12"+
    "\3\0\6\12\2\0\6\12\2\0\6\12\2\0\3\12\43\0";

  /**
   * Translates characters to character classes
   */
  private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);

  /**
   * Translates DFA states to action switch labels.
   */
  private static final int [] ZZ_ACTION = zzUnpackAction();

  private static final String ZZ_ACTION_PACKED_0 =
    "\1\0\1\1\3\2\1\3\13\0\1\2\3\4\2\0"+
    "\1\5\1\0\1\5\3\4\6\5\1\6\1\4\2\7"+
    "\1\10\1\0\1\10\3\0\2\10\1\11\1\12\1\4";

  private static int [] zzUnpackAction() {
    int [] result = new int[50];
    int offset = 0;
    offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
    return result;
  }

  /* Unpacks a run-length-encoded (count, value) pair stream into result. */
  private static int zzUnpackAction(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }

  /**
   * Translates a state to a row index in the transition table
   */
  private static final int [] ZZ_ROWMAP = zzUnpackRowMap();

  private static final String ZZ_ROWMAP_PACKED_0 =
    "\0\0\0\14\0\30\0\44\0\60\0\14\0\74\0\110"+
    "\0\124\0\140\0\154\0\170\0\204\0\220\0\234\0\250"+
    "\0\264\0\300\0\314\0\330\0\344\0\360\0\374\0\u0108"+
    "\0\u0114\0\u0120\0\u012c\0\u0138\0\u0144\0\u0150\0\u015c\0\u0168"+
    "\0\u0174\0\u0180\0\u018c\0\u0198\0\u01a4\0\250\0\u01b0\0\u01bc"+
    "\0\u01c8\0\u01d4\0\u01e0\0\u01ec\0\u01f8\0\74\0\154\0\u0204"+
    "\0\u0210\0\u021c";

  private static int [] zzUnpackRowMap() {
    int [] result = new int[50];
    int offset = 0;
    offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
    return result;
  }

  /* Row-map entries are stored as two chars forming one int (high word first). */
  private static int zzUnpackRowMap(String packed, int offset, int [] result) {
    int i = 0;  /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int high = packed.charAt(i++) << 16;
      result[j++] = high | packed.charAt(i++);
    }
    return j;
  }

  /**
   * The transition table of the DFA
   */
  private static final int [] ZZ_TRANS = zzUnpackTrans();

  private static final String ZZ_TRANS_PACKED_0 =
    "\1\2\1\3\1\4\7\2\1\5\1\6\15\0\2\3"+
    "\1\0\1\7\1\0\1\10\2\11\1\12\1\3\2\0"+
    "\1\3\1\4\1\0\1\13\1\0\1\10\2\14\1\15"+
    "\1\4\2\0\1\3\1\4\1\16\1\17\1\20\1\21"+
    "\2\11\1\12\1\22\2\0\1\23\1\24\7\0\1\25"+
    "\2\0\2\26\7\0\1\26\2\0\1\27\1\30\7\0"+
    "\1\31\3\0\1\32\7\0\1\12\2\0\1\33\1\34"+
    "\7\0\1\35\2\0\1\36\1\37\7\0\1\40\2\0"+
    "\1\41\1\42\7\0\1\43\13\0\1\44\2\0\1\23"+
    "\1\24\7\0\1\45\13\0\1\46\2\0\2\26\7\0"+
    "\1\47\2\0\1\3\1\4\1\16\1\7\1\20\1\21"+
    "\2\11\1\12\1\22\2\0\2\23\1\0\1\50\1\0"+
    "\1\10\2\51\1\0\1\23\2\0\1\23\1\24\1\0"+
    "\1\52\1\0\1\10\2\53\1\54\1\24\2\0\1\23"+
    "\1\24\1\0\1\50\1\0\1\10\2\51\1\0\1\25"+
    "\2\0\2\26\1\0\1\55\2\0\1\55\2\0\1\26"+
    "\2\0\2\27\1\0\1\51\1\0\1\10\2\51\1\0"+
    "\1\27\2\0\1\27\1\30\1\0\1\53\1\0\1\10"+
    "\2\53\1\54\1\30\2\0\1\27\1\30\1\0\1\51"+
    "\1\0\1\10\2\51\1\0\1\31\3\0\1\32\1\0"+
    "\1\54\2\0\3\54\1\32\2\0\2\33\1\0\1\56"+
    "\1\0\1\10\2\11\1\12\1\33\2\0\1\33\1\34"+
    "\1\0\1\57\1\0\1\10\2\14\1\15\1\34\2\0"+
    "\1\33\1\34\1\0\1\56\1\0\1\10\2\11\1\12"+
    "\1\35\2\0\2\36\1\0\1\11\1\0\1\10\2\11"+
    "\1\12\1\36\2\0\1\36\1\37\1\0\1\14\1\0"+
    "\1\10\2\14\1\15\1\37\2\0\1\36\1\37\1\0"+
    "\1\11\1\0\1\10\2\11\1\12\1\40\2\0\2\41"+
    "\1\0\1\12\2\0\3\12\1\41\2\0\1\41\1\42"+
    "\1\0\1\15\2\0\3\15\1\42\2\0\1\41\1\42"+
    "\1\0\1\12\2\0\3\12\1\43\4\0\1\16\6\0"+
    "\1\44\2\0\1\23\1\24\1\0\1\60\1\0\1\10"+
    "\2\51\1\0\1\25\2\0\2\26\1\0\1\55\2\0"+
    "\1\55\2\0\1\47\2\0\2\23\7\0\1\23\2\0"+
    "\2\27\7\0\1\27\2\0\2\33\7\0\1\33\2\0"+
    "\2\36\7\0\1\36\2\0\2\41\7\0\1\41\2\0"+
    "\2\61\7\0\1\61\2\0\2\23\7\0\1\62\2\0"+
    "\2\61\1\0\1\55\2\0\1\55\2\0\1\61\2\0"+
    "\2\23\1\0\1\60\1\0\1\10\2\51\1\0\1\23"+
    "\1\0";

  private static int [] zzUnpackTrans() {
    int [] result = new int[552];
    int offset = 0;
    offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
    return result;
  }

  /* Like zzUnpackAction, but values are stored off-by-one (value-- restores them). */
  private static int zzUnpackTrans(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      value--;
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }


  /* error codes */
  private static final int ZZ_UNKNOWN_ERROR = 0;
  private static final int ZZ_NO_MATCH = 1;
  private static final int ZZ_PUSHBACK_2BIG = 2;

  /* error messages for the codes above */
  private static final String ZZ_ERROR_MSG[] = {
    "Unkown internal scanner error",
    "Error: could not match input",
    "Error: pushback value was too large"
  };

  /**
   * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
   */
  private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

  private static final String ZZ_ATTRIBUTE_PACKED_0 =
    "\1\0\1\11\3\1\1\11\13\0\4\1\2\0\1\1"+
    "\1\0\17\1\1\0\1\1\3\0\5\1";

  private static int [] zzUnpackAttribute() {
    int [] result = new int[50];
    int offset = 0;
    offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
    return result;
  }

  private static int zzUnpackAttribute(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }

  /** the input device */
  private java.io.Reader zzReader;

  /** the current state of the DFA */
  private int zzState;

  /** the current lexical state */
  private int zzLexicalState = YYINITIAL;

  /** this buffer contains the current text to be matched and is
      the source of the yytext() string */
  private char zzBuffer[] = new char[ZZ_BUFFERSIZE];

  /** the textposition at the last accepting state */
  private int zzMarkedPos;

  /** the current text position in the buffer */
  private int zzCurrentPos;

  /** startRead marks the beginning of the yytext() string in the buffer */
  private int zzStartRead;

  /** endRead marks the last character in the buffer, that has been read
      from input */
  private int zzEndRead;

  /** number of newlines encountered up to the start of the matched text */
  private int yyline;

  /** the number of characters up to the start of the matched text */
  private int yychar;

  /**
   * the number of characters from the last newline up to the start of the
   * matched text
   */
  private int yycolumn;

  /**
   * zzAtBOL == true <=> the scanner is currently at the beginning of a line
   */
  private boolean zzAtBOL = true;

  /** zzAtEOF == true <=> the scanner is at the EOF */
  private boolean zzAtEOF;

  /** denotes if the user-EOF-code has already been executed */
  private boolean zzEOFDone;

  /* user code: */

  public static final int ALPHANUM          = StandardTokenizer.ALPHANUM;
  public static final int APOSTROPHE        = StandardTokenizer.APOSTROPHE;
  public static final int ACRONYM           = StandardTokenizer.ACRONYM;
  public static final int COMPANY           = StandardTokenizer.COMPANY;
  public static final int EMAIL             = StandardTokenizer.EMAIL;
  public static final int HOST              = StandardTokenizer.HOST;
  public static final int NUM               = StandardTokenizer.NUM;
  public static final int CJ                = StandardTokenizer.CJ;
  public static final int ACRONYM_DEP       = StandardTokenizer.ACRONYM_DEP;

  public static final String [] TOKEN_TYPES = StandardTokenizer.TOKEN_TYPES;

  /** Returns the character offset of the start of the last matched token. */
  public final int yychar()
  {
    return yychar;
  }

  /**
   * Fills CharTermAttribute with the current token text.
   */
  public final void getText(CharTermAttribute t) {
    t.copyBuffer(zzBuffer, zzStartRead, zzMarkedPos-zzStartRead);
  }

  /** Buffer resizing is not supported by this generated scanner. */
  public final void setBufferSize(int numChars) {
     throw new UnsupportedOperationException();
  }

  /**
   * Creates a new scanner
   *
   * @param   in  the java.io.Reader to read input from.
   */
  ClassicTokenizerImpl(java.io.Reader in) {
    this.zzReader = in;
  }


  /**
   * Unpacks the compressed character translation table.
   *
   * @param packed   the packed character translation table
   * @return         the unpacked character translation table
   */
  private static char [] zzUnpackCMap(String packed) {
    char [] map = new char[0x10000];
    int i = 0;  /* index in packed string  */
    int j = 0;  /* index in unpacked array */
    while (i < 1138) {
      int  count = packed.charAt(i++);
      char value = packed.charAt(i++);
      do map[j++] = value; while (--count > 0);
    }
    return map;
  }


  /**
   * Refills the input buffer.
   *
   * @return      <code>false</code>, iff there was new input.
   *
   * @exception   java.io.IOException  if any I/O-Error occurs
   */
  private boolean zzRefill() throws java.io.IOException {

    /* first: make room (if you can) */
    if (zzStartRead > 0) {
      System.arraycopy(zzBuffer, zzStartRead,
                       zzBuffer, 0,
                       zzEndRead-zzStartRead);

      /* translate stored positions */
      zzEndRead-= zzStartRead;
      zzCurrentPos-= zzStartRead;
      zzMarkedPos-= zzStartRead;
      zzStartRead = 0;
    }

    /* is the buffer big enough? */
    if (zzCurrentPos >= zzBuffer.length) {
      /* if not: blow it up */
      char newBuffer[] = new char[zzCurrentPos*2];
      System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
      zzBuffer = newBuffer;
    }

    /* finally: fill the buffer with new input */
    int numRead = zzReader.read(zzBuffer, zzEndRead,
                                            zzBuffer.length-zzEndRead);

    if (numRead > 0) {
      zzEndRead+= numRead;
      return false;
    }
    // unlikely but not impossible: read 0 characters, but not at end of stream
    if (numRead == 0) {
      int c = zzReader.read();
      if (c == -1) {
        return true;
      } else {
        zzBuffer[zzEndRead++] = (char) c;
        return false;
      }
    }

    // numRead < 0
    return true;
  }


  /**
   * Closes the input stream.
   */
  public final void yyclose() throws java.io.IOException {
    zzAtEOF = true;            /* indicate end of file */
    zzEndRead = zzStartRead;  /* invalidate buffer    */

    if (zzReader != null)
      zzReader.close();
  }


  /**
   * Resets the scanner to read from a new input stream.
   * Does not close the old reader.
   *
   * All internal variables are reset, the old input stream
   * <b>cannot</b> be reused (internal buffer is discarded and lost).
   * Lexical state is set to <tt>ZZ_INITIAL</tt>.
   *
   * Internal scan buffer is resized down to its initial length, if it has grown.
   *
   * @param reader   the new input stream
   */
  public final void yyreset(java.io.Reader reader) {
    zzReader = reader;
    zzAtBOL  = true;
    zzAtEOF  = false;
    zzEOFDone = false;
    zzEndRead = zzStartRead = 0;
    zzCurrentPos = zzMarkedPos = 0;
    yyline = yychar = yycolumn = 0;
    zzLexicalState = YYINITIAL;
    if (zzBuffer.length > ZZ_BUFFERSIZE)
      zzBuffer = new char[ZZ_BUFFERSIZE];
  }


  /**
   * Returns the current lexical state.
   */
  public final int yystate() {
    return zzLexicalState;
  }


  /**
   * Enters a new lexical state
   *
   * @param newState the new lexical state
   */
  public final void yybegin(int newState) {
    zzLexicalState = newState;
  }


  /**
   * Returns the text matched by the current regular expression.
   */
  public final String yytext() {
    return new String( zzBuffer, zzStartRead, zzMarkedPos-zzStartRead );
  }


  /**
   * Returns the character at position <tt>pos</tt> from the
   * matched text.
   *
   * It is equivalent to yytext().charAt(pos), but faster
   *
   * @param pos the position of the character to fetch.
   *            A value from 0 to yylength()-1.
   *
   * @return the character at position pos
   */
  public final char yycharat(int pos) {
    return zzBuffer[zzStartRead+pos];
  }


  /**
   * Returns the length of the matched text region.
   */
  public final int yylength() {
    return zzMarkedPos-zzStartRead;
  }


  /**
   * Reports an error that occured while scanning.
   *
   * In a wellformed scanner (no or only correct usage of
   * yypushback(int) and a match-all fallback rule) this method
   * will only be called with things that "Can't Possibly Happen".
   * If this method is called, something is seriously wrong
   * (e.g. a JFlex bug producing a faulty scanner etc.).
   *
   * Usual syntax/scanner level error handling should be done
   * in error fallback rules.
   *
   * @param   errorCode  the code of the errormessage to display
   */
  private void zzScanError(int errorCode) {
    String message;
    try {
      message = ZZ_ERROR_MSG[errorCode];
    }
    catch (ArrayIndexOutOfBoundsException e) {
      message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
    }

    throw new Error(message);
  }


  /**
   * Pushes the specified amount of characters back into the input stream.
   *
   * They will be read again by then next call of the scanning method
   *
   * @param number  the number of characters to be read again.
   *                This number must not be greater than yylength()!
   */
  public void yypushback(int number)  {
    if ( number > yylength() )
      zzScanError(ZZ_PUSHBACK_2BIG);

    zzMarkedPos -= number;
  }


  /**
   * Resumes scanning until the next regular expression is matched,
   * the end of input is encountered or an I/O-Error occurs.
   *
   * @return      the next token
   * @exception   java.io.IOException  if any I/O-Error occurs
   */
  public int getNextToken() throws java.io.IOException {
    int zzInput;
    int zzAction;

    // cached fields:
    int zzCurrentPosL;
    int zzMarkedPosL;
    int zzEndReadL = zzEndRead;
    char [] zzBufferL = zzBuffer;
    char [] zzCMapL = ZZ_CMAP;

    int [] zzTransL = ZZ_TRANS;
    int [] zzRowMapL = ZZ_ROWMAP;
    int [] zzAttrL = ZZ_ATTRIBUTE;

    while (true) {
      zzMarkedPosL = zzMarkedPos;

      yychar+= zzMarkedPosL-zzStartRead;

      zzAction = -1;

      zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

      zzState = ZZ_LEXSTATE[zzLexicalState];

      // set up zzAction for empty match case:
      int zzAttributes = zzAttrL[zzState];
      if ( (zzAttributes & 1) == 1 ) {
        zzAction = zzState;
      }


      zzForAction: {
        while (true) {

          if (zzCurrentPosL < zzEndReadL)
            zzInput = zzBufferL[zzCurrentPosL++];
          else if (zzAtEOF) {
            zzInput = YYEOF;
            break zzForAction;
          }
          else {
            // store back cached positions
            zzCurrentPos  = zzCurrentPosL;
            zzMarkedPos   = zzMarkedPosL;
            boolean eof = zzRefill();
            // get translated positions and possibly new buffer
            zzCurrentPosL  = zzCurrentPos;
            zzMarkedPosL   = zzMarkedPos;
            zzBufferL      = zzBuffer;
            zzEndReadL     = zzEndRead;
            if (eof) {
              zzInput = YYEOF;
              break zzForAction;
            }
            else {
              zzInput = zzBufferL[zzCurrentPosL++];
            }
          }
          int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
          if (zzNext == -1) break zzForAction;
          zzState = zzNext;

          zzAttributes = zzAttrL[zzState];
          if ( (zzAttributes & 1) == 1 ) {
            zzAction = zzState;
            zzMarkedPosL = zzCurrentPosL;
            if ( (zzAttributes & 8) == 8 ) break zzForAction;
          }

        }
      }

      // store back cached position
      zzMarkedPos = zzMarkedPosL;

      switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
        case 1:
          { /* Break so we don't hit fall-through warning: */ break;/* ignore */
          }
        case 11: break;
        case 2:
          { return ALPHANUM;
          }
        case 12: break;
        case 3:
          { return CJ;
          }
        case 13: break;
        case 4:
          { return HOST;
          }
        case 14: break;
        case 5:
          { return NUM;
          }
        case 15: break;
        case 6:
          { return APOSTROPHE;
          }
        case 16: break;
        case 7:
          { return COMPANY;
          }
        case 17: break;
        case 8:
          { return ACRONYM_DEP;
          }
        case 18: break;
        case 9:
          { return ACRONYM;
          }
        case 19: break;
        case 10:
          { return EMAIL;
          }
        case 20: break;
        default:
          if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
            zzAtEOF = true;
            return YYEOF;
          }
          else {
            zzScanError(ZZ_NO_MATCH);
          }
      }
    }
  }


}
package team2485.comp; import edu.wpi.first.wpilibj.AnalogChannel; import edu.wpi.first.wpilibj.Relay; import edu.wpi.first.wpilibj.Solenoid; import team2485.auto.Sequencer; import team2485.auto.SequencerFactory; /** * Represents the catapult * * @author Marty Kausas * @author Camille Considine */ public class Catapult { public static int FULLY_RETRACTED = 1, SHORT_EXTENDED = 2, LONG_EXTENDED = 3, FULLY_EXTENDED = 4; private int currentShoeState = 1; private Solenoid solenoidShoeShort, solenoidShoeLong, solenoidBoot; private Relay centerSolenoid, sideSolenoids; private Sequencer shootSequencer; private AnalogChannel sonic; /** * Constructor using {@code Solenoid} objects * * @param sideSolenoids * @param solenoidMiddle * @param solenoidShoeAdjuster1 * @param solenoidShoeAdjuster2 * @param solenoidBoot * @param sonic */ public Catapult(Relay sideSolenoids, Relay solenoidMiddle, Solenoid solenoidShoeAdjuster1, Solenoid solenoidShoeAdjuster2, Solenoid solenoidBoot, AnalogChannel sonic) { this.sideSolenoids = sideSolenoids; this.centerSolenoid = solenoidMiddle; this.solenoidShoeShort = solenoidShoeAdjuster1; this.solenoidShoeLong = solenoidShoeAdjuster2; this.solenoidBoot = solenoidBoot; this.sonic = sonic; } /** * Constructs a new Catapult using parameter solenoid ports * * @param sideSolenoids * @param solenoidMiddlePort * @param solenoidShoeAdjusterPort1 * @param solenoidShoeAdjusterPort2 * @param solenoidBoot * @param sonic */ public Catapult(int sideSolenoids, int solenoidMiddlePort, int solenoidShoeAdjusterPort1, int solenoidShoeAdjusterPort2, int solenoidBoot, AnalogChannel sonic) { this(new Relay(sideSolenoids), new Relay(solenoidMiddlePort), new Solenoid(solenoidShoeAdjusterPort1), new Solenoid(solenoidShoeAdjusterPort2), new Solenoid(solenoidBoot), sonic); } /** * * @param shotType */ public void shoot(int shotType) { if (shootSequencer == null) { shootSequencer = SequencerFactory.createShot(shotType); } } public void run() { if (shootSequencer != null) { if 
(shootSequencer.run()) shootSequencer = null; } } /** * Extends the center catapult piston */ public void extendOne() { sideSolenoids.set(Relay.Value.kOff); centerSolenoid.set(Relay.Value.kForward); } /** * Extends the left and right catapult pistons */ public void extendTwo() { sideSolenoids.set(Relay.Value.kOn); centerSolenoid.set(Relay.Value.kOff); } /** * Extends all three catapult pistons */ public void extendThree() { sideSolenoids.set(Relay.Value.kOn); centerSolenoid.set(Relay.Value.kForward); } public void extendRightPiston() { sideSolenoids.set(Relay.Value.kOff); centerSolenoid.set(Relay.Value.kForward); } /** * Retracts all three catapult pistons */ public void retract() { sideSolenoids.set(Relay.Value.kOff); centerSolenoid.set(Relay.Value.kOff); } public void toggleShoe() { solenoidShoeShort.set(!solenoidShoeShort.get()); } /** * Puts the shoe piston into the intake position */ public void extendShoeFull() { setShoeState(FULLY_EXTENDED); } /** * Puts the shoe piston into the shooting position */ public void retractShoeFull() { setShoeState(FULLY_RETRACTED); } public void extendShoeLongPiston() { setShoeState(LONG_EXTENDED); } public void extendShoeShortPiston() { setShoeState(SHORT_EXTENDED); } /** * Extends the boot for a close pass */ public void extendBoot() { solenoidBoot.set(true); } /** * Retracts the boot after a close pass */ public void retractBoot() { solenoidBoot.set(false); } public boolean inCatapult() { return sonic.getValue() < 20; } public boolean shoeShortExtended() { return solenoidShoeShort.get(); } public boolean shoeLongExtended() { return solenoidShoeLong.get(); } public void reset() { retract(); retractBoot(); retractShoeFull(); } public void setShoeState(int state) { if (state >= FULLY_RETRACTED && state <= FULLY_EXTENDED) { currentShoeState = state; switch (currentShoeState) { // fully retracted case 1: solenoidShoeShort.set(false); solenoidShoeLong.set(false); break; // short piston extended case 2: solenoidShoeShort.set(true); 
solenoidShoeLong.set(false); break; // long piston extended case 3: solenoidShoeShort.set(false); solenoidShoeLong.set(true); break; // full shoe extended case 4: solenoidShoeShort.set(true); solenoidShoeLong.set(true); break; } } } public int getShoeState() { return currentShoeState; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.fortress.core.model; import java.io.Serializable; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import java.util.UUID; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import org.apache.commons.lang.StringUtils; /* ## OC2: Fortress Permission Structural Object Class objectclass ( 1.3.6.1.4.1.38088.2.2 NAME 'ftObject' DESC 'Fortress Permission Object Class' SUP organizationalunit STRUCTURAL MUST ( ftId $ ftObjNm ) MAY ( ftType ) ) */ /** * All entities ({@link User}, {@link Role}, {@link Permission}, {@link PwPolicy} {@link SDSet} etc...) are used to carry * data between three Fortress layers, starting with the (1) Manager layer down thru middle (2) Process layer and it's * processing rules into (3) DAO layer where persistence with the LDAP server occurs. 
* <h3></h3> * <h4>Fortress Processing Layers</h4> * <ol> * <li> * Manager layer: {@link org.apache.directory.fortress.core.impl.AdminMgrImpl}, * {@link org.apache.directory.fortress.core.impl.AccessMgrImpl}, * {@link org.apache.directory.fortress.core.impl.ReviewMgrImpl},... * </li> * <li> * Process layer: {@link org.apache.directory.fortress.core.impl.UserP}, * {@link org.apache.directory.fortress.core.impl.RoleP}, {@link org.apache.directory.fortress.core.impl.PermP},... * </li> * <li> * DAO layer: {@link org.apache.directory.fortress.core.impl.UserDAO}, * {@link org.apache.directory.fortress.core.impl.RoleDAO}, {@link org.apache.directory.fortress.core.impl.PermDAO},... * </li> * </ol> * Fortress clients first instantiate and populate a data entity before invoking any of the Manager APIs. The caller must * provide enough information to uniquely identity the entity target within ldap.<br> * For example, this entity requires {@link #setObjName} and {@link #setOpName} attributes set before passing into * {@link org.apache.directory.fortress.core.impl.AccessMgrImpl} APIs. * Create methods usually require more attributes (than Read) due to constraints enforced between entities. * <h4>Permission entity attribute usages include</h4> * <ul> * <li> * {@link #setObjName} and {@link #setOpName} attributes set before calling * {@link org.apache.directory.fortress.core.impl.AccessMgrImpl#checkAccess(Session, Permission)}. * </li> * <li> * {@link #getRoles} may be set after calling * {@link org.apache.directory.fortress.core.impl.ReviewMgrImpl#readPermission(Permission)} or * {@link org.apache.directory.fortress.core.impl.AccessMgrImpl#sessionPermissions(Session)}. * </li> * <li> * {@link #getUsers} may be set after calling * {@link org.apache.directory.fortress.core.impl.ReviewMgrImpl#readPermission(Permission)} or * {@link org.apache.directory.fortress.core.impl.AccessMgrImpl#sessionPermissions(Session)}. 
* </li> * </ul> * <h4>More Permission entity notes</h4> * <ul> * <li> * The unique key to locate a Permission entity (which is required for all authZ requests) is {@link Permission#objName} * and {@link Permission#opName}.<br> * </li> * <li> * The Permission entity is used to target function points within computer programs needing authorization. This * permission model allows a one-to-many relationship between the objects {@link PermObj} and operations * {@link Permission}. * <p> * <img src="../doc-files/RbacCore.png" alt=""> * </li> * <li> * The object to operation pairings enable application resources to be mapped to Fortress permissions in a way that is * natural for object oriented programming. * </li> * <li>Permissions = Object {@link PermObj} 1<->* Operations {@link Permission}</li> * <li>Permissions in Fortress may also be assigned directly to {@link #users}.</li> * <li> * Objects {@link #objName}, Operations {@link #opName}, Roles {@link #roles}, Users {@link #users} are not case * sensitive for reads or searches. * </li> * </ul> * <p> * The application entity that requires authorization will be mapped to the {@link PermObj} entity and the application's * methods or operation names will be mapped to {@link Permission} entities. * For example, the application entity 'ShoppingCart' has 5 operations - 'create', 'read', 'update', 'delete' and 'checkout'. * The following code will create the permissions and perform the necessary grants. 
 * <pre>
 * try
 * {
 *   // Instantiate the AdminMgr first
 *   AdminMgr adminMgr = AdminMgrFactory.createInstance();
 *
 *   // Now Instantiate the Object
 *   PermObj shoppingCart = new PermObj("ShoppingCart", "KillerBikes.com");
 *
 *   // Add it to the directory
 *   adminMgr.addPermObj(shoppingCart);
 *
 *   // Now create the permission operations and grant to applicable roles:
 *   Permission create = new Permission(shoppingCart.getObjName(), "create");
 *   adminMgr.addPermission(create);
 *   adminMgr.grantPermission(create, new Role("Customer"));
 *
 *   Permission read = new Permission(shoppingCart.getObjName(), "read");
 *   adminMgr.addPermission(read);
 *   adminMgr.grantPermission(read, new Role("Customer"));
 *
 *   Permission update = new Permission(shoppingCart.getObjName(), "update");
 *   adminMgr.addPermission(update);
 *   adminMgr.grantPermission(update, new Role("Admin"));
 *
 *   Permission delete = new Permission(shoppingCart.getObjName(), "delete");
 *   adminMgr.addPermission(delete);
 *   adminMgr.grantPermission(delete, new Role("Manager"));
 *
 *   Permission checkout = new Permission(shoppingCart.getObjName(), "checkout");
 *   adminMgr.addPermission(checkout);
 *   adminMgr.grantPermission(checkout, new Role("Customer"));
 * }
 * catch (SecurityException ex)
 * {
 *   // log or throw
 * }
 * </pre>
 * <h4>Notes on the shopping cart example</h4>
 * <ul>
 * <li>
 * {@link User} that activate 'Manager' role into their Sessions will be allowed access to 'ShoppingCart.delete'
 * permission.
 * </li>
 * <li>{@link User} that activate 'Admin' role may perform 'ShoppingCart.update'.</li>
 * <li>
 * {@link User} with 'Customer' role may perform 'ShoppingCart.create', 'ShoppingCart.read' and
 * 'ShoppingCart.checkout'.
* </li> * <li>{@link Role}s must exist in ldap before assignment here, see javadoc {@link Role} for details.</li> * </ul> * <h4>Permission Schema</h4> * This Permission entity extends a single standard ldap structural object class, {@code organizationalRole} with * one extension structural class, {@code ftOperation}, and two auxiliary object classes, {@code ftProperties}, {@code ftMods}. * The following 3 LDAP object classes will be mapped into this entity: * <p> * 1. {@code ftOperation} STRUCTURAL Object Class is assigned roles and/or users which grants permissions which can be later * checked using either 'checkAccess' or 'sessionPermissions APIs both methods that reside in the 'AccessMgrImpl' class. * <pre> * ------------------------------------------ * Fortress Operation Structural Object Class * objectclass ( 1.3.6.1.4.1.38088.2.3 * NAME 'ftOperation' * DESC 'Fortress Permission Operation Structural Object Class' * SUP organizationalrole * STRUCTURAL * MUST ( * ftId $ * ftPermName $ * ftObjNm $ * ftOpNm * ) * MAY ( * ftObjId $ * ftRoles $ * ftUsers $ * ftType * ) * ) * ------------------------------------------ * </pre> * 2. {@code ftProperties} AUXILIARY Object Class is used to store optional client or otherwise custom name/value pairs on * target entity.<br> * <code># This aux object class can be used to store custom attributes.</code><br> * <code># The properties collections consist of name/value pairs and are not constrainted by Fortress.</code><br> * <pre> * ------------------------------------------ * AC2: Fortress Properties Auxiliary Object Class * objectclass ( 1.3.6.1.4.1.38088.3.2 * NAME 'ftProperties' * DESC 'Fortress Properties AUX Object Class' * AUXILIARY * MAY ( * ftProps * ) * ) * ------------------------------------------ * </pre> * 3. {@code ftMods} AUXILIARY Object Class is used to store Fortress audit variables on target entity. 
* <pre> * ------------------------------------------ * Fortress Audit Modification Auxiliary Object Class * objectclass ( 1.3.6.1.4.1.38088.3.4 * NAME 'ftMods' * DESC 'Fortress Modifiers AUX Object Class' * AUXILIARY * MAY ( * ftModifier $ * ftModCode $ * ftModId * ) * ) * ------------------------------------------ * </pre> * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ @XmlRootElement(name = "fortPermission") @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "permission", propOrder = { "objName", "opName", "objId", "description", "abstractName", "internalId", "type", "users", "roles", "props", "dn", "admin", "paSets" }) public class Permission extends FortEntity implements Serializable { /** Default serialVersionUID */ private static final long serialVersionUID = 1L; private boolean admin; private String internalId; private String opName; private String objName; private String objId; private String abstractName; private String type; private String dn; private String description; @XmlElement(nillable = true) private Props props = new Props(); //private Properties props; @XmlElement(nillable = true) private Set<String> roles; @XmlElement(nillable = true) private Set<String> users; private Set<String> paSets; /** * This constructor is commonly used to create Permission that is a target for authorization API. * * @param objName maps to 'ftObjNm' attribute in 'ftOperation' object class. * @param opName maps to 'ftOpNm' attribute in 'ftOperation' object class. */ public Permission( String objName, String opName ) { this.objName = objName; this.opName = opName; } /** * Default constructor is used by internal Fortress classes and not intended for external use. */ public Permission() { } /** * Constructor is used for APIs that do not require opName for example ARBAC canGrant/canRevoke. * * @param objName maps to 'ftObjNm' attribute in 'ftOperation' object class. 
*/ public Permission( String objName ) { this.objName = objName; } /** * This constructor adds the objId which is used for creating Permissions that have an identity. * * @param objName maps to 'ftObjNm' attribute in 'ftOperation' object class. * @param opName maps to 'ftOpNm' attribute in 'ftOperation' object class. * @param objId maps to 'ftObjId' attribute in 'ftOperation' object class. */ public Permission( String objName, String opName, String objId ) { this.objName = objName; this.opName = opName; this.objId = objId; } /** * This constructor adds the admin flag which is used to process as Administrative permission. * * @param objName maps to 'ftObjNm' attribute in 'ftOperation' object class. * @param opName maps to 'ftOpNm' attribute in 'ftOperation' object class. * @param admin attribute is used to specify the Permission is to be stored and processed in the Administrative RBAC data sets. */ public Permission( String objName, String opName, boolean admin ) { this.objName = objName; this.opName = opName; this.admin = admin; } /** * Determine if this Permission is for RBAC or ARBAC processing. * * @return 'true' indicates administrative permission. */ public boolean isAdmin() { return admin; } /** * Set will determine if this Permission is for RBAC or ARBAC processing. * * @param admin contains is 'true' if ARBAC permission.. */ public void setAdmin( boolean admin ) { this.admin = admin; } /** * This attribute is required but is set automatically by Fortress DAO class before object is persisted to ldap. * This generated internal id is associated with Permission. This method is used by DAO class and * is not available to outside classes. The generated attribute maps to 'ftId' in 'ftOperation' object class. */ public void setInternalId() { // generate a unique id that will be used for the internal id: UUID uuid = UUID.randomUUID(); this.internalId = uuid.toString(); } /** * Set the internal id that is associated with Permission. 
This method is used by DAO class and * is generated automatically by Fortress. Attribute stored in LDAP cannot be changed by external caller. * This method can be used by client for search purposes only. * * @param internalId maps to 'ftId' in 'ftObject' object class. */ public void setInternalId( String internalId ) { this.internalId = internalId; } /** * Return the internal id that is associated with Permission. This attribute is generated automatically * by Fortress when new PermObj is added to directory and is not known or changeable by external client. * * @return attribute maps to 'ftId' in 'ftOperation' object class. */ public String getInternalId() { return internalId; } /** * Get the Permission operation name. This is used to specify method name - i.e. Create, Read, Update, Delete, ... * * @return opName maps to 'ftOpNm' attribute in 'ftOperation' object class. */ public String getOpName() { return opName; } /** * Set the Permission operation name. This is used to specify method name - i.e. Create, Read, Update, Delete, ... * * @param opName maps to 'ftOpNm' attribute in 'ftOperation' object class. */ public void setOpName( String opName ) { this.opName = opName; } /** * Get the authorization target's object name. This is typically mapped to the class name for component * that is the target for Fortress authorization check. For example 'PatientRelationshipInquire'. * * @return the name of the object which maps to 'ftObjNm' attribute in 'ftOperation' object class. */ public String getObjName() { return this.objName; } /** * This attribute is required and sets the authorization target object name. This name is typically derived from the * class name for component that is the target for Fortress authorization check. For example 'CustomerCheckOutPage'. 
* * @param objName The target object name */ public void setObjName( String objName ) { this.objName = objName; } /** * Return the Permission's abstract name which is the value of objName concatenated with OpName, i.e. 'Patient.checkin' * This value is automatically generated by the Fortress DAO class. * * @return abstractName maps to 'ftPermName' attribute in 'ftOperation' object class. */ public String getAbstractName() { return abstractName; } /** * Set the Permission's abstract name which is the value of objName concatenated with OpName, i.e. 'Patient.checkin' * This value is automatically generated by the Fortress DAO class and value will be ignored if set by external client. * * @param abstractName maps to 'ftPermName' attribute in 'ftOperation' object class. */ public void setAbstractName( String abstractName ) { this.abstractName = abstractName; } /** * Get the optional type name which is an unconstrained attribute on Permission entity. * * @return type maps to 'ftType' attribute in 'ftOperation' object class. */ public String getType() { return type; } /** * Set the optional type name which is an unconstrained attribute on Permission entity. * * @param type maps to 'ftType' attribute in 'ftOperation' object class. */ public void setType( String type ) { this.type = type; } /** * Get optional objId attribute which can be used to tag a Permission object with an identity, i.e. objName='Customer', objId='12345'. * This value is not constrained by any other object. * * @return maps to 'ftObjectId' attribute in 'ftOperation' object class. */ public String getObjId() { return objId; } /** * Set optional objId which can be used to tag a Permission object with an identity, i.e. objName='Account', objId='09876543'. * This value is not constrained by any other object. * * @param objId maps to 'ftObjectId' attribute in 'ftOperation' object class. 
*/ public void setObjId( String objId ) { this.objId = objId; } /** * Add a Role name to list of Roles that are valid for this Permission. This is optional attribute. * * @param role maps to 'ftRoles' attribute in 'ftOperation' object class. */ public void setRole( String role ) { if ( roles == null ) { roles = new TreeSet<>( String.CASE_INSENSITIVE_ORDER ); } this.roles.add( role ); } /** * Delete a Role name from list of Roles that are valid for this Permission. * * @param role maps to 'ftRoles' attribute in 'ftOperation' object class. */ public void delRole( String role ) { if ( this.roles != null ) { this.roles.remove( role ); } } /** * Return the collection of optional Roles that have been loaded into this entity. This is stored as a multi-occurring * attribute of Role names on the 'ftOperation' object class. * * @return Set containing the roles which maps to 'ftRoles' attribute in 'ftOperation' object class. */ public Set<String> getRoles() { return this.roles; } /** * Set the collection of optional Roles that have been loaded into this entity. This is stored as a multi-occurring * attribute of Role names on the 'ftOperation' object class. * * @param roles maps to 'ftRoles' attribute in 'ftOperation' object class. */ public void setRoles( Set<String> roles ) { this.roles = roles; } /** * Add a UserId to list of Users that are valid for this Permission. This is optional attribute. * * @param user maps to 'ftUsers' attribute in 'ftOperation' object class. */ public void setUser( String user ) { if ( users == null ) { users = new TreeSet<>( String.CASE_INSENSITIVE_ORDER ); } this.users.add( user ); } /** * Return the collection of optional Users that have been loaded into this entity. This is stored as a multi-occurring * attribute of ftUsers on the 'ftOperation' object class. * * @return Set containing the Users which maps to 'ftUsers' attribute in 'ftOperation' object class. 
*/ public Set<String> getUsers() { return this.users; } /** * Set the collection of optional Users that have been loaded into this entity. This is stored as a multi-occurring * attribute of userIds on the 'ftOperation' object class. * * @param users maps to 'ftUsers' attribute in 'ftOperation' object class. */ public void setUsers( Set<String> users ) { this.users = users; } public String getDn() { return dn; } public void setDn( String dn ) { this.dn = dn; } /** * Return the description field on this entity. The description is often used as a human readable label for the permission. * @return String containing the description. */ public String getDescription() { return description; } /** * Set the optional description field on this entity. The description is used as a human readable label for the permission. * * @param description String contains the description. */ public void setDescription( String description ) { this.description = description; } /** * Gets the value of the Props property. This method is used by Fortress Core and Rest and should not be called by external programs. * * @return * possible object is * {@link Props } * */ public Props getProps() { return props; } /** * Sets the value of the Props property. This method is used by Fortress Core and Rest and should not be called by external programs. * * @param value * allowed object is * {@link Props } * */ public void setProps( Props value ) { this.props = value; } /** * Add name/value pair to list of properties associated with Permission. These values are not constrained by Fortress. * Properties are optional. * * @param key contains property name and maps to 'ftProps' attribute in 'ftProperties' aux object class. 
* @param value The property value */ public void addProperty( String key, String value ) { Props.Entry entry = new Props.Entry(); entry.setKey( key ); entry.setValue( value ); this.props.getEntry().add( entry ); } /** * Get a name/value pair attribute from list of properties associated with Permission. These values are not constrained by Fortress. * Properties are optional. * * @param key contains property name and maps to 'ftProps' attribute in 'ftProperties' aux object class. * @return value containing name/value pair that maps to 'ftProps' attribute in 'ftProperties' aux object class. */ public String getProperty( String key ) { List<Props.Entry> props = this.props.getEntry(); Props.Entry keyObj = new Props.Entry(); keyObj.setKey( key ); String value = null; int indx = props.indexOf( keyObj ); if ( indx != -1 ) { Props.Entry entry = props.get( props.indexOf( keyObj ) ); value = entry.getValue(); } return value; } /** * Add new collection of name/value pairs to attributes associated with Permission. These values are not constrained by Fortress. * Properties are optional. * * @param props contains collection of name/value pairs and maps to 'ftProps' attribute in 'ftProperties' aux object class. */ public void addProperties( Properties props ) { if ( props != null ) { for ( Enumeration<?> e = props.propertyNames(); e.hasMoreElements(); ) { // This LDAP attr is stored as a name-value pair separated by a ':'. String key = ( String ) e.nextElement(); String val = props.getProperty( key ); addProperty( key, val ); } } } /** * Return the collection of name/value pairs to attributes associated with Permission. These values are not constrained by Fortress. * Properties are optional. * * @return Properties contains collection of name/value pairs and maps to 'ftProps' attribute in 'ftProperties' aux object class. 
*/ public Properties getProperties() { Properties properties = null; List<Props.Entry> props = this.props.getEntry(); if ( props.size() > 0 ) { properties = new Properties(); //int size = props.size(); for ( Props.Entry entry : props ) { String key = entry.getKey(); String val = entry.getValue(); properties.setProperty( key, val ); } } return properties; } /** * Matches the objName, opName and objId from two Permission entities. * * @param o contains a Permission entity. * @return boolean indicating both Permissions contain matching objName and opName attributes. */ @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } Permission that = ( Permission ) o; if ( StringUtils.isNotEmpty( objId ) ? !objId.equalsIgnoreCase( that.objId ) : StringUtils.isNotEmpty( that.objId ) ) { return false; } if ( objName != null ? !objName.equalsIgnoreCase( that.objName ) : that.objName != null ) { return false; } if ( opName != null ? !opName.equalsIgnoreCase( that.opName ) : that.opName != null ) { return false; } return true; } @Override public int hashCode() { int result = ( admin ? 1 : 0 ); result = 31 * result + ( internalId != null ? internalId.hashCode() : 0 ); result = 31 * result + ( opName != null ? opName.hashCode() : 0 ); result = 31 * result + ( objName != null ? objName.hashCode() : 0 ); result = 31 * result + ( objId != null ? objId.hashCode() : 0 ); result = 31 * result + ( abstractName != null ? abstractName.hashCode() : 0 ); result = 31 * result + ( type != null ? type.hashCode() : 0 ); result = 31 * result + ( dn != null ? dn.hashCode() : 0 ); result = 31 * result + ( description != null ? description.hashCode() : 0 ); result = 31 * result + ( props != null ? props.hashCode() : 0 ); result = 31 * result + ( roles != null ? roles.hashCode() : 0 ); result = 31 * result + ( users != null ? 
users.hashCode() : 0 ); return result; } @Override public String toString() { return "Permission{" + "objName='" + objName + '\'' + ", opName='" + opName + '\'' + ", objId='" + objId + '\'' + '}'; } public Set<String> getPaSets() { if ( paSets == null ) { paSets = new HashSet<String>(); } return paSets; } public void setPaSets( Set<String> paSets ){ this.paSets = paSets; } public void setPaSetName(String paSet) { if ( paSets == null ) { paSets = new HashSet<String>(); } this.paSets.add( paSet ); } }
package nsit.app.com.nsitapp.data; import android.content.ContentProvider; import android.content.ContentValues; import android.content.UriMatcher; import android.database.Cursor; import android.database.SQLException; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteQueryBuilder; import android.net.Uri; public class ContestProvider extends ContentProvider { private static final UriMatcher sUriMatcher = buildUriMatcher(); private static final int CONTEST = 100; private static final int CONTEST_WITH_ID = 101; private static final int CONTEST_WITH_SOURCE = 200; private static final SQLiteQueryBuilder sQueryBuilder; static { sQueryBuilder = new SQLiteQueryBuilder(); sQueryBuilder.setTables(ContestContract.ContestEntry.TABLE_NAME); } private ContestDBHelper mOpenHelper; private static UriMatcher buildUriMatcher() { final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH); final String authority = ContestContract.CONTENT_AUTHORITY; matcher.addURI(authority,ContestContract.PATH_CONTEST,CONTEST); matcher.addURI(authority,ContestContract.PATH_CONTEST + "/#",CONTEST_WITH_ID); matcher.addURI(authority,ContestContract.PATH_CONTEST + "/*",CONTEST_WITH_SOURCE); return matcher; } @Override public boolean onCreate() { mOpenHelper = new ContestDBHelper(getContext()); return true; } @Override public String getType(Uri uri) { int match = sUriMatcher.match(uri); switch (match) { case CONTEST: return ContestContract.ContestEntry.CONTENT_DIR_TYPE; case CONTEST_WITH_ID: return ContestContract.ContestEntry.CONTENT_ITEM_TYPE; case CONTEST_WITH_SOURCE: return ContestContract.ContestEntry.CONTENT_DIR_TYPE; default: throw new UnsupportedOperationException("Unknown uri : " + uri); } } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { int match = sUriMatcher.match(uri); Cursor retCursor; switch (match) { case CONTEST: retCursor = 
getContest(projection,selection,selectionArgs,sortOrder); break; case CONTEST_WITH_SOURCE: retCursor = getContestWithSource(uri,projection,sortOrder); break; case CONTEST_WITH_ID: retCursor = getContestWithID(uri,projection,sortOrder); break; default: throw new UnsupportedOperationException("Unknown uri : " + uri); } retCursor.setNotificationUri(getContext().getContentResolver(),uri); return retCursor; } private Cursor getContest(String[] projection, String selection, String[] selectionArgs, String sortOrder) { return sQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, selection, selectionArgs, null, null, sortOrder ); } private Cursor getContestWithSource(Uri uri,String[] projection, String sortOrder) { String source = ContestContract.ContestEntry.getSourceFromUri(uri); String selection = ContestContract.ContestEntry.COLUMN_SOURCE + " = ? "; String[] selectionArgs = new String[] {source}; return sQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, selection, selectionArgs, null, null, sortOrder); } private Cursor getContestWithID(Uri uri,String[] projection, String sortOrder) { int id = ContestContract.ContestEntry.getIDFromUri(uri); String selection = ContestContract.ContestEntry._ID + " = ? 
"; String[] selectionArgs = new String[] {Integer.toString(id)}; return sQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, selection, selectionArgs, null, null, sortOrder); } @Override public Uri insert(Uri uri, ContentValues contentValues) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = sUriMatcher.match(uri); Uri returnUri; switch(match) { case CONTEST: { long _id = db.insert(ContestContract.ContestEntry.TABLE_NAME,null,contentValues); if (_id>0) { returnUri = ContestContract.ContestEntry.buildContestUriWithId(_id); } else { throw new SQLException("Failed to insert row into + "+uri); } break; } default: throw new UnsupportedOperationException("Unknown uri : " + uri); } getContext().getContentResolver().notifyChange(uri,null); return returnUri; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = sUriMatcher.match(uri); int rowsDeleted; switch(match) { case CONTEST: { rowsDeleted = db.delete(ContestContract.ContestEntry.TABLE_NAME, selection, selectionArgs); break; } default: throw new UnsupportedOperationException("Unknown uri : " + uri); } if (rowsDeleted!=0) getContext().getContentResolver().notifyChange(uri,null); return rowsDeleted; } @Override public int update(Uri uri, ContentValues newValues, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = sUriMatcher.match(uri); int rowsUpdated; switch(match) { case CONTEST: { rowsUpdated = db.update(ContestContract.ContestEntry.TABLE_NAME, newValues, selection, selectionArgs); break; } default: throw new UnsupportedOperationException("Unknown uri : " + uri); } if (rowsUpdated!=0) getContext().getContentResolver().notifyChange(uri,null); return rowsUpdated; } @Override public int bulkInsert(Uri uri, ContentValues[] values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = 
sUriMatcher.match(uri); switch (match) { case CONTEST: db.beginTransaction(); int returnCount = 0; try { for (ContentValues value : values) { if (value.getAsString(ContestContract.ContestEntry.COLUMN_DESCRIPTION).equals("")) { value.remove(ContestContract.ContestEntry.COLUMN_DESCRIPTION); value.put(ContestContract.ContestEntry.COLUMN_DESCRIPTION, "No description available."); } long _id = db.insert(ContestContract.ContestEntry.TABLE_NAME, null, value); if (_id != -1) { returnCount++; } } db.setTransactionSuccessful(); } finally { db.endTransaction(); } getContext().getContentResolver().notifyChange(uri, null); return returnCount; default: return super.bulkInsert(uri, values); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.kinesis.stream;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessorFactory;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.WorkerStateChangeListener;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.json.JsonRecordSetWriter;
import org.apache.nifi.json.JsonTreeReader;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.util.MockProcessContext;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;

import java.net.InetAddress;
import java.net.UnknownHostException;

import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for ConsumeKinesisStream: property validation (static and dynamic
 * KCL properties) and, via {@link MockConsumeKinesisStream}, the KCL Worker
 * start-up path without real AWS connectivity. The assertions below pin the
 * exact validation-failure messages produced by the processor/TestRunner.
 */
public class TestConsumeKinesisStream {
    private final TestRunner runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);

    // Configure the minimum valid property set so individual tests only need to perturb what they test.
    @Before
    public void setUp() throws InitializationException {
        runner.setProperty(ConsumeKinesisStream.KINESIS_STREAM_NAME, "test-stream");
        runner.setProperty(ConsumeKinesisStream.APPLICATION_NAME, "test-application");

        // use anonymous credentials by default
        final ControllerService credentialsProvider = new AWSCredentialsProviderControllerService();
        runner.addControllerService("credentials-provider", credentialsProvider);
        runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
        runner.assertValid(credentialsProvider);
        runner.enableControllerService(credentialsProvider);
        runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-provider");
        runner.assertValid();
    }

    // Processor remains valid (and schedulable) with explicit access/secret key credentials.
    @Test
    public void testValidWithCredentials() throws InitializationException {
        final ControllerService credentialsProvider = new AWSCredentialsProviderControllerService();
        runner.addControllerService("credentials-provider", credentialsProvider);
        runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.ACCESS_KEY, "access-key");
        runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.SECRET_KEY, "secret-key");
        runner.assertValid(credentialsProvider);
        runner.enableControllerService(credentialsProvider);
        runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-provider");
        runner.assertValid();

        ((ConsumeKinesisStream) runner.getProcessor()).onScheduled(runner.getProcessContext());
    }

    // Each of the three mandatory properties produces its own "is required" validation failure.
    @Test
    public void testMissingMandatoryProperties() {
        runner.removeProperty(ConsumeKinesisStream.KINESIS_STREAM_NAME);
        runner.removeProperty(ConsumeKinesisStream.APPLICATION_NAME);
        runner.removeProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE);
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::run);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 3 validation failures:\n" +
                        "'%s' is invalid because %s is required\n" +
                        "'%s' is invalid because %s is required\n" +
                        "'%s' is invalid because %s is required\n",
                ConsumeKinesisStream.KINESIS_STREAM_NAME.getDisplayName(), ConsumeKinesisStream.KINESIS_STREAM_NAME.getDisplayName(),
                ConsumeKinesisStream.APPLICATION_NAME.getDisplayName(), ConsumeKinesisStream.APPLICATION_NAME.getDisplayName(),
                ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE.getDisplayName(), ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE.getDisplayName()
        )));
    }

    // 12 bad property values yield 14 validation failures (the two controller-service
    // properties each fail twice: unknown reference + invalid identifier).
    @Test
    public void testInvalidProperties() {
        runner.setProperty(ConsumeKinesisStream.APPLICATION_NAME, " ");
        runner.setProperty(ConsumeKinesisStream.TIMESTAMP_FORMAT, "not-valid-format");
        runner.setProperty(ConsumeKinesisStream.RETRY_WAIT, "not-a-long");
        runner.setProperty(ConsumeKinesisStream.NUM_RETRIES, "not-an-int");
        runner.setProperty(ConsumeKinesisStream.FAILOVER_TIMEOUT, "not-a-period");
        runner.setProperty(ConsumeKinesisStream.GRACEFUL_SHUTDOWN_TIMEOUT, "not-a-period");
        runner.setProperty(ConsumeKinesisStream.CHECKPOINT_INTERVAL, "not-a-long");
        runner.setProperty(ConsumeKinesisStream.REPORT_CLOUDWATCH_METRICS, "not-a-boolean");
        runner.setProperty(ConsumeKinesisStream.DYNAMODB_ENDPOINT_OVERRIDE, "not-a-url");
        runner.setProperty(ConsumeKinesisStream.INITIAL_STREAM_POSITION, "not-an-enum-match");
        runner.setProperty(ConsumeKinesisStream.RECORD_READER, "not-a-reader");
        runner.setProperty(ConsumeKinesisStream.RECORD_WRITER, "not-a-writer");
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::run);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 14 validation failures:\n" +
                        "'%s' validated against ' ' is invalid because %s must contain at least one character that is not white space\n" +
                        "'%s' validated against 'not-a-reader' is invalid because Property references a Controller Service that does not exist\n" +
                        "'%s' validated against 'not-a-writer' is invalid because Property references a Controller Service that does not exist\n" +
                        "'%s' validated against 'not-a-url' is invalid because Not a valid URL\n" +
                        "'%s' validated against 'not-an-enum-match' is invalid because Given value not found in allowed set '%s, %s, %s'\n" +
                        "'%s' validated against 'not-valid-format' is invalid because Must be a valid java.time.DateTimeFormatter pattern, e.g. %s\n" +
                        "'%s' validated against 'not-a-period' is invalid because Must be of format <duration> <TimeUnit> where <duration> is a non-negative integer and " +
                        "TimeUnit is a supported Time Unit, such as: nanos, millis, secs, mins, hrs, days\n" +
                        "'%s' validated against 'not-a-period' is invalid because Must be of format <duration> <TimeUnit> where <duration> is a non-negative integer and " +
                        "TimeUnit is a supported Time Unit, such as: nanos, millis, secs, mins, hrs, days\n" +
                        "'%s' validated against 'not-a-long' is invalid because Must be of format <duration> <TimeUnit> where <duration> is a non-negative integer and " +
                        "TimeUnit is a supported Time Unit, such as: nanos, millis, secs, mins, hrs, days\n" +
                        "'%s' validated against 'not-an-int' is invalid because not a valid integer\n" +
                        "'%s' validated against 'not-a-long' is invalid because Must be of format <duration> <TimeUnit> where <duration> is a non-negative integer and " +
                        "TimeUnit is a supported Time Unit, such as: nanos, millis, secs, mins, hrs, days\n" +
                        "'%s' validated against 'not-a-boolean' is invalid because Given value not found in allowed set 'true, false'\n" +
                        "'%s' validated against 'not-a-reader' is invalid because Invalid Controller Service: not-a-reader is not a valid Controller Service Identifier\n" +
                        "'%s' validated against 'not-a-writer' is invalid because Invalid Controller Service: not-a-writer is not a valid Controller Service Identifier\n",
                ConsumeKinesisStream.APPLICATION_NAME.getName(), ConsumeKinesisStream.APPLICATION_NAME.getName(),
                ConsumeKinesisStream.RECORD_READER.getDisplayName(),
                ConsumeKinesisStream.RECORD_WRITER.getDisplayName(),
                ConsumeKinesisStream.DYNAMODB_ENDPOINT_OVERRIDE.getName(),
                ConsumeKinesisStream.INITIAL_STREAM_POSITION.getName(), ConsumeKinesisStream.LATEST.getDisplayName(),
                ConsumeKinesisStream.TRIM_HORIZON.getDisplayName(), ConsumeKinesisStream.AT_TIMESTAMP.getDisplayName(),
                ConsumeKinesisStream.TIMESTAMP_FORMAT.getName(), RecordFieldType.TIMESTAMP.getDefaultFormat(),
                ConsumeKinesisStream.FAILOVER_TIMEOUT.getName(),
                ConsumeKinesisStream.GRACEFUL_SHUTDOWN_TIMEOUT.getName(),
                ConsumeKinesisStream.CHECKPOINT_INTERVAL.getName(),
                ConsumeKinesisStream.NUM_RETRIES.getName(),
                ConsumeKinesisStream.RETRY_WAIT.getName(),
                ConsumeKinesisStream.REPORT_CLOUDWATCH_METRICS.getName(),
                ConsumeKinesisStream.RECORD_READER.getDisplayName(),
                ConsumeKinesisStream.RECORD_WRITER.getDisplayName()
        )));
    }

    // AT_TIMESTAMP initial position requires a Stream Position Timestamp.
    @Test
    public void testMissingStreamPositionTimestamp() {
        runner.setProperty(ConsumeKinesisStream.INITIAL_STREAM_POSITION, InitialPositionInStream.AT_TIMESTAMP.toString());
        runner.removeProperty(ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP);
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::run);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 1 validation failures:\n" +
                        "'%s' is invalid because %s must be provided when %s is %s\n",
                ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP.getName(),
                ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP.getDisplayName(),
                ConsumeKinesisStream.INITIAL_STREAM_POSITION.getDisplayName(),
                InitialPositionInStream.AT_TIMESTAMP
        )));
    }

    // Timestamp must parse with the configured Timestamp Format ("12:00:00" vs "yyyy-MM-dd").
    @Test
    public void testInvalidStreamPositionTimestamp() {
        runner.setProperty(ConsumeKinesisStream.INITIAL_STREAM_POSITION, InitialPositionInStream.AT_TIMESTAMP.toString());
        runner.setProperty(ConsumeKinesisStream.TIMESTAMP_FORMAT, "yyyy-MM-dd");
        runner.setProperty(ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP, "12:00:00");
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::run);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 1 validation failures:\n" +
                        "'%s' is invalid because %s must be parsable by %s\n",
                ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP.getName(),
                ConsumeKinesisStream.STREAM_POSITION_TIMESTAMP.getDisplayName(),
                ConsumeKinesisStream.TIMESTAMP_FORMAT.getDisplayName()
        )));
    }

    // Record Reader and Record Writer must be configured together: reader without writer is invalid.
    @Test
    public void testInvalidRecordReaderWithoutRecordWriter() throws InitializationException {
        final ControllerService service = new JsonTreeReader();
        runner.addControllerService("record-reader", service);
        runner.enableControllerService(service);
        runner.setProperty(ConsumeKinesisStream.RECORD_READER, "record-reader");
        runner.removeProperty(ConsumeKinesisStream.RECORD_WRITER);
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::assertValid);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 1 validation failures:\n" +
                        "'%s' is invalid because %s must be set if %s is set in order to write FlowFiles as Records.\n",
                ConsumeKinesisStream.RECORD_WRITER.getName(),
                ConsumeKinesisStream.RECORD_WRITER.getDisplayName(),
                ConsumeKinesisStream.RECORD_READER.getDisplayName()
        )));
    }

    // Mirror of the previous test: writer without reader is equally invalid.
    @Test
    public void testInvalidRecordWriterWithoutRecordReader() throws InitializationException {
        final ControllerService service = new JsonRecordSetWriter();
        runner.addControllerService("record-writer", service);
        runner.enableControllerService(service);
        runner.setProperty(ConsumeKinesisStream.RECORD_WRITER, "record-writer");
        runner.removeProperty(ConsumeKinesisStream.RECORD_READER);
        runner.assertNotValid();

        final AssertionError assertionError = assertThrows(AssertionError.class, runner::assertValid);
        assertThat(assertionError.getMessage(), equalTo(String.format("Processor has 1 validation failures:\n" +
                        "'%s' is invalid because %s must be set if %s is set in order to write FlowFiles as Records.\n",
                ConsumeKinesisStream.RECORD_READER.getName(),
                ConsumeKinesisStream.RECORD_READER.getDisplayName(),
                ConsumeKinesisStream.RECORD_WRITER.getDisplayName()
        )));
    }

    @Test
    public void testRunWorkerWithCredentials() throws UnknownHostException, InitializationException, InterruptedException {
        runWorker(true, false);
    }

    @Test
    public void testRunWorkerUnexpectedShutdown() throws UnknownHostException, InitializationException, InterruptedException {
        runWorker(true, true);
    }

    @Test
    public void testRunWorkerWithoutCredentials() throws UnknownHostException, InitializationException, InterruptedException {
        runWorker(false, false);
    }

    // Dynamic (user-defined) properties are mapped onto KinesisClientLibConfiguration
    // via BeanUtils; this exercises every rejection path of that mapping.
    @Test
    public void testInvalidDynamicKCLProperties() {
        // blank properties
        runner.setProperty("", "empty");
        runner.setProperty(" ", "blank");

        // invalid property names
        runner.setProperty("withPrefixNotAllowed", "a-value");
        runner.setProperty("unknownProperty", "a-third-value");
        runner.setProperty("toString", "cannot-call");

        // invalid property names (cannot use nested/indexed/mapped properties via BeanUtils)
        runner.setProperty("no.allowed", "no-.");
        runner.setProperty("no[allowed", "no-[");
        runner.setProperty("no]allowed", "no-]");
        runner.setProperty("no(allowed", "no-(");
        runner.setProperty("no)allowed", "no-)");

        // can't override static properties
        runner.setProperty("regionName", Regions.AF_SOUTH_1.getName());
        runner.setProperty("timestampAtInitialPositionInStream", "2021-01-01 00:00:00");
        runner.setProperty("initialPositionInStream", "AT_TIMESTAMP");
        runner.setProperty("dynamoDBEndpoint", "http://localhost:4566/dynamodb");
        runner.setProperty("kinesisEndpoint", "http://localhost:4566/kinesis");

        // invalid parameter conversions
        runner.setProperty("dynamoDBClientConfig", "too-complex");
        runner.setProperty("shutdownGraceMillis", "not-long");

        final AssertionError ae = assertThrows(AssertionError.class, runner::assertValid);
        assertThat(ae.getMessage(), startsWith("Processor has 17 validation failures:\n"));

        // blank properties
        assertThat(ae.getMessage(), containsString("'Property Name' validated against '' is invalid because Invalid attribute key: <Empty String>\n"));
        assertThat(ae.getMessage(), containsString("'Property Name' validated against ' ' is invalid because Invalid attribute key: <Empty String>\n"));

        // invalid property names
        assertThat(ae.getMessage(), containsString(
                "'withPrefixNotAllowed' validated against 'a-value' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'unknownProperty' validated against 'a-third-value' is invalid because Kinesis Client Library Configuration property with name " +
                        "UnknownProperty does not exist or is not writable\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'toString' validated against 'cannot-call' is invalid because Kinesis Client Library Configuration property with name " +
                        "ToString does not exist or is not writable\n"
        ));

        // invalid property names (cannot use nested/indexed/mapped properties via BeanUtils)
        assertThat(ae.getMessage(), containsString(
                "'no.allowed' validated against 'no-.' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'no[allowed' validated against 'no-[' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'no]allowed' validated against 'no-]' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'no(allowed' validated against 'no-(' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'no)allowed' validated against 'no-)' is invalid because Property name must not have a prefix of \"with\", " +
                        "must start with a letter and contain only letters, numbers or underscores\n"
        ));

        // can't override static properties
        assertThat(ae.getMessage(), containsString("'regionName' validated against 'af-south-1' is invalid because Use \"Region\" instead of a dynamic property\n"));
        assertThat(ae.getMessage(), containsString(
                "'timestampAtInitialPositionInStream' validated against '2021-01-01 00:00:00' is invalid because Use \"Stream Position Timestamp\" instead of a dynamic property\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'initialPositionInStream' validated against 'AT_TIMESTAMP' is invalid because Use \"Initial Stream Position\" instead of a dynamic property\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'dynamoDBEndpoint' validated against 'http://localhost:4566/dynamodb' is invalid because Use \"DynamoDB Override\" instead of a dynamic property\n"
        ));
        assertThat(ae.getMessage(), containsString(
                "'kinesisEndpoint' validated against 'http://localhost:4566/kinesis' is invalid because Use \"Endpoint Override URL\" instead of a dynamic property\n"
        ));

        // invalid parameter conversions
        assertThat(ae.getMessage(), containsString(
                "'dynamoDBClientConfig' validated against 'too-complex' is invalid because Kinesis Client Library Configuration property " +
                        "with name DynamoDBClientConfig cannot be used with value \"too-complex\" : " +
                        "Cannot invoke com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration.withDynamoDBClientConfig on bean class " +
                        "'class com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration' - argument type mismatch - " +
                        "had objects of type \"java.lang.String\" but expected signature \"com.amazonaws.ClientConfiguration\"\n"
        ));
        assertThat(ae.getMessage(), containsString("'shutdownGraceMillis' validated against 'not-long' is invalid because " +
                "Kinesis Client Library Configuration property with name ShutdownGraceMillis " +
                "cannot be used with value \"not-long\" : Value of ShutdownGraceMillis should be positive, but current value is 0\n"));
    }

    // One valid dynamic property per supported value type.
    @Test
    public void testValidDynamicKCLProperties() {
        runner.setProperty("billingMode", "PROVISIONED"); // enum
        runner.setProperty("idleMillisBetweenCalls", "1000"); // long
        runner.setProperty("cleanupLeasesUponShardCompletion", "true"); // boolean
        runner.setProperty("initialLeaseTableReadCapacity", "1"); // int
        runner.setProperty("DataFetchingStrategy", "DEFAULT"); // String with uppercase leading character in property name

        runner.assertValid();
    }

    /*
     * Trigger a run of the ConsumeKinesisStream processor, but expect the KCL Worker to fail (it needs connections to AWS resources)
     * Assert that our code is being called by checking log output. The ITConsumeKinesisStream integration tests prove actual AWS connectivity
     */
    private void runWorker(final boolean withCredentials, final boolean waitForFailure) throws UnknownHostException, InitializationException, InterruptedException {
        final TestRunner mockConsumeKinesisStreamRunner = TestRunners.newTestRunner(MockConsumeKinesisStream.class);

        mockConsumeKinesisStreamRunner.setProperty(ConsumeKinesisStream.KINESIS_STREAM_NAME, "test-stream");
        mockConsumeKinesisStreamRunner.setProperty(ConsumeKinesisStream.APPLICATION_NAME, "test-application");
        mockConsumeKinesisStreamRunner.setProperty(ConsumeKinesisStream.REGION, Regions.EU_WEST_2.getName());
        mockConsumeKinesisStreamRunner.setProperty(ConsumeKinesisStream.TIMEOUT, "5 secs");

        final AWSCredentialsProviderService awsCredentialsProviderService = new AWSCredentialsProviderControllerService();
        mockConsumeKinesisStreamRunner.addControllerService("aws-credentials", awsCredentialsProviderService);
        if (withCredentials) {
            mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.ACCESS_KEY, "test-access");
            mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.SECRET_KEY, "test-secret");
        } else {
            mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
        }
        mockConsumeKinesisStreamRunner.assertValid(awsCredentialsProviderService);
        mockConsumeKinesisStreamRunner.enableControllerService(awsCredentialsProviderService);
        mockConsumeKinesisStreamRunner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "aws-credentials");

        // speed up init process for the unit test (and show use of dynamic properties to configure KCL)
        mockConsumeKinesisStreamRunner.setProperty("parentShardPollIntervalMillis", "1");
        mockConsumeKinesisStreamRunner.assertValid();

        // start the processor (but don't auto-shutdown to give Worker initialisation a chance to progress)
        mockConsumeKinesisStreamRunner.run(1, false);
        final MockConsumeKinesisStream processor = ((MockConsumeKinesisStream) mockConsumeKinesisStreamRunner.getProcessor());

        // WorkerState should get to INITIALIZING pretty quickly, but there's a chance it will still be at CREATED by the time we get here
        assertThat(processor.workerState.get(), anyOf(equalTo(WorkerStateChangeListener.WorkerState.INITIALIZING), equalTo(WorkerStateChangeListener.WorkerState.CREATED)));

        final String hostname = InetAddress.getLocalHost().getCanonicalHostName();
        assertKinesisClientLibConfiguration(processor.kinesisClientLibConfiguration, withCredentials, hostname);
        assertThat(processor.workerBuilder.build().getApplicationName(), equalTo("test-application"));

        if (!waitForFailure) {
            // re-trigger the processor to ensure the Worker isn't re-initialised when already running
            mockConsumeKinesisStreamRunner.run(1, false, false);
            assertTrue(((MockProcessContext) mockConsumeKinesisStreamRunner.getProcessContext()).isYieldCalled());

            // stop the processor
            mockConsumeKinesisStreamRunner.stop();
        } else {
            // keep re-triggering until the Worker's unexpected shutdown surfaces as a ProcessException
            for (int runs = 0; runs < 10; runs++) {
                try {
                    mockConsumeKinesisStreamRunner.run(1, false, false);
                    Thread.sleep(1_000);
                } catch (AssertionError e) {
                    assertThat(e.getCause(), instanceOf(ProcessException.class));
                    assertThat(e.getCause().getMessage(), equalTo("Worker has shutdown unexpectedly, possibly due to a configuration issue; check logs for details"));
                    assertTrue(((MockProcessContext) mockConsumeKinesisStreamRunner.getProcessContext()).isYieldCalled());
                    break;
                }
            }
        }
    }

    // Assert the captured KCL configuration reflects the processor properties set in runWorker().
    private void assertKinesisClientLibConfiguration(final KinesisClientLibConfiguration kinesisClientLibConfiguration,
                                                     final boolean withCredentials, final String hostname) {
        assertThat(kinesisClientLibConfiguration.getWorkerIdentifier(), startsWith(hostname));
        assertThat(kinesisClientLibConfiguration.getApplicationName(), equalTo("test-application"));
        assertThat(kinesisClientLibConfiguration.getStreamName(), equalTo("test-stream"));

        if (withCredentials) {
            assertThat(kinesisClientLibConfiguration.getKinesisCredentialsProvider().getCredentials().getAWSAccessKeyId(), equalTo("test-access"));
            assertThat(kinesisClientLibConfiguration.getKinesisCredentialsProvider().getCredentials().getAWSSecretKey(), equalTo("test-secret"));
            assertThat(kinesisClientLibConfiguration.getDynamoDBCredentialsProvider().getCredentials().getAWSAccessKeyId(), equalTo("test-access"));
            assertThat(kinesisClientLibConfiguration.getDynamoDBCredentialsProvider().getCredentials().getAWSSecretKey(), equalTo("test-secret"));
            assertThat(kinesisClientLibConfiguration.getCloudWatchCredentialsProvider().getCredentials().getAWSAccessKeyId(), equalTo("test-access"));
            assertThat(kinesisClientLibConfiguration.getCloudWatchCredentialsProvider().getCredentials().getAWSSecretKey(), equalTo("test-secret"));
        } else {
            // anonymous credentials: key and secret are absent
            assertThat(kinesisClientLibConfiguration.getKinesisCredentialsProvider().getCredentials().getAWSAccessKeyId(), nullValue());
            assertThat(kinesisClientLibConfiguration.getKinesisCredentialsProvider().getCredentials().getAWSSecretKey(), nullValue());
            assertThat(kinesisClientLibConfiguration.getDynamoDBCredentialsProvider().getCredentials().getAWSAccessKeyId(), nullValue());
            assertThat(kinesisClientLibConfiguration.getDynamoDBCredentialsProvider().getCredentials().getAWSSecretKey(), nullValue());
            assertThat(kinesisClientLibConfiguration.getCloudWatchCredentialsProvider().getCredentials().getAWSAccessKeyId(), nullValue());
            assertThat(kinesisClientLibConfiguration.getCloudWatchCredentialsProvider().getCredentials().getAWSSecretKey(), nullValue());
        }

        assertThat(kinesisClientLibConfiguration.getRegionName(), equalTo(Regions.EU_WEST_2.getName()));
        assertThat(kinesisClientLibConfiguration.getInitialPositionInStream(), equalTo(InitialPositionInStream.LATEST));
        assertThat(kinesisClientLibConfiguration.getDynamoDBEndpoint(), nullValue());
        assertThat(kinesisClientLibConfiguration.getKinesisEndpoint(), nullValue());

        assertThat(kinesisClientLibConfiguration.getKinesisClientConfiguration(), instanceOf(ClientConfiguration.class));
        assertThat(kinesisClientLibConfiguration.getDynamoDBClientConfiguration(), instanceOf(ClientConfiguration.class));
        assertThat(kinesisClientLibConfiguration.getCloudWatchClientConfiguration(), instanceOf(ClientConfiguration.class));

        // set via the dynamic "parentShardPollIntervalMillis" property in runWorker()
        assertThat(kinesisClientLibConfiguration.getParentShardPollIntervalMillis(), equalTo(1L));
    }

    // public so TestRunners is able to see and instantiate the class for the tests
    public static class MockConsumeKinesisStream extends ConsumeKinesisStream {
        // capture the WorkerBuilder and KinesisClientLibConfiguration for unit test assertions
        KinesisClientLibConfiguration kinesisClientLibConfiguration;
        Worker.Builder workerBuilder;

        @Override
        Worker.Builder prepareWorkerBuilder(final ProcessContext context, final KinesisClientLibConfiguration kinesisClientLibConfiguration,
                                            final IRecordProcessorFactory factory) {
            workerBuilder = super.prepareWorkerBuilder(context, kinesisClientLibConfiguration, factory);
            return workerBuilder;
        }

        @Override
        KinesisClientLibConfiguration prepareKinesisClientLibConfiguration(final ProcessContext context, final String workerId) {
            kinesisClientLibConfiguration = super.prepareKinesisClientLibConfiguration(context, workerId);
            return kinesisClientLibConfiguration;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.junit.Test; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.IsNull.notNullValue; /** * */ @ESIntegTestCase.SuiteScopeTestCase public class HistogramIT extends ESIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final String MULTI_VALUED_FIELD_NAME = "l_values"; static int numDocs; static int interval; static int numValueBuckets, numValuesBuckets; static long[] valueCounts, valuesCounts; @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); interval = randomIntBetween(2, 5); numValueBuckets = numDocs / interval + 1; valueCounts = new long[numValueBuckets]; for (int i = 0; i < numDocs; i++) { final int bucket = (i + 1) / interval; valueCounts[bucket]++; } numValuesBuckets = (numDocs + 1) / interval + 1; valuesCounts = new long[numValuesBuckets]; for (int i = 0; i < numDocs; i++) { final int bucket1 = (i + 1) / interval; final int bucket2 = (i + 2) / interval; valuesCounts[bucket1]++; if (bucket1 != bucket2) { valuesCounts[bucket2]++; } } List<IndexRequestBuilder> builders = new 
ArrayList<>(); for (int i = 0; i < numDocs; i++) { builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder() .startObject() .field(SINGLE_VALUED_FIELD_NAME, i + 1) .startArray(MULTI_VALUED_FIELD_NAME).value(i + 1).value(i + 2).endArray() .field("tag", "tag" + i) .endObject())); } assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder() .startObject() .field(SINGLE_VALUED_FIELD_NAME, i * 2) .endObject())); } indexRandom(true, builders); ensureSearchable(); } @Test public void singleValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } public void singleValuedField_withOffset() throws Exception { int interval1 = 10; int offset = 5; SearchResponse response = client() .prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset)) .execute().actionGet(); // from setup we have between 6 and 20 documents, each with value 1 in test field int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? 
numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets)); // first bucket should start at -5, contain 4 documents Histogram.Bucket bucket = histo.getBuckets().get(0); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(-5L)); assertThat(bucket.getDocCount(), equalTo(4L)); // last bucket should have (numDocs % interval + 1) docs bucket = histo.getBuckets().get(0); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(numDocs%interval1 + 5L)); assertThat(bucket.getDocCount(), equalTo((numDocs % interval) + 1L)); } /** * Shift buckets by random offset between [2..interval]. From setup we have 1 doc per values from 1..numdocs. * Special care needs to be taken for expecations on counts in first and last bucket. */ @Test public void singleValuedField_withRandomOffset() throws Exception { int offset = randomIntBetween(2, interval); SearchResponse response = client() .prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset)) .execute().actionGet(); assertSearchResponse(response); // shifting by offset>2 creates new extra bucket [0,offset-1] // if offset is >= number of values in original last bucket, that effect is canceled int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? 
numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets)); int docsCounted = 0; for (int i = 0; i < expectedNumberOfBuckets; ++i) { Histogram.Bucket bucket = histo.getBuckets().get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i-1) * interval + offset))); if (i==0) { // first bucket long expectedFirstBucketCount = offset-1; assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); docsCounted += expectedFirstBucketCount; } else if(i<expectedNumberOfBuckets-1) { assertThat(bucket.getDocCount(), equalTo((long) interval)); docsCounted += interval; } else { assertThat(bucket.getDocCount(), equalTo((long) numDocs - docsCounted)); } } } @Test public void singleValuedField_OrderedByKeyAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_ASC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void singleValuedField_OrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") 
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void singleValuedField_OrderedByCountAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_ASC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); long previousCount = Long.MIN_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); assertTrue(buckets.add(key)); assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)])); assertThat(bucket.getDocCount(), greaterThanOrEqualTo(previousCount)); previousCount = bucket.getDocCount(); } } @Test public void 
singleValuedField_OrderedByCountDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); long previousCount = Long.MAX_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); assertTrue(buckets.add(key)); assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)])); assertThat(bucket.getDocCount(), lessThanOrEqualTo(previousCount)); previousCount = bucket.getDocCount(); } } @Test public void singleValuedField_WithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); Object[] propertiesKeys = (Object[]) histo.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count"); Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value"); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new 
ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat((long) propertiesKeys[i], equalTo((long) i * interval)); assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); assertThat((double) propertiesCounts[i], equalTo((double) s)); } } @Test public void singleValuedField_WithSubAggregation_Inherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .subAggregation(sum("sum"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), 
equalTo((double) s)); } } @Test public void singleValuedField_OrderedBySubAggregationAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", true)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.NEGATIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat(sum.getValue(), greaterThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedBySubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", false)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); 
assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); assertThat(sum.getValue(), lessThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedByMultiValuedSubAggregationAsc_Inherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", true)) .subAggregation(stats("stats"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.NEGATIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); 
assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(stats.getSum(), equalTo((double) s)); assertThat(stats.getSum(), greaterThanOrEqualTo(previousSum)); previousSum = s; } } @Test public void singleValuedField_OrderedByMultiValuedSubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", false)) .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Stats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == b) { s += j + 1; } } assertThat(stats.getSum(), equalTo((double) s)); assertThat(stats.getSum(), lessThanOrEqualTo(previousSum)); 
previousSum = s; } } @Test public void singleValuedField_OrderedBySubAggregationDesc_DeepOrderPath() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("filter>max", asc)) .subAggregation(filter("filter").filter(matchAllQuery()) .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); LongHashSet visited = new LongHashSet(); double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); int b = (int) (key / interval); assertThat(bucket.getDocCount(), equalTo(valueCounts[b])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Filter filter = bucket.getAggregations().get("filter"); assertThat(filter, notNullValue()); assertThat(bucket.getDocCount(), equalTo(filter.getDocCount())); Max max = filter.getAggregations().get("max"); assertThat(max, Matchers.notNullValue()); assertThat(max.getValue(), asc ? 
greaterThanOrEqualTo(prevMax) : lessThanOrEqualTo(prevMax));
            prevMax = max.getValue();
        }
    }

    // The value script adds 1 to every value, so doc i (raw value i + 1) lands in the bucket
    // of (i + 2) / interval.
    @Test
    public void singleValuedField_WithValueScript() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        // shifted values run from 2 to numDocs + 1, hence bucket indices from 2 / interval
        // up to (numDocs + 1) / interval inclusive
        final int numBuckets = (numDocs + 1) / interval - 2 / interval + 1;
        final long[] counts = new long[(numDocs + 1) / interval + 1];
        for (int i = 0; i < numDocs; ++i) {
            ++counts[(i + 2) / interval];
        }

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(numBuckets));

        for (int i = 0; i < numBuckets; i++) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            // key of the i-th returned bucket, starting from the first non-empty one
            int key = ((2 / interval) + i) * interval;
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key));
            assertThat(bucket.getDocCount(), equalTo(counts[key / interval]));
        }
    }

    // Plain histogram over the multi-valued field; expected counts (valuesCounts) were
    // precomputed in setup, one increment per distinct bucket each doc's two values fall into.
    @Test
    public void multiValuedField() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<?
extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(numValuesBuckets));
        // ascending key order: bucket i is keyed at i * interval with the precomputed count
        for (int i = 0; i < numValuesBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
        }
    }

    // Same histogram as multiValuedField but ordered by descending key: the bucket keyed at
    // i * interval must therefore appear at position numValuesBuckets - i - 1.
    @Test
    public void multiValuedField_OrderedByKeyDesc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets));

        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValuesBuckets; ++i) {
            // buckets come back largest key first, so walk from the end
            Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
        }
    }

    // The value script shifts both of each doc's values by +1: doc i contributes to buckets
    // (i + 2) / interval and (i + 3) / interval, counted once when they coincide.
    @Test
    public void multiValuedField_WithValueScript() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1;
        final long[] counts = new long[(numDocs + 2) / interval + 1];
        for (int i = 0; i < numDocs; ++i) {
            final int bucket1 = (i + 2) / interval;
            final int bucket2 = (i + 3) / interval;
            ++counts[bucket1];
            if (bucket1 != bucket2) {
                ++counts[bucket2];
            }
        }

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo,
notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); } } @Test public void multiValuedField_WithValueScript_WithInheritedSubAggregator() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo") .field(MULTI_VALUED_FIELD_NAME) .script(new Script("_value + 1")) .interval(interval) .subAggregation( terms(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())).order( Terms.Order.term(true)))).execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; for (int i = 0; i < numDocs; ++i) { final int bucket1 = (i + 2) / interval; final int bucket2 = (i + 3) / interval; ++counts[bucket1]; if (bucket1 != bucket2) { ++counts[bucket2]; } } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); Terms terms = bucket.getAggregations().get(MULTI_VALUED_FIELD_NAME); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo(MULTI_VALUED_FIELD_NAME)); int minTerm = Math.max(2, key - 1); int maxTerm = Math.min(numDocs + 2, (key / interval + 1) * interval); assertThat(terms.getBuckets().size(), equalTo(maxTerm - minTerm + 1)); Iterator<Terms.Bucket> iter = terms.getBuckets().iterator(); for (int j = minTerm; j <= maxTerm; ++j) { assertThat(iter.next().getKeyAsNumber().longValue(), equalTo((long) j)); } } } @Test public void script_SingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void script_SingleValue_WithSubAggregator_Inherited() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( histogram("histo").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).interval(interval) .subAggregation(sum("sum"))).execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void script_MultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); 
assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void script_MultiValued_WithAggregatorInherited() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( histogram("histo").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")).interval(interval) .subAggregation(sum("sum"))).execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i || (j + 2) / interval == i) { s += j + 1; s += j + 2; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void unmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), 
equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(0)); } @Test public void partiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void emptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1l).minDocCount(0) .subAggregation(histogram("sub_histo").interval(1l))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); Histogram histo = searchResponse.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List<? 
extends Bucket> buckets = histo.getBuckets(); Histogram.Bucket bucket = buckets.get(1); assertThat(bucket, Matchers.notNullValue()); histo = bucket.getAggregations().get("sub_histo"); assertThat(histo, Matchers.notNullValue()); assertThat(histo.getName(), equalTo("sub_histo")); assertThat(histo.getBuckets().isEmpty(), is(true)); } @Test public void singleValuedField_WithExtendedBounds() throws Exception { int lastDataBucketKey = (numValueBuckets - 1) * interval; // randomizing the number of buckets on the min bound // (can sometimes fall within the data range, but more frequently will fall before the data range) int addedBucketsLeft = randomIntBetween(0, numValueBuckets); long boundsMinKey = addedBucketsLeft * interval; if (frequently()) { boundsMinKey = -boundsMinKey; } else { addedBucketsLeft = 0; } long boundsMin = boundsMinKey + randomIntBetween(0, interval - 1); // randomizing the number of buckets on the max bound // (can sometimes fall within the data range, but more frequently will fall after the data range) int addedBucketsRight = randomIntBetween(0, numValueBuckets); long boundsMaxKeyDelta = addedBucketsRight * interval; if (rarely()) { addedBucketsRight = 0; boundsMaxKeyDelta = -boundsMaxKeyDelta; } long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an // error boolean invalidBoundsError = boundsMin > boundsMax; // constructing the newly expected bucket list int bucketsCount = numValueBuckets + addedBucketsLeft + addedBucketsRight; long[] extendedValueCounts = new long[bucketsCount]; System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length); SearchResponse response = null; try { response = client().prepareSearch("idx") .addAggregation(histogram("histo") .field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .minDocCount(0) 
.extendedBounds(boundsMin, boundsMax)) .execute().actionGet(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); return; } } catch (Exception e) { if (invalidBoundsError) { // expected return; } else { throw e; } } assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(bucketsCount)); long key = Math.min(boundsMinKey, 0); for (int i = 0; i < bucketsCount; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i])); key += interval; } } /** * see issue #9634, negative interval in histogram should raise exception */ public void testExeptionOnNegativerInterval() { try { client().prepareSearch("empty_bucket_idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)).execute().actionGet(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("Missing required field [interval]")); } } }
/*
 * Copyright 2012 gitblit.com.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gitblit.wicket.panels;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.OddEvenItem;
import org.apache.wicket.markup.repeater.RefreshingView;
import org.apache.wicket.markup.repeater.util.ModelIteratorAdapter;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.eclipse.jgit.lib.PersonIdent;

import com.gitblit.Constants.AccessPermission;
import com.gitblit.Constants.PermissionType;
import com.gitblit.Constants.RegistrantType;
import com.gitblit.models.RegistrantAccessPermission;
import com.gitblit.models.UserModel;
import com.gitblit.utils.DeepCopier;
import com.gitblit.utils.StringUtils;
import com.gitblit.wicket.WicketUtils;

/**
 * Allows user to manipulate registrant access permissions.
 * <p>
 * Renders one row per {@link RegistrantAccessPermission} (repository, user, or
 * team registrant) with a drop-down to change the permission level, plus a
 * small form to add a permission for a registrant that does not yet have one.
 * The visible rows can be filtered by the three {@link Show} toggle buttons.
 * <p>
 * NOTE(review): this panel mutates the {@code permissions} list passed to its
 * constructor in place (add + sort on submit) — presumably the caller saves
 * that same list afterwards; verify against callers.
 *
 * @author James Moger
 *
 */
public class RegistrantPermissionsPanel extends BasePanel {

	private static final long serialVersionUID = 1L;

	/**
	 * Row-visibility filter states for the permission list.
	 */
	public enum Show {
		specified, mutable, effective;

		/**
		 * Returns true if a permission row should be visible in this state.
		 * specified = mutable entries plus owners; mutable = editable entries
		 * only; effective = everything.
		 */
		public boolean show(RegistrantAccessPermission ap) {
			switch (this) {
			case specified:
				return ap.mutable || ap.isOwner();
			case mutable:
				return ap.mutable;
			case effective:
				return true;
			default:
				return true;
			}
		}
	}

	// currently selected row filter; toggled by the ShowStateButtons
	private Show activeState = Show.mutable;

	/**
	 * @param wicketId       wicket markup id of this panel
	 * @param registrantType kind of registrant being granted permissions
	 *                       (repository, user, or team)
	 * @param allRegistrants all candidate registrant names for the add form
	 * @param permissions    live list of existing permissions; mutated in place
	 *                       when a permission is added via the form
	 * @param translations   display strings for each AccessPermission value
	 */
	public RegistrantPermissionsPanel(String wicketId, RegistrantType registrantType, List<String> allRegistrants, final List<RegistrantAccessPermission> permissions, final Map<AccessPermission, String> translations) {
		super(wicketId);
		setOutputMarkupId(true);

		/*
		 * Permission view toggle buttons
		 */
		Form<Void> permissionToggleForm = new Form<Void>("permissionToggleForm");
		permissionToggleForm.add(new ShowStateButton("showSpecified", Show.specified));
		permissionToggleForm.add(new ShowStateButton("showMutable", Show.mutable));
		permissionToggleForm.add(new ShowStateButton("showEffective", Show.effective));
		add(permissionToggleForm);

		/*
		 * Permission repeating display
		 */
		RefreshingView<RegistrantAccessPermission> dataView = new RefreshingView<RegistrantAccessPermission>("permissionRow") {
			private static final long serialVersionUID = 1L;

			@Override
			protected Iterator<IModel<RegistrantAccessPermission>> getItemModels() {
				// the iterator returns RepositoryPermission objects, but we need it to
				// return models
				return new ModelIteratorAdapter<RegistrantAccessPermission>(permissions.iterator()) {
					@Override
					protected IModel<RegistrantAccessPermission> model(RegistrantAccessPermission permission) {
						return new CompoundPropertyModel<RegistrantAccessPermission>(permission);
					}
				};
			}

			@Override
			protected Item<RegistrantAccessPermission> newItem(String id, int index, IModel<RegistrantAccessPermission> model) {
				// this item sets markup class attribute to either 'odd' or
				// 'even' for decoration
				return new OddEvenItem<RegistrantAccessPermission>(id, index, model);
			}

			@Override
			public void populateItem(final Item<RegistrantAccessPermission> item) {
				final RegistrantAccessPermission entry = item.getModelObject();

				// 1) render the registrant cell, depending on registrant kind
				if (RegistrantType.REPOSITORY.equals(entry.registrantType)) {
					String repoName = StringUtils.stripDotGit(entry.registrant);
					if (!entry.isMissing() && StringUtils.findInvalidCharacter(repoName) == null) {
						// repository, strip .git and show swatch
						Fragment repositoryFragment = new Fragment("registrant", "repositoryRegistrant", RegistrantPermissionsPanel.this);
						Component swatch = new Label("repositorySwatch", "&nbsp;").setEscapeModelStrings(false);
						WicketUtils.setCssBackground(swatch, entry.toString());
						repositoryFragment.add(swatch);
						Label registrant = new Label("repositoryName", repoName);
						repositoryFragment.add(registrant);
						item.add(repositoryFragment);
					} else {
						// regex or missing
						Label label = new Label("registrant", entry.registrant);
						WicketUtils.setCssStyle(label, "font-weight: bold;");
						item.add(label);
					}
				} else if (RegistrantType.USER.equals(entry.registrantType)) {
					// user: show gravatar + username; fall back to the raw
					// registrant name when no UserModel is resolvable
					PersonIdent ident = new PersonIdent(entry.registrant, "");
					UserModel user = app().users().getUserModel(entry.registrant);
					if (user != null) {
						ident = new PersonIdent(user.getDisplayName(), user.emailAddress == null ? user.getDisplayName() : user.emailAddress);
					}
					Fragment userFragment = new Fragment("registrant", "userRegistrant", RegistrantPermissionsPanel.this);
					userFragment.add(new GravatarImage("userAvatar", ident, 20, false));
					userFragment.add(new Label("userName", entry.registrant));
					item.add(userFragment);
				} else {
					// team
					Fragment teamFragment = new Fragment("registrant", "teamRegistrant", RegistrantPermissionsPanel.this);
					teamFragment.add(new Label("teamName", entry.registrant));
					item.add(teamFragment);
				}

				// 2) render the permission-type badge
				switch (entry.permissionType) {
				case ADMINISTRATOR:
					Label administrator = new Label("pType", entry.source == null ? getString("gb.administrator") : entry.source);
					WicketUtils.setHtmlTooltip(administrator, getString("gb.administratorPermission"));
					WicketUtils.setCssClass(administrator, "label label-inverse");
					item.add(administrator);
					break;
				case OWNER:
					Label owner = new Label("pType", getString("gb.owner"));
					WicketUtils.setHtmlTooltip(owner, getString("gb.ownerPermission"));
					WicketUtils.setCssClass(owner, "label label-info");
					item.add(owner);
					break;
				case TEAM:
					Label team = new Label("pType", entry.source == null ? getString("gb.team") : entry.source);
					WicketUtils.setHtmlTooltip(team, MessageFormat.format(getString("gb.teamPermission"), entry.source));
					WicketUtils.setCssClass(team, "label label-success");
					item.add(team);
					break;
				case REGEX:
					Label regex = new Label("pType", "regex");
					if (!StringUtils.isEmpty(entry.source)) {
						WicketUtils.setHtmlTooltip(regex, MessageFormat.format(getString("gb.regexPermission"), entry.source));
					}
					WicketUtils.setCssClass(regex, "label");
					item.add(regex);
					break;
				default:
					if (entry.isMissing()) {
						// repository is missing, this permission will be removed on save
						Label missing = new Label("pType", getString("gb.missing"));
						WicketUtils.setCssClass(missing, "label label-important");
						WicketUtils.setHtmlTooltip(missing, getString("gb.missingPermission"));
						item.add(missing);
					} else {
						// standard permission
						item.add(new Label("pType", "").setVisible(false));
					}
					break;
				}

				// apply the active row filter
				item.setVisible(activeState.show(entry));

				// use ajax to get immediate update of permission level change
				// otherwise we can lose it if they change levels and then add
				// a new repository permission
				final DropDownChoice<AccessPermission> permissionChoice = new DropDownChoice<AccessPermission>(
						"permission", Arrays.asList(AccessPermission.values()), new AccessPermissionRenderer(translations));
				// only allow changing an explicitly defined permission
				// this is designed to prevent changing a regex permission in
				// a repository
				permissionChoice.setEnabled(entry.mutable);
				permissionChoice.setOutputMarkupId(true);
				if (entry.mutable) {
					permissionChoice.add(new AjaxFormComponentUpdatingBehavior("onchange") {
						private static final long serialVersionUID = 1L;

						@Override
						protected void onUpdate(AjaxRequestTarget target) {
							target.addComponent(permissionChoice);
						}
					});
				}
				item.add(permissionChoice);
			}
		};
		add(dataView);
		setOutputMarkupId(true);

		// filter out registrants we already have permissions for
		final List<String> registrants = new ArrayList<String>(allRegistrants);
		for (RegistrantAccessPermission rp : permissions) {
			if (rp.mutable) {
				// remove editable duplicates
				// this allows for specifying an explicit permission
				registrants.remove(rp.registrant);
			} else if (rp.isAdmin()) {
				// administrators can not have their permission changed
				registrants.remove(rp.registrant);
			} else if (rp.isOwner()) {
				// owners can not have their permission changed
				registrants.remove(rp.registrant);
			}
		}

		/*
		 * Add permission form
		 */
		IModel<RegistrantAccessPermission> addPermissionModel = new CompoundPropertyModel<RegistrantAccessPermission>(new RegistrantAccessPermission(registrantType));
		Form<RegistrantAccessPermission> addPermissionForm = new Form<RegistrantAccessPermission>("addPermissionForm", addPermissionModel);
		addPermissionForm.add(new DropDownChoice<String>("registrant", registrants));
		addPermissionForm.add(new DropDownChoice<AccessPermission>("permission", Arrays
				.asList(AccessPermission.NEWPERMISSIONS), new AccessPermissionRenderer(translations)));
		AjaxButton button = new AjaxButton("addPermissionButton", addPermissionForm) {
			private static final long serialVersionUID = 1L;

			@Override
			protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
				// add permission to our list
				RegistrantAccessPermission rp = (RegistrantAccessPermission) form.getModel().getObject();
				if (rp.permission == null) {
					return;
				}
				if (rp.registrant == null) {
					return;
				}
				RegistrantAccessPermission copy = DeepCopier.copy(rp);
				// a registrant name containing invalid characters is treated
				// as a regex-style permission
				if (StringUtils.findInvalidCharacter(copy.registrant) != null) {
					copy.permissionType = PermissionType.REGEX;
					copy.source = copy.registrant;
				}
				permissions.add(copy);
				// resort permissions after insert to convey idea of eval order
				Collections.sort(permissions);
				// remove registrant from available choices
				registrants.remove(rp.registrant);
				// force the panel to refresh
				target.addComponent(RegistrantPermissionsPanel.this);
			}
		};
		addPermissionForm.add(button);
		// only show add permission form if we have a registrant choice
		add(addPermissionForm.setVisible(registrants.size() > 0));
	}

	@Override
	protected boolean getStatelessHint() {
		// panel holds activeState and mutates the permission list; stateful
		return false;
	}

	/**
	 * Renders AccessPermission choices using the translation map supplied to
	 * the panel; the choice id is the list index.
	 */
	private class AccessPermissionRenderer implements IChoiceRenderer<AccessPermission> {

		private static final long serialVersionUID = 1L;

		private final Map<AccessPermission, String> map;

		public AccessPermissionRenderer(Map<AccessPermission, String> map) {
			this.map = map;
		}

		@Override
		public String getDisplayValue(AccessPermission type) {
			return map.get(type);
		}

		@Override
		public String getIdValue(AccessPermission type, int index) {
			return Integer.toString(index);
		}
	}

	/**
	 * Toggle button that switches the panel's row filter to its bound Show
	 * state and repaints the panel via ajax. The active button is highlighted
	 * in onBeforeRender.
	 */
	private class ShowStateButton extends AjaxButton {
		private static final long serialVersionUID = 1L;

		Show buttonState;

		public ShowStateButton(String wicketId, Show state) {
			super(wicketId);
			this.buttonState = state;
			setOutputMarkupId(true);
		}

		@Override
		protected void onBeforeRender() {
			// highlight the button whose state is currently active
			String cssClass = "btn";
			if (buttonState.equals(RegistrantPermissionsPanel.this.activeState)) {
				cssClass = "btn btn-info active";
			}
			WicketUtils.setCssClass(this, cssClass);
			super.onBeforeRender();
		}

		@Override
		protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
			RegistrantPermissionsPanel.this.activeState = buttonState;
			target.addComponent(RegistrantPermissionsPanel.this);
		}
	};
}
package codechicken.nei;

import codechicken.nei.api.*;
import codechicken.nei.guihook.GuiContainerManager;
import codechicken.nei.guihook.IContainerInputHandler;
import codechicken.nei.guihook.IContainerSlotClickHandler;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.gui.inventory.GuiContainerCreative;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Slot;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.ItemStack;

import java.awt.*;
import java.util.LinkedList;

import static codechicken.lib.gui.GuiDraw.getMousePosition;

/**
 * Central NEI client-side controller. Intercepts container slot clicks and
 * key/mouse input to implement NEI features: delete mode, shift-click mass
 * transfer, ctrl-click cheating, infinite-item replenishment, and
 * creative-inventory hotbar cycling.
 * <p>
 * Registered as a singleton via {@link #load()}; per-GUI state is reset by
 * {@link #load(GuiContainer)} each time a container screen opens.
 */
public class NEIController implements IContainerSlotClickHandler, IContainerInputHandler
{
    // singleton instance registered with GuiContainerManager
    private static NEIController instance = new NEIController();

    // manager for the currently open container GUI
    public static GuiContainerManager manager;
    public static FastTransferManager fastTransferManager;

    // when true, clicking slots deletes items instead of moving them
    private static boolean deleteMode;
    // index of the slot the currently-held stack was picked up from;
    // used as the source for mass transfer / throw-all
    private static int pickedUpFromSlot;
    // handler for the held stack if it is an "infinite" item, else null
    private static IInfiniteItemHandler heldStackInfinite;
    // last observed hotbar selection, for creative scroll detection
    private static int selectedItem;

    // held stack snapshot taken before a slot click, compared afterwards
    // to detect pickup/putdown
    private ItemStack firstheld;

    /**
     * Registers this controller's click and input handlers. Call once at startup.
     */
    public static void load() {
        GuiContainerManager.addSlotClickHandler(instance);
        GuiContainerManager.addInputHandler(instance);
    }

    /**
     * Resets per-GUI state when a container screen opens.
     */
    public static void load(GuiContainer gui) {
        manager = GuiContainerManager.getManager(gui);

        deleteMode = false;
        GuiInfo.clearGuiHandlers();
        fastTransferManager = null;
        if (!NEIClientConfig.isEnabled())
            return;

        fastTransferManager = new FastTransferManager();

        // a GUI may act as its own NEI handler
        if (gui instanceof INEIGuiHandler)
            API.registerNEIGuiHandler((INEIGuiHandler) gui);
    }

    /**
     * True while the player is drag-splitting a stack over multiple slots;
     * NEI click handling is suppressed during a spread.
     */
    public static boolean isSpreading(GuiContainer gui) {
        return gui.dragSplitting && gui.dragSplittingSlots.size() > 1;
    }

    /**
     * Replenishes any "infinite" item stacks in the player inventory and
     * syncs the result to the server. Requires the "item" permission and an
     * NEI server counterpart.
     */
    public static void updateUnlimitedItems(InventoryPlayer inventory) {
        if (!NEIClientConfig.canPerformAction("item") || !NEIClientConfig.hasSMPCounterPart())
            return;

        // snapshot the inventory before replenishment
        LinkedList<ItemStack> beforeStacks = new LinkedList<ItemStack>();
        for (int i = 0; i < inventory.getSizeInventory(); i++)
            beforeStacks.add(NEIServerUtils.copyStack(inventory.getStackInSlot(i)));

        // let each registered handler top up its infinite stacks locally
        for (int i = 0; i < inventory.getSizeInventory(); i++) {
            ItemStack stack = inventory.getStackInSlot(i);
            if (stack == null)
                continue;

            for (IInfiniteItemHandler handler : ItemInfo.infiniteHandlers)
                if (handler.canHandleItem(stack) && handler.isItemInfinite(stack))
                    handler.replenishInfiniteStack(inventory, i);
        }

        // push only the slots that actually changed through the SMP channel
        for (int i = 0; i < inventory.getSizeInventory(); i++) {
            ItemStack newstack = inventory.getStackInSlot(i);
            if (!NEIServerUtils.areStacksIdentical(beforeStacks.get(i), newstack)) {
                inventory.setInventorySlotContents(i, beforeStacks.get(i));//restore in case of SMP fail
                NEIClientUtils.setSlotContents(i, newstack, false);//sends via SMP handler ;)
            }
        }
    }

    /**
     * In creative inventory mode with ctrl held, converts hotbar selection
     * changes (including 8->0 / 0->8 wraparound) into creative scroll packets
     * while keeping the visible selection fixed.
     */
    public static void processCreativeCycling(InventoryPlayer inventory) {
        if (NEIClientConfig.invCreativeMode() && NEIClientUtils.controlKey()) {
            if (selectedItem != inventory.currentItem) {
                if (inventory.currentItem == selectedItem + 1 || (inventory.currentItem == 0 && selectedItem == 8))//forward
                {
                    NEICPH.sendCreativeScroll(1);
                    inventory.currentItem = selectedItem;
                } else if (inventory.currentItem == selectedItem - 1 || (inventory.currentItem == 8 && selectedItem == 0)) {
                    NEICPH.sendCreativeScroll(-1);
                    inventory.currentItem = selectedItem;
                }
            }
        }
        selectedItem = inventory.currentItem;
    }

    /**
     * Snapshots the held stack before the click so afterSlotClick can detect
     * a pickup or putdown by reference comparison.
     */
    @Override
    public void beforeSlotClick(GuiContainer gui, int slotIndex, int button, Slot slot, int modifier) {
        if (!NEIClientConfig.isEnabled())
            return;

        firstheld = NEIClientUtils.getHeldItem();
    }

    /**
     * Implements NEI click behaviors in priority order: delete mode, 64x
     * right-click crafting, ctrl-click cheat, shift-click mass transfer, and
     * shift-click-outside throw-all. Returns true when the click was consumed.
     */
    @Override
    public boolean handleSlotClick(GuiContainer gui, int slotIndex, int button, Slot slot, int modifier, boolean eventconsumed) {
        if (eventconsumed || !NEIClientConfig.isEnabled() || isSpreading(gui))
            return eventconsumed;

        if (deleteMode && slotIndex >= 0 && slot != null) {
            // shift+left deletes all of a type, right-click decrements,
            // plain click deletes the whole stack
            if (NEIClientUtils.shiftKey() && button == 0) {
                ItemStack itemstack1 = slot.getStack();
                if (itemstack1 != null)
                    NEIClientUtils.deleteItemsOfType(itemstack1);
            } else if (button == 1)
                NEIClientUtils.decreaseSlotStack(slot.slotNumber);
            else
                NEIClientUtils.deleteSlotStack(slot.slotNumber);
            return true;
        }

        if (button == 1 && slot instanceof SlotCrafting)//right click
        {
            for (int i1 = 0; i1 < 64; i1++)//click this slot 64 times
                manager.handleSlotClick(slot.slotNumber, button, 0);
            return true;
        }

        if (NEIClientUtils.controlKey() && slot != null && slot.getStack() != null && slot.isItemValid(slot.getStack())) {
            // ctrl-click: cheat a copy of the slot's stack to the player
            NEIClientUtils.cheatItem(slot.getStack(), button, 1);
            return true;
        }

        // GUIs with custom slot handling opt out of the transfer features
        if(GuiInfo.hasCustomSlots(gui))
            return false;

        // NOTE(review): this branch dereferences slot without a null check;
        // presumably slotIndex >= 0 guarantees slot != null here — confirm
        // against GuiContainerManager's dispatch.
        if (slotIndex >= 0 && NEIClientUtils.shiftKey() && NEIClientUtils.getHeldItem() != null && !slot.getHasStack()) {
            // shift-click with a held stack on an empty slot: place it, then
            // mass-transfer matching items from the pickup location
            ItemStack held = NEIClientUtils.getHeldItem();
            manager.handleSlotClick(slot.slotNumber, button, 0);
            if (slot.isItemValid(held) && !ItemInfo.fastTransferExemptions.contains(slot.getClass()))
                fastTransferManager.performMassTransfer(gui, pickedUpFromSlot, slotIndex, held);
            return true;
        }

        // slotIndex -999 is the "outside the GUI" click area
        if (slotIndex == -999 && NEIClientUtils.shiftKey() && button == 0) {
            fastTransferManager.throwAll(gui, pickedUpFromSlot);
            return true;
        }

        return false;
    }

    /**
     * Tracks where the held stack came from and maintains infinite-item
     * state after a click, syncing placements through the SMP channel.
     */
    @Override
    public void afterSlotClick(GuiContainer gui, int slotIndex, int button, Slot slot, int modifier) {
        if (!NEIClientConfig.isEnabled())
            return;

        ItemStack nowHeld = NEIClientUtils.getHeldItem();

        // held stack changed => the click picked something up or put it down
        if (firstheld != nowHeld)
            pickedUpFromSlot = slotIndex;

        if (NEIClientConfig.canPerformAction("item") && NEIClientConfig.hasSMPCounterPart()) {
            if (heldStackInfinite != null && slot != null && slot.inventory == Minecraft.getMinecraft().thePlayer.inventory) {
                // an infinite stack was placed into the player inventory;
                // notify the handler and sync the slot to the server
                ItemStack stack = slot.getStack();
                if (stack != null) {
                    heldStackInfinite.onPlaceInfinite(stack);
                }
                NEIClientUtils.setSlotContents(slotIndex, stack, true);
            }
            if (firstheld != nowHeld)
                heldStackInfinite = null;
            if (firstheld != nowHeld && nowHeld != null) {
                // newly picked-up stack: check whether it is infinite and
                // remember its handler (slot -999 = the cursor/held slot)
                for (IInfiniteItemHandler handler : ItemInfo.infiniteHandlers) {
                    if (handler.canHandleItem(nowHeld) && handler.isItemInfinite(nowHeld)) {
                        handler.onPickup(nowHeld);
                        NEIClientUtils.setSlotContents(-999, nowHeld, true);
                        heldStackInfinite = handler;
                        break;
                    }
                }
            }
        }
    }

    /**
     * Shift+drop over a slot throws the entire matching contents out of the
     * container (click to pick up, throw all, click to restore).
     */
    @Override
    public boolean lastKeyTyped(GuiContainer gui, char keyChar, int keyCode) {
        if (!NEIClientConfig.isEnabled() || GuiInfo.hasCustomSlots(gui) || isSpreading(gui))
            return false;

        Slot slot = GuiContainerManager.getSlotMouseOver(gui);
        if (slot == null)
            return false;

        int slotIndex = slot.slotNumber;
        if (keyCode == Minecraft.getMinecraft().gameSettings.keyBindDrop.getKeyCode() && NEIClientUtils.shiftKey()) {
            FastTransferManager.clickSlot(gui, slotIndex);
            fastTransferManager.throwAll(gui, slotIndex);
            FastTransferManager.clickSlot(gui, slotIndex);
            return true;
        }

        return false;
    }

    /**
     * Scroll over a populated slot: scroll up transfers the item out,
     * scroll down retrieves a matching item into the slot's inventory.
     */
    @Override
    public boolean mouseScrolled(GuiContainer gui, int mousex, int mousey, int scrolled) {
        if (!NEIClientConfig.isEnabled() || GuiInfo.hasCustomSlots(gui))
            return false;

        Point mousePos = getMousePosition();
        Slot mouseover = manager.window.getSlotAtPosition(mousePos.x, mousePos.y);
        if (mouseover != null && mouseover.getHasStack()) {
            if (scrolled > 0)
                fastTransferManager.transferItem(manager.window, mouseover.slotNumber);
            else
                fastTransferManager.retrieveItem(manager.window, mouseover.slotNumber);
            return true;
        }
        return false;
    }

    // remaining IContainerInputHandler callbacks are unused by this controller

    @Override
    public boolean keyTyped(GuiContainer gui, char keyChar, int keyCode) {
        return false;
    }

    @Override
    public boolean mouseClicked(GuiContainer gui, int mousex, int mousey, int button) {
        return false;
    }

    @Override
    public void onKeyTyped(GuiContainer gui, char keyChar, int keyID) {
    }

    @Override
    public void onMouseClicked(GuiContainer gui, int mousex, int mousey, int button) {
    }

    @Override
    public void onMouseDragged(GuiContainer gui, int mousex, int mousey, int button, long heldTime) {
    }

    @Override
    public void onMouseScrolled(GuiContainer gui, int mousex, int mousey, int scrolled) {
    }

    @Override
    public void onMouseUp(GuiContainer gui, int mousex, int mousey, int button) {
    }

    /**
     * Delete mode is unavailable in the creative inventory screen.
     */
    public static boolean canUseDeleteMode() {
        return !(NEIClientUtils.getGuiContainer() instanceof GuiContainerCreative);
    }

    public static void toggleDeleteMode() {
        if(canUseDeleteMode())
            deleteMode = !deleteMode;
    }

    public static boolean getDeleteMode() {
        return deleteMode;
    }
}
/** * Copyright 2015 Atos * Contact: Atos <roman.sosa@atos.net> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.atos.sla.datamodel.bean; import java.io.Serializable; import java.util.Date; import javax.persistence.Column; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.MappedSuperclass; import eu.atos.sla.datamodel.ICompensationDefinition; //@Entity //@Table(name="compensation") //@Access(AccessType.FIELD) @MappedSuperclass public abstract class CompensationDefinition implements Serializable, ICompensationDefinition { private static final long serialVersionUID = 1L; protected static int DEFAULT_COUNT = 0; protected static Date DEFAULT_INTERVAL = new Date(0); protected static String DEFAULT_VALUE_EXPRESSION = ""; protected static String DEFAULT_VALUE_UNIT = ""; protected static String DEFAULT_ACTION = ""; protected static String DEFAULT_VALIDITY = ""; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @Column(name = "id", unique = true, nullable = false) private Long id; @Column(name="kind", nullable=false) @Enumerated(EnumType.STRING) private CompensationKind kind; @Column(name = "time_interval", nullable=false) private Date timeInterval; @Column(name="number", nullable=false) private int count; @Column(name="value_unit", nullable=false) private String valueUnit; @Column(name="value_expression", 
nullable=false) private String valueExpression; @Column(name="action", nullable=false) private String action; @Column(name="validity", nullable=false) private String validity; public CompensationDefinition() { this.kind = ICompensationDefinition.CompensationKind.UNKNOWN; this.timeInterval = DEFAULT_INTERVAL; this.count = DEFAULT_COUNT; this.action = DEFAULT_ACTION; this.validity = DEFAULT_VALIDITY; this.valueExpression = DEFAULT_VALUE_EXPRESSION; this.valueUnit = DEFAULT_VALUE_UNIT; } /** * Constructor for wsag compensations */ protected CompensationDefinition(CompensationKind kind, Date timeInterval, String valueUnit, String valueExpression) { checkNotNull(kind, "kind"); checkNotNull(timeInterval, "timeInterval"); checkNotNull(valueUnit, "valueUnit"); checkNotNull(valueExpression, "valueExpression"); this.kind = kind; this.timeInterval = timeInterval; this.valueUnit = valueUnit; this.valueExpression = valueExpression; this.count = DEFAULT_COUNT; this.action = DEFAULT_ACTION; this.validity = DEFAULT_VALIDITY; } /** * Constructor for wsag compensations */ protected CompensationDefinition(CompensationKind kind, int count, String valueUnit, String valueExpression) { checkNotNull(kind, "kind"); checkNotNull(valueUnit, "valueUnit"); checkNotNull(valueExpression, "valueExpression"); this.kind = kind; this.count = count; this.valueUnit = valueUnit; this.valueExpression = valueExpression; this.timeInterval = DEFAULT_INTERVAL; this.action = DEFAULT_ACTION; this.validity = DEFAULT_VALIDITY; } /** * Constructor for extended compensations */ protected CompensationDefinition(CompensationKind kind, int count, Date timeInterval, String action, String valueUnit, String valueExpression, String validity) { checkNotNull(kind, "kind"); checkNotNull(timeInterval, "timeInterval"); checkNotNull(action, "action"); checkNotNull(valueUnit, "valueUnit"); checkNotNull(valueExpression, "valueExpression"); checkNotNull(validity, "validity"); this.kind = kind; this.count = count; 
this.timeInterval = timeInterval; this.valueUnit = valueUnit; this.valueExpression = valueExpression; this.action = action; this.validity = validity; } private void checkNotNull(Object o, String property) { if (o == null) { throw new NullPointerException(property + " cannot be null"); } } @Override public Long getId() { return id; } @Override public CompensationKind getKind() { return kind; } @Override public Date getTimeInterval() { return timeInterval; } @Override public Integer getCount() { return count; } @Override public String getValueUnit() { return valueUnit; } @Override public String getValueExpression() { return valueExpression; } @Override public String getAction() { return action; } @Override public String getValidity() { return validity; } @Override public String toString() { String fmt = ""; fmt = "<CompensationDefinition(" + "kind=%s,timeInterval=%d ms,count=%d,action='%s',valueUnit=%s,valueExpression=%s,validity=%s)>"; return String.format(fmt, kind.toString(), timeInterval.getTime(), count, action, valueUnit, valueExpression, validity); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((action == null) ? 0 : action.hashCode()); result = prime * result + count; result = prime * result + ((kind == null) ? 0 : kind.hashCode()); result = prime * result + ((timeInterval == null) ? 0 : timeInterval.hashCode()); result = prime * result + ((validity == null) ? 0 : validity.hashCode()); result = prime * result + ((valueExpression == null) ? 0 : valueExpression.hashCode()); result = prime * result + ((valueUnit == null) ? 
0 : valueUnit.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof CompensationDefinition)) { return false; } CompensationDefinition other = (CompensationDefinition) obj; if (action == null) { if (other.action != null) { return false; } } else if (!action.equals(other.action)) { return false; } if (count != other.count) { return false; } if (kind != other.kind) { return false; } /* * Direct Date compare gives a lot of problems with timezones */ if (timeInterval == null) { if (other.timeInterval != null) { return false; } } else if (timeInterval.getTime() != other.timeInterval.getTime()) { return false; } if (validity == null) { if (other.validity != null) { return false; } } else if (!validity.equals(other.validity)) { return false; } if (valueExpression == null) { if (other.valueExpression != null) { return false; } } else if (!valueExpression.equals(other.valueExpression)) { return false; } if (valueUnit == null) { if (other.valueUnit != null) { return false; } } else if (!valueUnit.equals(other.valueUnit)) { return false; } return true; } public static final ICompensationDefinition EMPTY_COMPENSATION_DEFINITION = new ICompensationDefinition() { @Override public String getValueUnit() { return DEFAULT_VALUE_UNIT; } @Override public String getValueExpression() { return DEFAULT_VALUE_EXPRESSION; } @Override public Date getTimeInterval() { return DEFAULT_INTERVAL; } @Override public CompensationKind getKind() { return CompensationKind.UNKNOWN; } @Override public Long getId() { return null; } @Override public Integer getCount() { return DEFAULT_COUNT; } @Override public String getAction() { return DEFAULT_ACTION; } @Override public String getValidity() { return DEFAULT_VALIDITY; } }; }
/* * MusicTag Copyright (C)2003,2004 * * This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser * General Public License as published by the Free Software Foundation; either version 2.1 of the License, * or (at your option) any later version. * * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License along with this library; if not, * you can getFields a copy from http://www.opensource.org/licenses/lgpl-license.php or write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jaudiotagger.tag.id3; import org.jaudiotagger.FileConstants; import org.jaudiotagger.audio.mp3.MP3File; import org.jaudiotagger.logging.ErrorMessage; import org.jaudiotagger.tag.*; import org.jaudiotagger.tag.datatype.Artwork; import org.jaudiotagger.tag.datatype.DataTypes; import org.jaudiotagger.tag.id3.framebody.*; import org.jaudiotagger.tag.reference.PictureTypes; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel; import java.util.ArrayList; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.logging.Level; /** * Represents an ID3v2.3 tag. 
* * @author : Paul Taylor * @author : Eric Farng * @version $Id: ID3v23Tag.java 932 2010-11-26 13:13:15Z paultaylor $ */ public class ID3v23Tag extends AbstractID3v2Tag { protected static final String TYPE_CRCDATA = "crcdata"; protected static final String TYPE_EXPERIMENTAL = "experimental"; protected static final String TYPE_EXTENDED = "extended"; protected static final String TYPE_PADDINGSIZE = "paddingsize"; protected static final String TYPE_UNSYNCHRONISATION = "unsyncronisation"; protected static int TAG_EXT_HEADER_LENGTH = 10; protected static int TAG_EXT_HEADER_CRC_LENGTH = 4; protected static int FIELD_TAG_EXT_SIZE_LENGTH = 4; protected static int TAG_EXT_HEADER_DATA_LENGTH = TAG_EXT_HEADER_LENGTH - FIELD_TAG_EXT_SIZE_LENGTH; /** * ID3v2.3 Header bit mask */ public static final int MASK_V23_UNSYNCHRONIZATION = FileConstants.BIT7; /** * ID3v2.3 Header bit mask */ public static final int MASK_V23_EXTENDED_HEADER = FileConstants.BIT6; /** * ID3v2.3 Header bit mask */ public static final int MASK_V23_EXPERIMENTAL = FileConstants.BIT5; /** * ID3v2.3 Extended Header bit mask */ public static final int MASK_V23_CRC_DATA_PRESENT = FileConstants.BIT7; /** * ID3v2.3 RBUF frame bit mask */ public static final int MASK_V23_EMBEDDED_INFO_FLAG = FileConstants.BIT1; /** * CRC Checksum calculated */ protected boolean crcDataFlag = false; /** * Experiemntal tag */ protected boolean experimental = false; /** * Contains extended header */ protected boolean extended = false; /** * Crcdata Checksum in extended header */ private int crc32; /** * Tag padding */ private int paddingSize = 0; /** * All frames in the tag uses unsynchronisation */ protected boolean unsynchronization = false; /** * The tag is compressed */ protected boolean compression = false; public static final byte RELEASE = 2; public static final byte MAJOR_VERSION = 3; public static final byte REVISION = 0; /** * Retrieve the Release */ public byte getRelease() { return RELEASE; } /** * Retrieve the Major Version 
*/ public byte getMajorVersion() { return MAJOR_VERSION; } /** * Retrieve the Revision */ public byte getRevision() { return REVISION; } /** * @return Cyclic Redundancy Check 32 Value */ public int getCrc32() { return crc32; } /** * Creates a new empty ID3v2_3 datatype. */ public ID3v23Tag() { frameMap = new LinkedHashMap(); encryptedFrameMap = new LinkedHashMap(); } /** * Copy primitives applicable to v2.3 */ protected void copyPrimitives(AbstractID3v2Tag copyObj) { //logger.info("Copying primitives"); super.copyPrimitives(copyObj); if (copyObj instanceof ID3v23Tag) { ID3v23Tag copyObject = (ID3v23Tag) copyObj; this.crcDataFlag = copyObject.crcDataFlag; this.experimental = copyObject.experimental; this.extended = copyObject.extended; this.crc32 = copyObject.crc32; this.paddingSize = copyObject.paddingSize; } } protected void addFrame(AbstractID3v2Frame frame) { try { //Special case to handle TDRC frame from V24 that needs breaking up into separate frame in V23 if ((frame.getIdentifier().equals(ID3v24Frames.FRAME_ID_YEAR)) && (frame.getBody() instanceof FrameBodyTDRC)) { translateFrame(frame); } else if (frame instanceof ID3v23Frame) { copyFrameIntoMap(frame.getIdentifier(), frame); } else { ID3v23Frame newFrame = new ID3v23Frame(frame); copyFrameIntoMap(newFrame.getIdentifier(), newFrame); } } catch (InvalidFrameException ife) { logger.log(Level.SEVERE, "Unable to convert frame:" + frame.getIdentifier()); } } /** * This is used when we need to translate a single frame into multiple frames, * currently required for v24 TDRC frames. 
* * @param frame */ //TODO will overwrite any existing TYER or TIME frame, do we ever want multiples of these protected void translateFrame(AbstractID3v2Frame frame) { FrameBodyTDRC tmpBody = (FrameBodyTDRC) frame.getBody(); ID3v23Frame newFrame; if (!tmpBody.getYear().equals("")) { newFrame = new ID3v23Frame(ID3v23Frames.FRAME_ID_V3_TYER); ((FrameBodyTYER) newFrame.getBody()).setText(tmpBody.getYear()); //logger.info("Adding Frame:" + newFrame.getIdentifier()); frameMap.put(newFrame.getIdentifier(), newFrame); } if (!tmpBody.getDate().equals("")) { newFrame = new ID3v23Frame(ID3v23Frames.FRAME_ID_V3_TDAT); ((FrameBodyTDAT) newFrame.getBody()).setText(tmpBody.getDate()); ((FrameBodyTDAT) newFrame.getBody()).setMonthOnly(tmpBody.isMonthOnly()); //logger.info("Adding Frame:" + newFrame.getIdentifier()); frameMap.put(newFrame.getIdentifier(), newFrame); } if (!tmpBody.getTime().equals("")) { newFrame = new ID3v23Frame(ID3v23Frames.FRAME_ID_V3_TIME); ((FrameBodyTIME) newFrame.getBody()).setText(tmpBody.getTime()); ((FrameBodyTIME) newFrame.getBody()).setHoursOnly(tmpBody.isHoursOnly()); //logger.info("Adding Frame:" + newFrame.getIdentifier()); frameMap.put(newFrame.getIdentifier(), newFrame); } } /** * Copy Constructor, creates a new ID3v2_3 Tag based on another ID3v2_3 Tag * * @param copyObject */ public ID3v23Tag(ID3v23Tag copyObject) { //This doesn't do anything. super(copyObject); //logger.info("Creating tag from another tag of same type"); copyPrimitives(copyObject); copyFrames(copyObject); } /** * Constructs a new tag based upon another tag of different version/type * * @param mp3tag */ public ID3v23Tag(AbstractTag mp3tag) { //logger.info("Creating tag from a tag of a different version"); frameMap = new LinkedHashMap(); encryptedFrameMap = new LinkedHashMap(); if (mp3tag != null) { ID3v24Tag convertedTag; //Should use simpler copy constructor if (mp3tag instanceof ID3v23Tag) { throw new UnsupportedOperationException("Copy Constructor not called. 
Please type cast the argument"); } if (mp3tag instanceof ID3v24Tag) { convertedTag = (ID3v24Tag) mp3tag; } //All tags types can be converted to v2.4 so do this to simplify things else { convertedTag = new ID3v24Tag(mp3tag); } this.setLoggingFilename(convertedTag.getLoggingFilename()); //Copy Primitives copyPrimitives(convertedTag); //Copy Frames copyFrames(convertedTag); //logger.info("Created tag from a tag of a different version"); } } /** * Creates a new ID3v2_3 datatype. * * @param buffer * @param loggingFilename * @throws TagException */ public ID3v23Tag(ByteBuffer buffer, String loggingFilename) throws TagException { setLoggingFilename(loggingFilename); this.read(buffer); } /** * Creates a new ID3v2_3 datatype. * * @param buffer * @throws TagException * @deprecated use {@link #ID3v23Tag(ByteBuffer, String)} instead */ public ID3v23Tag(ByteBuffer buffer) throws TagException { this(buffer, ""); } /** * @return textual tag identifier */ public String getIdentifier() { return "ID3v2.30"; } /** * Return frame size based upon the sizes of the tags rather than the physical * no of bytes between start of ID3Tag and start of Audio Data. 
* <p/> * TODO this is incorrect, because of subclasses * * @return size of tag */ public int getSize() { int size = TAG_HEADER_LENGTH; if (extended) { size += TAG_EXT_HEADER_LENGTH; if (crcDataFlag) { size += TAG_EXT_HEADER_CRC_LENGTH; } } size += super.getSize(); return size; } /** * Is Tag Equivalent to another tag * * @param obj * @return true if tag is equivalent to another */ public boolean equals(Object obj) { if (!(obj instanceof ID3v23Tag)) { return false; } ID3v23Tag object = (ID3v23Tag) obj; if (this.crc32 != object.crc32) { return false; } if (this.crcDataFlag != object.crcDataFlag) { return false; } if (this.experimental != object.experimental) { return false; } if (this.extended != object.extended) { return false; } return this.paddingSize == object.paddingSize && super.equals(obj); } /** * Read the size of a tag, based on the value written in the tag header * * @param buffer * @return * @throws TagException */ public int readSize(ByteBuffer buffer) { //Skip over flags byte flags = buffer.get(); // Read the size, this is size of tag not including the tag header int size = ID3SyncSafeInteger.bufferToValue(buffer); //Return the exact size of tag as setField in the tag header return size + TAG_HEADER_LENGTH; } /** * Read header flags * <p/> * <p>Log info messages for flags that have been set and log warnings when bits have been set for unknown flags</p> * * @param buffer * @throws TagException */ private void readHeaderFlags(ByteBuffer buffer) throws TagException { //Allowable Flags byte flags = buffer.get(); unsynchronization = (flags & MASK_V23_UNSYNCHRONIZATION) != 0; extended = (flags & MASK_V23_EXTENDED_HEADER) != 0; experimental = (flags & MASK_V23_EXPERIMENTAL) != 0; //Not allowable/Unknown Flags if ((flags & FileConstants.BIT4) != 0) { //logger.warning(ErrorMessage.ID3_INVALID_OR_UNKNOWN_FLAG_SET.getMsg(getLoggingFilename(), FileConstants.BIT4)); } if ((flags & FileConstants.BIT3) != 0) { 
//logger.warning(ErrorMessage.ID3_INVALID_OR_UNKNOWN_FLAG_SET.getMsg(getLoggingFilename(), FileConstants.BIT3)); } if ((flags & FileConstants.BIT2) != 0) { //logger.warning(ErrorMessage.ID3_INVALID_OR_UNKNOWN_FLAG_SET.getMsg(getLoggingFilename(), FileConstants.BIT2)); } if ((flags & FileConstants.BIT1) != 0) { //logger.warning(ErrorMessage.ID3_INVALID_OR_UNKNOWN_FLAG_SET.getMsg(getLoggingFilename(), FileConstants.BIT1)); } if ((flags & FileConstants.BIT0) != 0) { //logger.warning(ErrorMessage.ID3_INVALID_OR_UNKNOWN_FLAG_SET.getMsg(getLoggingFilename(), FileConstants.BIT0)); } if (isUnsynchronization()) { //logger.info(ErrorMessage.ID3_TAG_UNSYNCHRONIZED.getMsg(getLoggingFilename())); } if (extended) { //logger.info(ErrorMessage.ID3_TAG_EXTENDED.getMsg(getLoggingFilename())); } if (experimental) { //logger.info(ErrorMessage.ID3_TAG_EXPERIMENTAL.getMsg(getLoggingFilename())); } } /** * Read the optional extended header * * @param buffer * @param size */ private void readExtendedHeader(ByteBuffer buffer, int size) { // Int is 4 bytes. 
int extendedHeaderSize = buffer.getInt(); // Extended header without CRC Data if (extendedHeaderSize == TAG_EXT_HEADER_DATA_LENGTH) { //Flag should not be setField , if is log a warning byte extFlag = buffer.get(); crcDataFlag = (extFlag & MASK_V23_CRC_DATA_PRESENT) != 0; if (crcDataFlag) { //logger.warning(ErrorMessage.ID3_TAG_CRC_FLAG_SET_INCORRECTLY.getMsg(getLoggingFilename())); } //2nd Flag Byte (not used) buffer.get(); //Take padding and ext header size off the size to be read paddingSize = buffer.getInt(); if (paddingSize > 0) { //logger.info(ErrorMessage.ID3_TAG_PADDING_SIZE.getMsg(getLoggingFilename(), paddingSize)); } size = size - (paddingSize + TAG_EXT_HEADER_LENGTH); } else if (extendedHeaderSize == TAG_EXT_HEADER_DATA_LENGTH + TAG_EXT_HEADER_CRC_LENGTH) { //logger.info(ErrorMessage.ID3_TAG_CRC.getMsg(getLoggingFilename())); //Flag should be setField, if nor just act as if it is byte extFlag = buffer.get(); crcDataFlag = (extFlag & MASK_V23_CRC_DATA_PRESENT) != 0; if (!crcDataFlag) { //logger.warning(ErrorMessage.ID3_TAG_CRC_FLAG_SET_INCORRECTLY.getMsg(getLoggingFilename())); } //2nd Flag Byte (not used) buffer.get(); //Take padding size of size to be read paddingSize = buffer.getInt(); if (paddingSize > 0) { //logger.info(ErrorMessage.ID3_TAG_PADDING_SIZE.getMsg(getLoggingFilename(), paddingSize)); } size = size - (paddingSize + TAG_EXT_HEADER_LENGTH + TAG_EXT_HEADER_CRC_LENGTH); //CRC Data crc32 = buffer.getInt(); //logger.info(ErrorMessage.ID3_TAG_CRC_SIZE.getMsg(getLoggingFilename(), crc32)); } //Extended header size is only allowed to be six or ten bytes so this is invalid but instead //of giving up lets guess its six bytes and carry on and see if we can read file ok else { //logger.warning(ErrorMessage.ID3_EXTENDED_HEADER_SIZE_INVALID.getMsg(getLoggingFilename(), extendedHeaderSize)); buffer.position(buffer.position() - FIELD_TAG_EXT_SIZE_LENGTH); } } /** * {@inheritDoc} */ @Override public void read(ByteBuffer buffer) throws TagException { int 
size; if (!seek(buffer)) { throw new TagNotFoundException(getIdentifier() + " tag not found"); } //logger.info(getLoggingFilename() + ":" + "Reading ID3v23 tag"); readHeaderFlags(buffer); // Read the size, this is size of tag not including the tag header size = ID3SyncSafeInteger.bufferToValue(buffer); //logger.info(ErrorMessage.ID_TAG_SIZE.getMsg(getLoggingFilename(), size)); //Extended Header if (extended) { readExtendedHeader(buffer, size); } //Slice Buffer, so position markers tally with size (i.e do not include tagHeader) ByteBuffer bufferWithoutHeader = buffer.slice(); //We need to synchronize the buffer if (isUnsynchronization()) { bufferWithoutHeader = ID3Unsynchronization.synchronize(bufferWithoutHeader); } readFrames(bufferWithoutHeader, size); //logger.info(getLoggingFilename() + ":Loaded Frames,there are:" + frameMap.keySet().size()); } /** * Read the frames * <p/> * Read from byteBuffer upto size * * @param byteBuffer * @param size */ protected void readFrames(ByteBuffer byteBuffer, int size) { //Now start looking for frames ID3v23Frame next; frameMap = new LinkedHashMap(); encryptedFrameMap = new LinkedHashMap(); //Read the size from the Tag Header this.fileReadSize = size; //logger.finest(getLoggingFilename() + ":Start of frame body at:" + byteBuffer.position() + ",frames data size is:" + size); // Read the frames until got to up to the size as specified in header or until // we hit an invalid frame identifier or padding while (byteBuffer.position() < size) { String id; try { //Read Frame //logger.finest(getLoggingFilename() + ":Looking for next frame at:" + byteBuffer.position()); next = new ID3v23Frame(byteBuffer, getLoggingFilename()); id = next.getIdentifier(); loadFrameIntoMap(id, next); } //Found Padding, no more frames catch (PaddingException ex) { logger.config(getLoggingFilename() + ":Found padding starting at:" + byteBuffer.position()); break; } //Found Empty Frame, log it - empty frames should not exist catch (EmptyFrameException ex) { 
//logger.warning(getLoggingFilename() + ":Empty Frame:" + ex.getMessage()); this.emptyFrameBytes += ID3v23Frame.FRAME_HEADER_SIZE; } catch (InvalidFrameIdentifierException ifie) { //logger.warning(getLoggingFilename() + ":Invalid Frame Identifier:" + ifie.getMessage()); this.invalidFrames++; //Don't try and find any more frames break; } //Problem trying to find frame, often just occurs because frameHeader includes padding //and we have reached padding catch (InvalidFrameException ife) { //logger.warning(getLoggingFilename() + ":Invalid Frame:" + ife.getMessage()); this.invalidFrames++; //Don't try and find any more frames break; } //Failed reading frame but may just have invalid data but correct length so lets carry on //in case we can read the next frame catch (InvalidDataTypeException idete) { //logger.warning(getLoggingFilename() + ":Corrupt Frame:" + idete.getMessage()); this.invalidFrames++; continue; } } } /** * Write the ID3 header to the ByteBuffer. * <p/> * TODO Calculate the CYC Data Check * TODO Reintroduce Extended Header * * @param padding is the size of the padding portion of the tag * @param size is the size of the body data * @return ByteBuffer * @throws IOException */ private ByteBuffer writeHeaderToBuffer(int padding, int size) throws IOException { // Flags,currently we never calculate the CRC // and if we dont calculate them cant keep orig values. Tags are not // experimental and we never createField extended header to keep things simple. extended = false; experimental = false; crcDataFlag = false; // Create Header Buffer,allocate maximum possible size for the header ByteBuffer headerBuffer = ByteBuffer. 
allocate(TAG_HEADER_LENGTH + TAG_EXT_HEADER_LENGTH + TAG_EXT_HEADER_CRC_LENGTH); //TAGID headerBuffer.put(TAG_ID); //Major Version headerBuffer.put(getMajorVersion()); //Minor Version headerBuffer.put(getRevision()); //Flags byte flagsByte = 0; if (isUnsynchronization()) { flagsByte |= MASK_V23_UNSYNCHRONIZATION; } if (extended) { flagsByte |= MASK_V23_EXTENDED_HEADER; } if (experimental) { flagsByte |= MASK_V23_EXPERIMENTAL; } headerBuffer.put(flagsByte); //Additional Header Size,(for completeness we never actually write the extended header) int additionalHeaderSize = 0; if (extended) { additionalHeaderSize += TAG_EXT_HEADER_LENGTH; if (crcDataFlag) { additionalHeaderSize += TAG_EXT_HEADER_CRC_LENGTH; } } //Size As Recorded in Header, don't include the main header length headerBuffer.put(ID3SyncSafeInteger.valueToBuffer(padding + size + additionalHeaderSize)); //Write Extended Header if (extended) { byte extFlagsByte1 = 0; byte extFlagsByte2 = 0; //Contains CRCData if (crcDataFlag) { headerBuffer.putInt(TAG_EXT_HEADER_DATA_LENGTH + TAG_EXT_HEADER_CRC_LENGTH); extFlagsByte1 |= MASK_V23_CRC_DATA_PRESENT; headerBuffer.put(extFlagsByte1); headerBuffer.put(extFlagsByte2); headerBuffer.putInt(paddingSize); headerBuffer.putInt(crc32); } //Just extended Header else { headerBuffer.putInt(TAG_EXT_HEADER_DATA_LENGTH); headerBuffer.put(extFlagsByte1); headerBuffer.put(extFlagsByte2); //Newly Calculated Padding As Recorded in Extended Header headerBuffer.putInt(padding); } } headerBuffer.flip(); return headerBuffer; } /** * Write tag to file * <p/> * TODO:we currently never write the Extended header , but if we did the size calculation in this * method would be slightly incorrect * * @param file The file to write to * @throws IOException */ public void write(File file, long audioStartLocation) throws IOException { setLoggingFilename(file.getName()); //logger.info("Writing tag to file:" + getLoggingFilename()); //Write Body Buffer byte[] bodyByteBuffer = 
writeFramesToBuffer().toByteArray(); //logger.info(getLoggingFilename() + ":bodybytebuffer:sizebeforeunsynchronisation:" + bodyByteBuffer.length); // Unsynchronize if option enabled and unsync required unsynchronization = TagOptionSingleton.getInstance().isUnsyncTags() && ID3Unsynchronization.requiresUnsynchronization(bodyByteBuffer); if (isUnsynchronization()) { bodyByteBuffer = ID3Unsynchronization.unsynchronize(bodyByteBuffer); //logger.info(getLoggingFilename() + ":bodybytebuffer:sizeafterunsynchronisation:" + bodyByteBuffer.length); } int sizeIncPadding = calculateTagSize(bodyByteBuffer.length + TAG_HEADER_LENGTH, (int) audioStartLocation); int padding = sizeIncPadding - (bodyByteBuffer.length + TAG_HEADER_LENGTH); //logger.info(getLoggingFilename() + ":Current audiostart:" + audioStartLocation); //logger.info(getLoggingFilename() + ":Size including padding:" + sizeIncPadding); //logger.info(getLoggingFilename() + ":Padding:" + padding); ByteBuffer headerBuffer = writeHeaderToBuffer(padding, bodyByteBuffer.length); writeBufferToFile(file, headerBuffer, bodyByteBuffer, padding, sizeIncPadding, audioStartLocation); } /** * {@inheritDoc} */ @Override public void write(WritableByteChannel channel) throws IOException { //logger.info(getLoggingFilename() + ":Writing tag to channel"); byte[] bodyByteBuffer = writeFramesToBuffer().toByteArray(); //logger.info(getLoggingFilename() + ":bodybytebuffer:sizebeforeunsynchronisation:" + bodyByteBuffer.length); // Unsynchronize if option enabled and unsync required unsynchronization = TagOptionSingleton.getInstance().isUnsyncTags() && ID3Unsynchronization.requiresUnsynchronization(bodyByteBuffer); if (isUnsynchronization()) { bodyByteBuffer = ID3Unsynchronization.unsynchronize(bodyByteBuffer); //logger.info(getLoggingFilename() + ":bodybytebuffer:sizeafterunsynchronisation:" + bodyByteBuffer.length); } ByteBuffer headerBuffer = writeHeaderToBuffer(0, bodyByteBuffer.length); channel.write(headerBuffer); 
channel.write(ByteBuffer.wrap(bodyByteBuffer)); } /** * For representing the MP3File in an XML Format */ public void createStructure() { MP3File.getStructureFormatter().openHeadingElement(TYPE_TAG, getIdentifier()); super.createStructureHeader(); //Header MP3File.getStructureFormatter().openHeadingElement(TYPE_HEADER, ""); MP3File.getStructureFormatter().addElement(TYPE_UNSYNCHRONISATION, this.isUnsynchronization()); MP3File.getStructureFormatter().addElement(TYPE_EXTENDED, this.extended); MP3File.getStructureFormatter().addElement(TYPE_EXPERIMENTAL, this.experimental); MP3File.getStructureFormatter().addElement(TYPE_CRCDATA, this.crc32); MP3File.getStructureFormatter().addElement(TYPE_PADDINGSIZE, this.paddingSize); MP3File.getStructureFormatter().closeHeadingElement(TYPE_HEADER); //Body super.createStructureBody(); MP3File.getStructureFormatter().closeHeadingElement(TYPE_TAG); } /** * @return is tag unsynchronized */ public boolean isUnsynchronization() { return unsynchronization; } public ID3v23Frame createFrame(String id) { return new ID3v23Frame(id); } /** * Create Frame for Id3 Key * <p/> * Only textual data supported at the moment, should only be used with frames that * support a simple string argument. 
* * @param id3Key * @param value * @return * @throws KeyNotFoundException * @throws FieldDataInvalidException */ public TagField createField(ID3v23FieldKey id3Key, String value) throws KeyNotFoundException, FieldDataInvalidException { if (id3Key == null) { throw new KeyNotFoundException(); } return super.doCreateTagField(new FrameAndSubId(id3Key.getFrameId(), id3Key.getSubId()), value); } /** * Retrieve the first value that exists for this id3v23key * * @param id3v23FieldKey * @return * @throws org.jaudiotagger.tag.KeyNotFoundException * */ public String getFirst(ID3v23FieldKey id3v23FieldKey) throws KeyNotFoundException { if (id3v23FieldKey == null) { throw new KeyNotFoundException(); } FrameAndSubId frameAndSubId = new FrameAndSubId(id3v23FieldKey.getFrameId(), id3v23FieldKey.getSubId()); if (id3v23FieldKey == ID3v23FieldKey.TRACK) { AbstractID3v2Frame frame = getFirstField(frameAndSubId.getFrameId()); return String.valueOf(((FrameBodyTRCK) frame.getBody()).getTrackNo()); } else if (id3v23FieldKey == ID3v23FieldKey.TRACK_TOTAL) { AbstractID3v2Frame frame = getFirstField(frameAndSubId.getFrameId()); return String.valueOf(((FrameBodyTRCK) frame.getBody()).getTrackTotal()); } else if (id3v23FieldKey == ID3v23FieldKey.DISC_NO) { AbstractID3v2Frame frame = getFirstField(frameAndSubId.getFrameId()); return String.valueOf(((FrameBodyTPOS) frame.getBody()).getDiscNo()); } else if (id3v23FieldKey == ID3v23FieldKey.DISC_TOTAL) { AbstractID3v2Frame frame = getFirstField(frameAndSubId.getFrameId()); return String.valueOf(((FrameBodyTPOS) frame.getBody()).getDiscTotal()); } else { return super.doGetValueAtIndex(frameAndSubId, 0); } } /** * Delete fields with this id3v23FieldKey * * @param id3v23FieldKey * @throws org.jaudiotagger.tag.KeyNotFoundException * */ public void deleteField(ID3v23FieldKey id3v23FieldKey) throws KeyNotFoundException { if (id3v23FieldKey == null) { throw new KeyNotFoundException(); } super.doDeleteTagField(new FrameAndSubId(id3v23FieldKey.getFrameId(), 
id3v23FieldKey.getSubId())); } /** * Delete fields with this (frame) id * * @param id */ public void deleteField(String id) { super.doDeleteTagField(new FrameAndSubId(id, null)); } protected FrameAndSubId getFrameAndSubIdFromGenericKey(FieldKey genericKey) { ID3v23FieldKey id3v23FieldKey = ID3v23Frames.getInstanceOf().getId3KeyFromGenericKey(genericKey); if (id3v23FieldKey == null) { throw new KeyNotFoundException(); } return new FrameAndSubId(id3v23FieldKey.getFrameId(), id3v23FieldKey.getSubId()); } protected ID3Frames getID3Frames() { return ID3v23Frames.getInstanceOf(); } /** * @return comparator used to order frames in preferred order for writing to file * so that most important frames are written first. */ public Comparator getPreferredFrameOrderComparator() { return ID3v23PreferredFrameOrderComparator.getInstanceof(); } /** * {@inheritDoc} */ public List<Artwork> getArtworkList() { List<TagField> coverartList = getFields(FieldKey.COVER_ART); List<Artwork> artworkList = new ArrayList<Artwork>(coverartList.size()); for (TagField next : coverartList) { FrameBodyAPIC coverArt = (FrameBodyAPIC) ((AbstractID3v2Frame) next).getBody(); Artwork artwork = new Artwork(); artwork.setMimeType(coverArt.getMimeType()); artwork.setPictureType(coverArt.getPictureType()); if (coverArt.isImageUrl()) { artwork.setLinked(true); artwork.setImageUrl(coverArt.getImageUrl()); } else { artwork.setBinaryData(coverArt.getImageData()); } artworkList.add(artwork); } return artworkList; } /** * {@inheritDoc} */ public TagField createField(Artwork artwork) throws FieldDataInvalidException { AbstractID3v2Frame frame = createFrame(getFrameAndSubIdFromGenericKey(FieldKey.COVER_ART).getFrameId()); FrameBodyAPIC body = (FrameBodyAPIC) frame.getBody(); body.setObjectValue(DataTypes.OBJ_PICTURE_DATA, artwork.getBinaryData()); body.setObjectValue(DataTypes.OBJ_PICTURE_TYPE, artwork.getPictureType()); body.setObjectValue(DataTypes.OBJ_MIME_TYPE, artwork.getMimeType()); 
body.setObjectValue(DataTypes.OBJ_DESCRIPTION, ""); return frame; } /** * Create Artwork * * @param data * @param mimeType of the image * @return * @see PictureTypes */ public TagField createArtworkField(byte[] data, String mimeType) { AbstractID3v2Frame frame = createFrame(getFrameAndSubIdFromGenericKey(FieldKey.COVER_ART).getFrameId()); FrameBodyAPIC body = (FrameBodyAPIC) frame.getBody(); body.setObjectValue(DataTypes.OBJ_PICTURE_DATA, data); body.setObjectValue(DataTypes.OBJ_PICTURE_TYPE, PictureTypes.DEFAULT_ID); body.setObjectValue(DataTypes.OBJ_MIME_TYPE, mimeType); body.setObjectValue(DataTypes.OBJ_DESCRIPTION, ""); return frame; } public int getPaddingSize() { return paddingSize; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.security; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SaslRpcServer; import org.apache.hadoop.security.SaslRpcServer.AuthMethod; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation; import org.apache.hadoop.security.token.delegation.DelegationKey; import 
org.apache.hadoop.util.StringUtils; import org.apache.thrift.transport.TSaslServerTransport; import org.apache.thrift.transport.TTransportException; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.ServerSocket; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.List; import java.util.Map; public class TestHadoopAuthBridge23 { /** * set to true when metastore token manager has intitialized token manager * through call to HadoopThriftAuthBridge23.Server.startDelegationTokenSecretManager */ static volatile boolean isMetastoreTokenManagerInited; public static class MyTokenStore extends MemoryTokenStore { static volatile DelegationTokenStore TOKEN_STORE = null; public void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode smode) throws TokenStoreException { super.init(hmsHandler, smode); TOKEN_STORE = this; try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } isMetastoreTokenManagerInited = true; } } private static class MyHadoopThriftAuthBridge23 extends HadoopThriftAuthBridge23 { @Override public Server createServer(String keytabFile, String principalConf, String clientConf) throws TTransportException { //Create a Server that doesn't interpret any Kerberos stuff return new Server(); } static class Server extends HadoopThriftAuthBridge.Server { public Server() throws TTransportException { super(); } @Override public TSaslServerTransport.Factory createSaslServerTransportFactory(Map<String, String> saslProps) { TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory(); transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM, saslProps, new 
SaslDigestCallbackHandler(secretManager)); return transFactory; } } } private HiveConf conf; private void configureSuperUserIPAddresses(Configuration conf, String superUserShortName) throws IOException { List<String> ipList = new ArrayList<String>(); Enumeration<NetworkInterface> netInterfaceList = NetworkInterface .getNetworkInterfaces(); while (netInterfaceList.hasMoreElements()) { NetworkInterface inf = netInterfaceList.nextElement(); Enumeration<InetAddress> addrList = inf.getInetAddresses(); while (addrList.hasMoreElements()) { InetAddress addr = addrList.nextElement(); ipList.add(addr.getHostAddress()); } } StringBuilder builder = new StringBuilder(); for (String ip : ipList) { builder.append(ip); builder.append(','); } builder.append("127.0.1.1,"); builder.append(InetAddress.getLocalHost().getCanonicalHostName()); conf.setStrings(DefaultImpersonationProvider.getTestProvider().getProxySuperuserIpConfKey(superUserShortName), builder.toString()); } @Before public void setup() throws Exception { isMetastoreTokenManagerInited = false; System.setProperty(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true"); System.setProperty(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS.varname, MyTokenStore.class.getName()); conf = new HiveConf(TestHadoopAuthBridge23.class); MetaStoreTestUtils.startMetaStoreWithRetry(new MyHadoopThriftAuthBridge23(), conf); } /** * Test delegation token store/load from shared store. 
* @throws Exception */ @Test public void testDelegationTokenSharedStore() throws Exception { UserGroupInformation clientUgi = UserGroupInformation.getCurrentUser(); TokenStoreDelegationTokenSecretManager tokenManager = new TokenStoreDelegationTokenSecretManager(0, 60*60*1000, 60*60*1000, 0, MyTokenStore.TOKEN_STORE); // initializes current key tokenManager.startThreads(); tokenManager.stopThreads(); String tokenStrForm = tokenManager.getDelegationToken(clientUgi.getShortUserName(), clientUgi.getShortUserName()); Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>(); t.decodeFromUrlString(tokenStrForm); //check whether the username in the token is what we expect DelegationTokenIdentifier d = new DelegationTokenIdentifier(); d.readFields(new DataInputStream(new ByteArrayInputStream( t.getIdentifier()))); Assert.assertTrue("Usernames don't match", clientUgi.getShortUserName().equals(d.getUser().getShortUserName())); DelegationTokenInformation tokenInfo = MyTokenStore.TOKEN_STORE .getToken(d); Assert.assertNotNull("token not in store", tokenInfo); Assert.assertFalse("duplicate token add", MyTokenStore.TOKEN_STORE.addToken(d, tokenInfo)); // check keys are copied from token store when token is loaded TokenStoreDelegationTokenSecretManager anotherManager = new TokenStoreDelegationTokenSecretManager(0, 0, 0, 0, MyTokenStore.TOKEN_STORE); Assert.assertEquals("master keys empty on init", 0, anotherManager.getAllKeys().length); Assert.assertNotNull("token loaded", anotherManager.retrievePassword(d)); anotherManager.renewToken(t, clientUgi.getShortUserName()); Assert.assertEquals("master keys not loaded from store", MyTokenStore.TOKEN_STORE.getMasterKeys().length, anotherManager.getAllKeys().length); // cancel the delegation token tokenManager.cancelDelegationToken(tokenStrForm); Assert.assertNull("token not removed from store after cancel", MyTokenStore.TOKEN_STORE.getToken(d)); Assert.assertFalse("token removed (again)", 
MyTokenStore.TOKEN_STORE.removeToken(d)); try { anotherManager.retrievePassword(d); Assert.fail("InvalidToken expected after cancel"); } catch (InvalidToken ex) { // expected } // token expiration MyTokenStore.TOKEN_STORE.addToken(d, new DelegationTokenInformation(0, t.getPassword())); Assert.assertNotNull(MyTokenStore.TOKEN_STORE.getToken(d)); anotherManager.removeExpiredTokens(); Assert.assertNull("Expired token not removed", MyTokenStore.TOKEN_STORE.getToken(d)); // key expiration - create an already expired key anotherManager.startThreads(); // generates initial key anotherManager.stopThreads(); DelegationKey expiredKey = new DelegationKey(-1, 0, anotherManager.getAllKeys()[0].getKey()); anotherManager.logUpdateMasterKey(expiredKey); // updates key with sequence number Assert.assertTrue("expired key not in allKeys", anotherManager.reloadKeys().containsKey(expiredKey.getKeyId())); anotherManager.rollMasterKeyExt(); Assert.assertFalse("Expired key not removed", anotherManager.reloadKeys().containsKey(expiredKey.getKeyId())); } @Test public void testSaslWithHiveMetaStore() throws Exception { setup(); UserGroupInformation clientUgi = UserGroupInformation.getCurrentUser(); obtainTokenAndAddIntoUGI(clientUgi, null); obtainTokenAndAddIntoUGI(clientUgi, "tokenForFooTablePartition"); } @Test public void testMetastoreProxyUser() throws Exception { setup(); final String proxyUserName = "proxyUser"; //set the configuration up such that proxyUser can act on //behalf of all users belonging to the group foo_bar_group ( //a dummy group) String[] groupNames = new String[] { "foo_bar_group" }; setGroupsInConf(groupNames, proxyUserName); final UserGroupInformation delegationTokenUser = UserGroupInformation.getCurrentUser(); final UserGroupInformation proxyUserUgi = UserGroupInformation.createRemoteUser(proxyUserName); String tokenStrForm = proxyUserUgi.doAs(new PrivilegedExceptionAction<String>() { public String run() throws Exception { try { //Since the user running the test 
won't belong to a non-existent group //foo_bar_group, the call to getDelegationTokenStr will fail return getDelegationTokenStr(delegationTokenUser, proxyUserUgi); } catch (AuthorizationException ae) { return null; } } }); Assert.assertTrue("Expected the getDelegationToken call to fail", tokenStrForm == null); //set the configuration up such that proxyUser can act on //behalf of all users belonging to the real group(s) that the //user running the test belongs to setGroupsInConf(UserGroupInformation.getCurrentUser().getGroupNames(), proxyUserName); tokenStrForm = proxyUserUgi.doAs(new PrivilegedExceptionAction<String>() { public String run() throws Exception { try { //Since the user running the test belongs to the group //obtained above the call to getDelegationTokenStr will succeed return getDelegationTokenStr(delegationTokenUser, proxyUserUgi); } catch (AuthorizationException ae) { return null; } } }); Assert.assertTrue("Expected the getDelegationToken call to not fail", tokenStrForm != null); Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>(); t.decodeFromUrlString(tokenStrForm); //check whether the username in the token is what we expect DelegationTokenIdentifier d = new DelegationTokenIdentifier(); d.readFields(new DataInputStream(new ByteArrayInputStream( t.getIdentifier()))); Assert.assertTrue("Usernames don't match", delegationTokenUser.getShortUserName().equals(d.getUser().getShortUserName())); } private void setGroupsInConf(String[] groupNames, String proxyUserName) throws IOException { conf.set( DefaultImpersonationProvider.getTestProvider().getProxySuperuserGroupConfKey(proxyUserName), StringUtils.join(",", Arrays.asList(groupNames))); configureSuperUserIPAddresses(conf, proxyUserName); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); } private String getDelegationTokenStr(UserGroupInformation ownerUgi, UserGroupInformation realUgi) throws Exception { //obtain a token by directly invoking the metastore operation(without going 
//through the thrift interface). Obtaining a token makes the secret manager //aware of the user and that it gave the token to the user //also set the authentication method explicitly to KERBEROS. Since the //metastore checks whether the authentication method is KERBEROS or not //for getDelegationToken, and the testcases don't use //kerberos, this needs to be done waitForMetastoreTokenInit(); HadoopThriftAuthBridge.Server.authenticationMethod .set(AuthenticationMethod.KERBEROS); return HiveMetaStore.getDelegationToken(ownerUgi.getShortUserName(), realUgi.getShortUserName(), InetAddress.getLocalHost().getHostAddress()); } /** * Wait for metastore to have initialized token manager * This does not have to be done in other metastore test cases as they * use metastore client which will retry few times on failure * @throws InterruptedException */ private void waitForMetastoreTokenInit() throws InterruptedException { int waitAttempts = 30; while(waitAttempts > 0 && !isMetastoreTokenManagerInited){ Thread.sleep(1000); waitAttempts--; } } private void obtainTokenAndAddIntoUGI(UserGroupInformation clientUgi, String tokenSig) throws Exception { String tokenStrForm = getDelegationTokenStr(clientUgi, clientUgi); Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>(); t.decodeFromUrlString(tokenStrForm); //check whether the username in the token is what we expect DelegationTokenIdentifier d = new DelegationTokenIdentifier(); d.readFields(new DataInputStream(new ByteArrayInputStream( t.getIdentifier()))); Assert.assertTrue("Usernames don't match", clientUgi.getShortUserName().equals(d.getUser().getShortUserName())); if (tokenSig != null) { conf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, tokenSig); t.setService(new Text(tokenSig)); } //add the token to the clientUgi for securely talking to the metastore clientUgi.addToken(t); //Create the metastore client as the clientUgi. 
Doing so this //way will give the client access to the token that was added earlier //in the clientUgi HiveMetaStoreClient hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() { public HiveMetaStoreClient run() throws Exception { HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf); return hiveClient; } }); Assert.assertTrue("Couldn't connect to metastore", hiveClient != null); //try out some metastore operations createDBAndVerifyExistence(hiveClient); hiveClient.close(); //Now cancel the delegation token HiveMetaStore.cancelDelegationToken(tokenStrForm); //now metastore connection should fail hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() { public HiveMetaStoreClient run() { try { return new HiveMetaStoreClient(conf); } catch (MetaException e) { return null; } } }); Assert.assertTrue("Expected metastore operations to fail", hiveClient == null); } private void createDBAndVerifyExistence(HiveMetaStoreClient client) throws Exception { String dbName = "simpdb"; Database db = new Database(); db.setName(dbName); client.createDatabase(db); Database db1 = client.getDatabase(dbName); client.dropDatabase(dbName); Assert.assertTrue("Databases do not match", db1.getName().equals(db.getName())); } private int findFreePort() throws IOException { ServerSocket socket= new ServerSocket(0); int port = socket.getLocalPort(); socket.close(); return port; } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.emf.type.core.IElementType; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceFaultInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorInputConnectorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnector2EditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointOnErrorSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeFirstInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeSecondInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MessageInputConnectorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart; 
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider; /** * @generated */ public class EsbModelingAssistantProviderOfSendMediatorOutputConnectorEditPart extends EsbModelingAssistantProvider { /** * @generated */ @Override public List<IElementType> getRelTypesOnSource(IAdaptable source) { IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class); 
return doGetRelTypesOnSource((SendMediatorOutputConnectorEditPart) sourceEditPart); } /** * @generated */ public List<IElementType> doGetRelTypesOnSource(SendMediatorOutputConnectorEditPart source) { List<IElementType> types = new ArrayList<IElementType>(1); types.add(EsbElementTypes.EsbLink_4001); return types; } /** * @generated */ @Override public List<IElementType> getRelTypesOnSourceAndTarget(IAdaptable source, IAdaptable target) { IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class); IGraphicalEditPart targetEditPart = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class); return doGetRelTypesOnSourceAndTarget((SendMediatorOutputConnectorEditPart) sourceEditPart, targetEditPart); } /** * @generated */ public List<IElementType> doGetRelTypesOnSourceAndTarget(SendMediatorOutputConnectorEditPart source, IGraphicalEditPart targetEditPart) { List<IElementType> types = new LinkedList<IElementType>(); if (targetEditPart instanceof ProxyInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ProxyFaultInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DropMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof PropertyMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ThrottleMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FilterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LogMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EnrichMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof XSLTMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart 
instanceof SwitchMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EventMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EntitlementMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ClassMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SpringMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ScriptMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FaultMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof XQueryMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CommandMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DBLookupMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DBReportMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SmooksMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SendMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HeaderMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CloneMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CacheMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof IterateMediatorInputConnectorEditPart) { 
types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CalloutMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof TransactionMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RMSequenceMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RuleMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof OAuthMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AggregateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof StoreMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BuilderMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CallTemplateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof PayloadFactoryMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EnqueueMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof URLRewriteMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ValidateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RouterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ConditionalRouterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BAMMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BeanMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); 
} if (targetEditPart instanceof EJBMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DefaultEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FailoverEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RecipientListEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof WSDLEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof NamedEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LoadBalanceEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressingEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HTTPEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof TemplateEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CloudConnectorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CloudConnectorOperationInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LoopBackMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RespondMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CallMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof 
DataMapperMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FastXSLTMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ForEachMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof PublishEventMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ProxyInSequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof MessageInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof MergeNodeFirstInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof MergeNodeSecondInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SequencesInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DefaultEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FailoverEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RecipientListEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof WSDLEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LoadBalanceEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HTTPEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof TemplateEndpointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceInputConnectorEditPart) { 
types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceFaultInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceInSequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof InboundEndpointSequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof InboundEndpointOnErrorSequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } return types; } /** * @generated */ @Override public List<IElementType> getTypesForTarget(IAdaptable source, IElementType relationshipType) { IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class); return doGetTypesForTarget((SendMediatorOutputConnectorEditPart) sourceEditPart, relationshipType); } /** * @generated */ public List<IElementType> doGetTypesForTarget(SendMediatorOutputConnectorEditPart source, IElementType relationshipType) { List<IElementType> types = new ArrayList<IElementType>(); if (relationshipType == EsbElementTypes.EsbLink_4001) { types.add(EsbElementTypes.ProxyInputConnector_3003); types.add(EsbElementTypes.ProxyFaultInputConnector_3489); types.add(EsbElementTypes.DropMediatorInputConnector_3008); types.add(EsbElementTypes.PropertyMediatorInputConnector_3033); types.add(EsbElementTypes.ThrottleMediatorInputConnector_3121); types.add(EsbElementTypes.FilterMediatorInputConnector_3010); types.add(EsbElementTypes.LogMediatorInputConnector_3018); types.add(EsbElementTypes.EnrichMediatorInputConnector_3036); types.add(EsbElementTypes.XSLTMediatorInputConnector_3039); types.add(EsbElementTypes.SwitchMediatorInputConnector_3042); types.add(EsbElementTypes.SequenceInputConnector_3049); types.add(EsbElementTypes.EventMediatorInputConnector_3052); types.add(EsbElementTypes.EntitlementMediatorInputConnector_3055); types.add(EsbElementTypes.ClassMediatorInputConnector_3058); 
types.add(EsbElementTypes.SpringMediatorInputConnector_3061); types.add(EsbElementTypes.ScriptMediatorInputConnector_3064); types.add(EsbElementTypes.FaultMediatorInputConnector_3067); types.add(EsbElementTypes.XQueryMediatorInputConnector_3070); types.add(EsbElementTypes.CommandMediatorInputConnector_3073); types.add(EsbElementTypes.DBLookupMediatorInputConnector_3076); types.add(EsbElementTypes.DBReportMediatorInputConnector_3079); types.add(EsbElementTypes.SmooksMediatorInputConnector_3082); types.add(EsbElementTypes.SendMediatorInputConnector_3085); types.add(EsbElementTypes.HeaderMediatorInputConnector_3100); types.add(EsbElementTypes.CloneMediatorInputConnector_3103); types.add(EsbElementTypes.CacheMediatorInputConnector_3106); types.add(EsbElementTypes.IterateMediatorInputConnector_3109); types.add(EsbElementTypes.CalloutMediatorInputConnector_3115); types.add(EsbElementTypes.TransactionMediatorInputConnector_3118); types.add(EsbElementTypes.RMSequenceMediatorInputConnector_3124); types.add(EsbElementTypes.RuleMediatorInputConnector_3127); types.add(EsbElementTypes.OAuthMediatorInputConnector_3130); types.add(EsbElementTypes.AggregateMediatorInputConnector_3112); types.add(EsbElementTypes.StoreMediatorInputConnector_3589); types.add(EsbElementTypes.BuilderMediatorInputConnector_3592); types.add(EsbElementTypes.CallTemplateMediatorInputConnector_3595); types.add(EsbElementTypes.PayloadFactoryMediatorInputConnector_3598); types.add(EsbElementTypes.EnqueueMediatorInputConnector_3601); types.add(EsbElementTypes.URLRewriteMediatorInputConnector_3621); types.add(EsbElementTypes.ValidateMediatorInputConnector_3624); types.add(EsbElementTypes.RouterMediatorInputConnector_3629); types.add(EsbElementTypes.ConditionalRouterMediatorInputConnector_3636); types.add(EsbElementTypes.BAMMediatorInputConnector_3681); types.add(EsbElementTypes.BeanMediatorInputConnector_3684); types.add(EsbElementTypes.EJBMediatorInputConnector_3687); 
types.add(EsbElementTypes.DefaultEndPointInputConnector_3021); types.add(EsbElementTypes.AddressEndPointInputConnector_3030); types.add(EsbElementTypes.FailoverEndPointInputConnector_3088); types.add(EsbElementTypes.RecipientListEndPointInputConnector_3693); types.add(EsbElementTypes.WSDLEndPointInputConnector_3092); types.add(EsbElementTypes.NamedEndpointInputConnector_3661); types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3095); types.add(EsbElementTypes.APIResourceEndpointInputConnector_3675); types.add(EsbElementTypes.AddressingEndpointInputConnector_3690); types.add(EsbElementTypes.HTTPEndPointInputConnector_3710); types.add(EsbElementTypes.TemplateEndpointInputConnector_3717); types.add(EsbElementTypes.CloudConnectorInputConnector_3720); types.add(EsbElementTypes.CloudConnectorOperationInputConnector_3723); types.add(EsbElementTypes.LoopBackMediatorInputConnector_3737); types.add(EsbElementTypes.RespondMediatorInputConnector_3740); types.add(EsbElementTypes.CallMediatorInputConnector_3743); types.add(EsbElementTypes.DataMapperMediatorInputConnector_3762); types.add(EsbElementTypes.FastXSLTMediatorInputConnector_3765); types.add(EsbElementTypes.ForEachMediatorInputConnector_3781); types.add(EsbElementTypes.PublishEventMediatorInputConnector_3786); types.add(EsbElementTypes.ProxyInSequenceInputConnector_3731); types.add(EsbElementTypes.MessageInputConnector_3046); types.add(EsbElementTypes.MergeNodeFirstInputConnector_3014); types.add(EsbElementTypes.MergeNodeSecondInputConnector_3015); types.add(EsbElementTypes.SequencesInputConnector_3616); types.add(EsbElementTypes.DefaultEndPointInputConnector_3644); types.add(EsbElementTypes.AddressEndPointInputConnector_3647); types.add(EsbElementTypes.FailoverEndPointInputConnector_3650); types.add(EsbElementTypes.RecipientListEndPointInputConnector_3697); types.add(EsbElementTypes.WSDLEndPointInputConnector_3654); types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3657); 
types.add(EsbElementTypes.HTTPEndPointInputConnector_3713); types.add(EsbElementTypes.TemplateEndpointInputConnector_3726); types.add(EsbElementTypes.APIResourceInputConnector_3670); types.add(EsbElementTypes.APIResourceFaultInputConnector_3672); types.add(EsbElementTypes.APIResourceInSequenceInputConnector_3747); types.add(EsbElementTypes.InboundEndpointSequenceInputConnector_3768); types.add(EsbElementTypes.InboundEndpointOnErrorSequenceInputConnector_3770); } return types; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.net; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import io.netty.channel.WriteBufferWaterMark; import org.apache.cassandra.auth.IInternodeAuthenticator; import org.apache.cassandra.config.Config; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.EncryptionOptions; import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions; import org.apache.cassandra.db.SystemKeyspace; import org.apache.cassandra.locator.IEndpointSnitch; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.utils.FBUtilities; import static org.apache.cassandra.config.DatabaseDescriptor.getEndpointSnitch; import static org.apache.cassandra.net.MessagingService.VERSION_40; import static org.apache.cassandra.net.MessagingService.instance; import static org.apache.cassandra.utils.FBUtilities.getBroadcastAddressAndPort; /** * A collection of settings to be passed around for outbound connections. 
*/ @SuppressWarnings({ "WeakerAccess", "unused" }) public class OutboundConnectionSettings { private static final String INTRADC_TCP_NODELAY_PROPERTY = Config.PROPERTY_PREFIX + "otc_intradc_tcp_nodelay"; /** * Enabled/disable TCP_NODELAY for intradc connections. Defaults to enabled. */ private static final boolean INTRADC_TCP_NODELAY = Boolean.parseBoolean(System.getProperty(INTRADC_TCP_NODELAY_PROPERTY, "true")); public enum Framing { // for < VERSION_40, implies no framing // for >= VERSION_40, uses simple unprotected frames with header crc but no payload protection UNPROTECTED(0), // for < VERSION_40, uses the jpountz framing format // for >= VERSION_40, uses our framing format with header crc24 LZ4(1), // for < VERSION_40, implies UNPROTECTED // for >= VERSION_40, uses simple frames with separate header and payload crc CRC(2); public static Framing forId(int id) { switch (id) { case 0: return UNPROTECTED; case 1: return LZ4; case 2: return CRC; } throw new IllegalStateException(); } final int id; Framing(int id) { this.id = id; } } public final IInternodeAuthenticator authenticator; public final InetAddressAndPort to; public final InetAddressAndPort connectTo; // may be represented by a different IP address on this node's local network public final EncryptionOptions encryption; public final Framing framing; public final Integer socketSendBufferSizeInBytes; public final Integer applicationSendQueueCapacityInBytes; public final Integer applicationSendQueueReserveEndpointCapacityInBytes; public final ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes; public final Boolean tcpNoDelay; public final int flushLowWaterMark, flushHighWaterMark; public final Integer tcpConnectTimeoutInMS; public final Integer tcpUserTimeoutInMS; public final AcceptVersions acceptVersions; public final InetAddressAndPort from; public final SocketFactory socketFactory; public final OutboundMessageCallbacks callbacks; public final OutboundDebugCallbacks debug; public 
final EndpointMessagingVersions endpointToVersion; public OutboundConnectionSettings(InetAddressAndPort to) { this(to, null); } public OutboundConnectionSettings(InetAddressAndPort to, InetAddressAndPort preferred) { this(null, to, preferred, null, null, null, null, null, null, null, 1 << 15, 1 << 16, null, null, null, null, null, null, null, null); } private OutboundConnectionSettings(IInternodeAuthenticator authenticator, InetAddressAndPort to, InetAddressAndPort connectTo, EncryptionOptions encryption, Framing framing, Integer socketSendBufferSizeInBytes, Integer applicationSendQueueCapacityInBytes, Integer applicationSendQueueReserveEndpointCapacityInBytes, ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes, Boolean tcpNoDelay, int flushLowWaterMark, int flushHighWaterMark, Integer tcpConnectTimeoutInMS, Integer tcpUserTimeoutInMS, AcceptVersions acceptVersions, InetAddressAndPort from, SocketFactory socketFactory, OutboundMessageCallbacks callbacks, OutboundDebugCallbacks debug, EndpointMessagingVersions endpointToVersion) { Preconditions.checkArgument(socketSendBufferSizeInBytes == null || socketSendBufferSizeInBytes == 0 || socketSendBufferSizeInBytes >= 1 << 10, "illegal socket send buffer size: " + socketSendBufferSizeInBytes); Preconditions.checkArgument(applicationSendQueueCapacityInBytes == null || applicationSendQueueCapacityInBytes >= 1 << 10, "illegal application send queue capacity: " + applicationSendQueueCapacityInBytes); Preconditions.checkArgument(tcpUserTimeoutInMS == null || tcpUserTimeoutInMS >= 0, "tcp user timeout must be non negative: " + tcpUserTimeoutInMS); Preconditions.checkArgument(tcpConnectTimeoutInMS == null || tcpConnectTimeoutInMS > 0, "tcp connect timeout must be positive: " + tcpConnectTimeoutInMS); this.authenticator = authenticator; this.to = to; this.connectTo = connectTo; this.encryption = encryption; this.framing = framing; this.socketSendBufferSizeInBytes = socketSendBufferSizeInBytes; 
this.applicationSendQueueCapacityInBytes = applicationSendQueueCapacityInBytes; this.applicationSendQueueReserveEndpointCapacityInBytes = applicationSendQueueReserveEndpointCapacityInBytes; this.applicationSendQueueReserveGlobalCapacityInBytes = applicationSendQueueReserveGlobalCapacityInBytes; this.tcpNoDelay = tcpNoDelay; this.flushLowWaterMark = flushLowWaterMark; this.flushHighWaterMark = flushHighWaterMark; this.tcpConnectTimeoutInMS = tcpConnectTimeoutInMS; this.tcpUserTimeoutInMS = tcpUserTimeoutInMS; this.acceptVersions = acceptVersions; this.from = from; this.socketFactory = socketFactory; this.callbacks = callbacks; this.debug = debug; this.endpointToVersion = endpointToVersion; } public boolean authenticate() { return authenticator.authenticate(to.address, to.port); } public boolean withEncryption() { return encryption != null; } public String toString() { return String.format("peer: (%s, %s), framing: %s, encryption: %s", to, connectTo, framing, SocketFactory.encryptionOptionsSummary(encryption)); } public OutboundConnectionSettings withAuthenticator(IInternodeAuthenticator authenticator) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } @SuppressWarnings("unused") public OutboundConnectionSettings toEndpoint(InetAddressAndPort endpoint) { return new OutboundConnectionSettings(authenticator, endpoint, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, 
tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withConnectTo(InetAddressAndPort connectTo) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withEncryption(ServerEncryptionOptions encryption) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } @SuppressWarnings("unused") public OutboundConnectionSettings withFraming(Framing framing) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withSocketSendBufferSizeInBytes(int socketSendBufferSizeInBytes) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, 
tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } @SuppressWarnings("unused") public OutboundConnectionSettings withApplicationSendQueueCapacityInBytes(int applicationSendQueueCapacityInBytes) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withApplicationReserveSendQueueCapacityInBytes(Integer applicationReserveSendQueueEndpointCapacityInBytes, ResourceLimits.Limit applicationReserveSendQueueGlobalCapacityInBytes) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationReserveSendQueueEndpointCapacityInBytes, applicationReserveSendQueueGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } @SuppressWarnings("unused") public OutboundConnectionSettings withTcpNoDelay(boolean tcpNoDelay) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } @SuppressWarnings("unused") public OutboundConnectionSettings 
withNettyBufferBounds(WriteBufferWaterMark nettyBufferBounds) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withTcpConnectTimeoutInMS(int tcpConnectTimeoutInMS) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withTcpUserTimeoutInMS(int tcpUserTimeoutInMS) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withAcceptVersions(AcceptVersions acceptVersions) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, 
callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withFrom(InetAddressAndPort from) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withSocketFactory(SocketFactory socketFactory) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withCallbacks(OutboundMessageCallbacks callbacks) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withDebugCallbacks(OutboundDebugCallbacks debug) { return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes, applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes, tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS, tcpUserTimeoutInMS, 
acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion); } public OutboundConnectionSettings withDefaultReserveLimits() { Integer applicationReserveSendQueueEndpointCapacityInBytes = this.applicationSendQueueReserveEndpointCapacityInBytes; ResourceLimits.Limit applicationReserveSendQueueGlobalCapacityInBytes = this.applicationSendQueueReserveGlobalCapacityInBytes; if (applicationReserveSendQueueEndpointCapacityInBytes == null) applicationReserveSendQueueEndpointCapacityInBytes = DatabaseDescriptor.getInternodeApplicationSendQueueReserveEndpointCapacityInBytes(); if (applicationReserveSendQueueGlobalCapacityInBytes == null) applicationReserveSendQueueGlobalCapacityInBytes = MessagingService.instance().outboundGlobalReserveLimit; return withApplicationReserveSendQueueCapacityInBytes(applicationReserveSendQueueEndpointCapacityInBytes, applicationReserveSendQueueGlobalCapacityInBytes); } public IInternodeAuthenticator authenticator() { return authenticator != null ? authenticator : DatabaseDescriptor.getInternodeAuthenticator(); } public EndpointMessagingVersions endpointToVersion() { if (endpointToVersion == null) return instance().versions; return endpointToVersion; } public InetAddressAndPort from() { return from != null ? from : FBUtilities.getBroadcastAddressAndPort(); } public OutboundDebugCallbacks debug() { return debug != null ? debug : OutboundDebugCallbacks.NONE; } public EncryptionOptions encryption() { return encryption != null ? encryption : defaultEncryptionOptions(to); } public SocketFactory socketFactory() { return socketFactory != null ? socketFactory : instance().socketFactory; } public OutboundMessageCallbacks callbacks() { return callbacks != null ? callbacks : instance().callbacks; } public int socketSendBufferSizeInBytes() { return socketSendBufferSizeInBytes != null ? 
socketSendBufferSizeInBytes : DatabaseDescriptor.getInternodeSocketSendBufferSizeInBytes(); } public int applicationSendQueueCapacityInBytes() { return applicationSendQueueCapacityInBytes != null ? applicationSendQueueCapacityInBytes : DatabaseDescriptor.getInternodeApplicationSendQueueCapacityInBytes(); } public ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes() { return applicationSendQueueReserveGlobalCapacityInBytes != null ? applicationSendQueueReserveGlobalCapacityInBytes : instance().outboundGlobalReserveLimit; } public int applicationSendQueueReserveEndpointCapacityInBytes() { return applicationSendQueueReserveEndpointCapacityInBytes != null ? applicationSendQueueReserveEndpointCapacityInBytes : DatabaseDescriptor.getInternodeApplicationReceiveQueueReserveEndpointCapacityInBytes(); } public int tcpConnectTimeoutInMS() { return tcpConnectTimeoutInMS != null ? tcpConnectTimeoutInMS : DatabaseDescriptor.getInternodeTcpConnectTimeoutInMS(); } public int tcpUserTimeoutInMS(ConnectionCategory category) { // Reusing tcpUserTimeoutInMS for both messaging and streaming, since the connection is created for either one of them. if (tcpUserTimeoutInMS != null) return tcpUserTimeoutInMS; switch (category) { case MESSAGING: return DatabaseDescriptor.getInternodeTcpUserTimeoutInMS(); case STREAMING: return DatabaseDescriptor.getInternodeStreamingTcpUserTimeoutInMS(); default: throw new IllegalArgumentException("Unknown connection category: " + category); } } public boolean tcpNoDelay() { if (tcpNoDelay != null) return tcpNoDelay; if (isInLocalDC(getEndpointSnitch(), getBroadcastAddressAndPort(), to)) return INTRADC_TCP_NODELAY; return DatabaseDescriptor.getInterDCTcpNoDelay(); } public AcceptVersions acceptVersions(ConnectionCategory category) { return acceptVersions != null ? acceptVersions : category.isStreaming() ? 
MessagingService.accept_streaming : MessagingService.accept_messaging; } public OutboundConnectionSettings withLegacyPortIfNecessary(int messagingVersion) { return withConnectTo(maybeWithSecurePort(connectTo(), messagingVersion, withEncryption())); } public InetAddressAndPort connectTo() { InetAddressAndPort connectTo = this.connectTo; if (connectTo == null) connectTo = SystemKeyspace.getPreferredIP(to); return connectTo; } public String connectToId() { return !to.equals(connectTo()) ? to.toString() : to.toString() + '(' + connectTo().toString() + ')'; } public Framing framing(ConnectionCategory category) { if (framing != null) return framing; if (category.isStreaming()) return Framing.UNPROTECTED; return shouldCompressConnection(getEndpointSnitch(), getBroadcastAddressAndPort(), to) ? Framing.LZ4 : Framing.CRC; } // note that connectTo is updated even if specified, in the case of pre40 messaging and using encryption (to update port) public OutboundConnectionSettings withDefaults(ConnectionCategory category) { if (to == null) throw new IllegalArgumentException(); return new OutboundConnectionSettings(authenticator(), to, connectTo(), encryption(), framing(category), socketSendBufferSizeInBytes(), applicationSendQueueCapacityInBytes(), applicationSendQueueReserveEndpointCapacityInBytes(), applicationSendQueueReserveGlobalCapacityInBytes(), tcpNoDelay(), flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS(), tcpUserTimeoutInMS(category), acceptVersions(category), from(), socketFactory(), callbacks(), debug(), endpointToVersion()); } private static boolean isInLocalDC(IEndpointSnitch snitch, InetAddressAndPort localHost, InetAddressAndPort remoteHost) { String remoteDC = snitch.getDatacenter(remoteHost); String localDC = snitch.getDatacenter(localHost); return remoteDC != null && remoteDC.equals(localDC); } @VisibleForTesting static EncryptionOptions defaultEncryptionOptions(InetAddressAndPort endpoint) { ServerEncryptionOptions options = 
DatabaseDescriptor.getInternodeMessagingEncyptionOptions(); return options.shouldEncrypt(endpoint) ? options : null; } @VisibleForTesting static boolean shouldCompressConnection(IEndpointSnitch snitch, InetAddressAndPort localHost, InetAddressAndPort remoteHost) { return (DatabaseDescriptor.internodeCompression() == Config.InternodeCompression.all) || ((DatabaseDescriptor.internodeCompression() == Config.InternodeCompression.dc) && !isInLocalDC(snitch, localHost, remoteHost)); } private static InetAddressAndPort maybeWithSecurePort(InetAddressAndPort address, int messagingVersion, boolean isEncrypted) { if (!isEncrypted || messagingVersion >= VERSION_40) return address; // if we don't know the version of the peer, assume it is 4.0 (or higher) as the only time is would be lower // (as in a 3.x version) is during a cluster upgrade (from 3.x to 4.0). In that case the outbound connection will // unfortunately fail - however the peer should connect to this node (at some point), and once we learn it's version, it'll be // in versions map. thus, when we attempt to reconnect to that node, we'll have the version and we can get the correct port. // we will be able to remove this logic at 5.0. // Also as of 4.0 we will propagate the "regular" port (which will support both SSL and non-SSL) via gossip so // for SSL and version 4.0 always connect to the gossiped port because if SSL is enabled it should ALWAYS // listen for SSL on the "regular" port. return address.withPort(DatabaseDescriptor.getSSLStoragePort()); } }
package net.ghosttrails.www.mydetic.api; import android.os.AsyncTask; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.TreeMap; import net.ghosttrails.www.mydetic.exceptions.MyDeticException; import net.ghosttrails.www.mydetic.exceptions.MyDeticNoMemoryFoundException; import net.ghosttrails.www.mydetic.exceptions.MyDeticReadFailedException; import net.ghosttrails.www.mydetic.exceptions.MyDeticWriteFailedException; import org.joda.time.LocalDate; /** * Non-persistent in-RAM MemoryApi implementation. Used mainly for testing. Simulates the network by * running background threads and (optionally) randomly failing some number of them. */ public class InRamMemoryApi implements MemoryApi { private Map<String, Map<LocalDate, MemoryData>> memoryLists; private int simulatedDelayMs; private int simulatedFailureRate; /** used for simulated random failures */ private Random random; public InRamMemoryApi() { this(0); } /** @param simulatedDelayMs delay each call by this number of milliseconds. */ public InRamMemoryApi(int simulatedDelayMs) { memoryLists = new HashMap<String, Map<LocalDate, MemoryData>>(); this.simulatedDelayMs = simulatedDelayMs; this.simulatedFailureRate = 0; this.random = new Random(System.currentTimeMillis()); } /** @return The simulated failure rate */ public int getSimulatedFailureRate() { return simulatedFailureRate; } /** * Set a random rate at which API operations will fail and throw exceptions, so we can test the * app against a simulated unreliable service or network connection. 
* * @param simulatedFailureRate percentage value 0-100 (int) */ public void setSimulatedFailureRate(int simulatedFailureRate) { this.simulatedFailureRate = simulatedFailureRate; } public int getSimulatedDelayMs() { return simulatedDelayMs; } public void setSimulatedDelayMs(int simulatedDelayMs) { this.simulatedDelayMs = simulatedDelayMs; } private void simulatedSleep() { try { Thread.sleep(simulatedDelayMs); } catch (InterruptedException e) { // We don't really care if we get interrupted occasionally. } } /** * @param userId the user id * @return the Map for userId. Create and return an empty one if required. */ private Map<LocalDate, MemoryData> getListForUserId(String userId) { if (!memoryLists.containsKey(userId)) { memoryLists.put(userId, new TreeMap<LocalDate, MemoryData>()); } return memoryLists.get(userId); } /** @return Whether we should randomly fail this call. */ private boolean checkSimulatedFail() { if (simulatedFailureRate > 0) { int randomInt = random.nextInt(100); return randomInt < simulatedFailureRate; } else { return false; } } /** * @param userId the user id. * @param listener The callback to receive the results. */ @Override public void getMemories(String userId, MemoryListListener listener) { getMemories(userId, null, null, listener); } /** * Get a list of memories between fromDate and toDate (inclusive). Either date can be null, which * indicates no bound on the range in that direction. * * @param userId the user id. * @param fromDate the earliest date to include memories from. * @param toDate the latest date to include memories from. * @param listener callback for results. 
*/ @Override public void getMemories( String userId, LocalDate fromDate, LocalDate toDate, MemoryListListener listener) { AsyncParams params = new AsyncParams(); params.listListener = listener; params.userId = userId; params.fromDate = fromDate; params.toDate = toDate; new GetMemoriesTask().execute(params); } /** * @param userId the user id * @param memoryDate the date to get a memory for * @param listener callback for results. */ @Override public void getMemory(String userId, LocalDate memoryDate, SingleMemoryListener listener) { AsyncParams params = new AsyncParams(); params.singleMemoryListener = listener; params.userId = userId; params.memoryDate = memoryDate; new GetMemoryTask().execute(params); } /** * Adds or updates a memory * * @param userId * @param memory */ @Override public void putMemory(String userId, MemoryData memory, SingleMemoryListener listener) { AsyncParams params = new AsyncParams(); params.singleMemoryListener = listener; params.userId = userId; params.memory = memory; new PutMemoryTask().execute(params); } /** * @param userId * @param memoryDate */ @Override public void deleteMemory(String userId, LocalDate memoryDate, SingleMemoryListener listener) { AsyncParams params = new AsyncParams(); params.singleMemoryListener = listener; params.userId = userId; params.memoryDate = memoryDate; new DeleteMemoryTask().execute(params); } /** * Testing method to populate data. 
* * @param userId * @param memories */ public void populateMemories(String userId, List<MemoryData> memories) throws CloneNotSupportedException { Map<LocalDate, MemoryData> list = getListForUserId(userId); for (MemoryData memory : memories) { list.put(memory.getMemoryDate(), (MemoryData) memory.clone()); } } /** * Testing method to clear data * * @param userId */ public void clearMemories(String userId) { Map<LocalDate, MemoryData> list = getListForUserId(userId); list.clear(); } /** Helper class for passing results around in AsyncTask */ private class AsyncResult { public MemoryListListener listListener; public SingleMemoryListener singleMemoryListener; public MemoryDataList memoryList; public MemoryData memory; public MyDeticException exception; } /** Helper class for passing parameters to AsyncTask */ private class AsyncParams { public MemoryListListener listListener; public SingleMemoryListener singleMemoryListener; public String userId; public LocalDate memoryDate; public LocalDate fromDate; public LocalDate toDate; public MemoryData memory; } /** Class for fetching the memory list asynchronously. */ private class GetMemoriesTask extends AsyncTask<AsyncParams, Void, AsyncResult> { @Override protected AsyncResult doInBackground(AsyncParams... asyncParamses) { AsyncParams params = asyncParamses[0]; AsyncResult result = new AsyncResult(); result.listListener = params.listListener; simulatedSleep(); if (checkSimulatedFail()) { result.exception = new MyDeticReadFailedException("Simulated"); return result; } Map<LocalDate, MemoryData> memoryMap = getListForUserId(params.userId); MemoryDataList retval = new MemoryDataList(params.userId); for (LocalDate d : memoryMap.keySet()) { if ((params.fromDate != null) && (d.isBefore(params.fromDate))) { // before specified from date. continue; } if ((params.toDate != null) && (d.isAfter(params.toDate))) { // after toDate, so we're done. 
break; } retval.setDate(d); } result.memoryList = retval; return result; } @Override protected void onPostExecute(AsyncResult asyncResult) { if (asyncResult.exception != null) { asyncResult.listListener.onApiError(asyncResult.exception); } else { asyncResult.listListener.onApiResponse(asyncResult.memoryList); } } } /** Background task to fetch a single memory */ private class GetMemoryTask extends AsyncTask<AsyncParams, Void, AsyncResult> { @Override protected void onPostExecute(AsyncResult asyncResult) { if (asyncResult.exception != null) { asyncResult.singleMemoryListener.onApiError(asyncResult.exception); } else { asyncResult.singleMemoryListener.onApiResponse(asyncResult.memory); } } @Override protected AsyncResult doInBackground(AsyncParams... asyncParamses) { AsyncParams params = asyncParamses[0]; AsyncResult result = new AsyncResult(); result.singleMemoryListener = params.singleMemoryListener; simulatedSleep(); if (checkSimulatedFail()) { result.exception = new MyDeticReadFailedException("simulated"); return result; } Map<LocalDate, MemoryData> list = getListForUserId(params.userId); if (!list.containsKey(params.memoryDate)) { result.exception = new MyDeticNoMemoryFoundException(params.userId, params.memoryDate); } result.memory = list.get(params.memoryDate); return result; } } /** Background task to fetch a single memory */ private class PutMemoryTask extends AsyncTask<AsyncParams, Void, AsyncResult> { @Override protected void onPostExecute(AsyncResult asyncResult) { if (asyncResult.exception != null) { asyncResult.singleMemoryListener.onApiError(asyncResult.exception); } else { asyncResult.singleMemoryListener.onApiResponse(asyncResult.memory); } } @Override protected AsyncResult doInBackground(AsyncParams... 
asyncParamses) { AsyncParams params = asyncParamses[0]; AsyncResult result = new AsyncResult(); result.singleMemoryListener = params.singleMemoryListener; simulatedSleep(); if (checkSimulatedFail()) { result.exception = new MyDeticWriteFailedException("simulated"); return result; } Map<LocalDate, MemoryData> list = getListForUserId(params.userId); try { list.put(params.memory.getMemoryDate(), (MemoryData) params.memory.clone()); } catch (CloneNotSupportedException e) { e.printStackTrace(); } result.memory = list.get(params.memory.getMemoryDate()); result.memory.setCacheState(MemoryData.CACHESTATE_SAVED); return result; } } /** Background task to fetch a single memory */ private class DeleteMemoryTask extends AsyncTask<AsyncParams, Void, AsyncResult> { @Override protected void onPostExecute(AsyncResult asyncResult) { if (asyncResult.exception != null) { asyncResult.singleMemoryListener.onApiError(asyncResult.exception); } else { asyncResult.singleMemoryListener.onApiResponse(asyncResult.memory); } } @Override protected AsyncResult doInBackground(AsyncParams... asyncParamses) { AsyncParams params = asyncParamses[0]; AsyncResult result = new AsyncResult(); result.singleMemoryListener = params.singleMemoryListener; simulatedSleep(); if (checkSimulatedFail()) { result.exception = new MyDeticWriteFailedException("simulated"); return result; } Map<LocalDate, MemoryData> list = getListForUserId(params.userId); if (!list.containsKey(params.memoryDate)) { result.exception = new MyDeticNoMemoryFoundException(params.userId, params.memoryDate); } else { result.memory = list.remove(params.memoryDate); } return result; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.salesforce; import org.apache.camel.util.ObjectHelper; import org.apache.camel.support.jsse.KeyStoreParameters; /** * Configuration object for Salesforce login properties */ public class SalesforceLoginConfig { public static final String DEFAULT_LOGIN_URL = "https://login.salesforce.com"; private AuthenticationType type; private String instanceUrl; private String loginUrl; private String clientId; private String clientSecret; private String refreshToken; private String userName; private String password; // allow lazy login into Salesforce // note that login issues may not surface until a message needs to be processed private boolean lazyLogin; private KeyStoreParameters keystore; public SalesforceLoginConfig() { loginUrl = DEFAULT_LOGIN_URL; lazyLogin = false; } private SalesforceLoginConfig(AuthenticationType type, String loginUrl, String clientId, String clientSecret, String refreshToken, String userName, String password, boolean lazyLogin, KeyStoreParameters keystore) { this.type = type; this.loginUrl = loginUrl; this.clientId = clientId; this.clientSecret = clientSecret; this.refreshToken = refreshToken; this.userName = userName; this.password = password; 
this.lazyLogin = lazyLogin; this.keystore = keystore; } public SalesforceLoginConfig(String loginUrl, String clientId, String clientSecret, String userName, String password, boolean lazyLogin) { this(AuthenticationType.USERNAME_PASSWORD, loginUrl, clientId, clientSecret, null, userName, password, lazyLogin, null); } public SalesforceLoginConfig(String loginUrl, String clientId, String clientSecret, String refreshToken, boolean lazyLogin) { this(AuthenticationType.REFRESH_TOKEN, loginUrl, clientId, clientSecret, refreshToken, null, null, lazyLogin, null); } public SalesforceLoginConfig(String loginUrl, String clientId, String userName, KeyStoreParameters keystore, boolean lazyLogin) { this(AuthenticationType.JWT, loginUrl, clientId, null, null, userName, null, lazyLogin, keystore); } public String getInstanceUrl() { return instanceUrl; } public void setInstanceUrl(final String instanceUrl) { this.instanceUrl = instanceUrl; } public String getLoginUrl() { return loginUrl; } /** * Salesforce login URL, defaults to https://login.salesforce.com */ public void setLoginUrl(String loginUrl) { this.loginUrl = loginUrl; } public String getClientId() { return clientId; } /** * Salesforce connected application Consumer Key */ public void setClientId(String clientId) { this.clientId = clientId; } public String getClientSecret() { return clientSecret; } /** * Salesforce connected application Consumer Secret */ public void setClientSecret(String clientSecret) { this.clientSecret = clientSecret; } /** * Keystore parameters for keystore containing certificate and private key needed for OAuth 2.0 JWT Bearer Token * Flow. 
*/ public void setKeystore(final KeyStoreParameters keystore) { this.keystore = keystore; } public KeyStoreParameters getKeystore() { return keystore; } public String getRefreshToken() { return refreshToken; } /** * Salesforce connected application Consumer token */ public void setRefreshToken(String refreshToken) { this.refreshToken = refreshToken; } public AuthenticationType getType() { if (type != null) { // use the user provided type return type; } final boolean hasPassword = ObjectHelper.isNotEmpty(password); final boolean hasRefreshToken = ObjectHelper.isNotEmpty(refreshToken); final boolean hasKeystore = keystore != null && ObjectHelper.isNotEmpty(keystore.getResource()); if (hasPassword && !hasRefreshToken && !hasKeystore) { return AuthenticationType.USERNAME_PASSWORD; } if (!hasPassword && hasRefreshToken && !hasKeystore) { return AuthenticationType.REFRESH_TOKEN; } if (!hasPassword && !hasRefreshToken && hasKeystore) { return AuthenticationType.JWT; } if (hasPassword && hasRefreshToken || hasPassword && hasKeystore || hasRefreshToken && hasKeystore) { throw new IllegalArgumentException("The provided authentication configuration can be used in multiple ways" + " for instance both with username/password and refresh_token. Either remove some of the configuration" + " options, so that authentication method can be auto-determined or explicitly set the authentication" + " type."); } throw new IllegalArgumentException( "You must specify parameters aligned with one of the supported authentication methods:" + " for username and password authentication: userName, password, clientSecret;" + " for refresh token authentication: refreshToken, clientSecret;" + " for JWT: userName, keystore. 
And for every one of those loginUrl and clientId must be specified also."); } public void setType(AuthenticationType type) { this.type = type; } public String getUserName() { return userName; } /** * Salesforce account user name */ public void setUserName(String userName) { this.userName = userName; } public String getPassword() { return password; } /** * Salesforce account password */ public void setPassword(String password) { this.password = password; } public boolean isLazyLogin() { return lazyLogin; } /** * Flag to enable/disable lazy OAuth, default is false. When enabled, OAuth token retrieval or generation is not * done until the first API call */ public void setLazyLogin(boolean lazyLogin) { this.lazyLogin = lazyLogin; } public void validate() { ObjectHelper.notNull(loginUrl, "loginUrl"); ObjectHelper.notNull(clientId, "clientId"); final AuthenticationType type = getType(); switch (type) { case USERNAME_PASSWORD: ObjectHelper.notNull(userName, "userName (username/password authentication)"); ObjectHelper.notNull(password, "password (username/password authentication)"); ObjectHelper.notNull(clientSecret, "clientSecret (username/password authentication)"); break; case REFRESH_TOKEN: ObjectHelper.notNull(refreshToken, "refreshToken (authentication with refresh token)"); ObjectHelper.notNull(clientSecret, "clientSecret (authentication with refresh token)"); break; case JWT: ObjectHelper.notNull(userName, "userName (JWT authentication)"); ObjectHelper.notNull(keystore, "keystore (JWT authentication)"); break; default: throw new IllegalArgumentException("Unknown authentication type: " + type); } } @Override public String toString() { return "SalesforceLoginConfig[" + "instanceUrl= '" + instanceUrl + "', loginUrl='" + loginUrl + '\'' + "," + "clientId='" + clientId + '\'' + ", clientSecret='********'" + ", refreshToken='" + refreshToken + '\'' + ", userName='" + userName + '\'' + ", password=********'" + password + '\'' + ", keystore=********'" + keystore + '\'' + 
", lazyLogin=" + lazyLogin + ']'; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.interpreter; import com.google.common.base.Joiner; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Type; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.nio.file.attribute.PosixFilePermission; import java.util.Enumeration; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.gson.Gson; import 
com.google.gson.GsonBuilder; import com.google.gson.internal.StringMap; import com.google.gson.reflect.TypeToken; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.NullArgumentException; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sonatype.aether.RepositoryException; import org.sonatype.aether.repository.Authentication; import org.sonatype.aether.repository.Proxy; import org.sonatype.aether.repository.RemoteRepository; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.dep.Dependency; import org.apache.zeppelin.dep.DependencyResolver; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.display.AngularObjectRegistryListener; import org.apache.zeppelin.helium.ApplicationEventListener; import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter; import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry; import org.apache.zeppelin.interpreter.remote.RemoteInterpreter; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; import org.apache.zeppelin.scheduler.Job; import org.apache.zeppelin.scheduler.Job.Status; /** * Manage interpreters. 
 */
public class InterpreterFactory implements InterpreterGroupFactory {
  private static final Logger logger = LoggerFactory.getLogger(InterpreterFactory.class);

  // Cache of per-interpreter-directory classloaders, keyed by interpreter dir name;
  // synchronized wrapper because it is read/written from multiple threads.
  private Map<String, URLClassLoader> cleanCl =
      Collections.synchronizedMap(new HashMap<String, URLClassLoader>());

  private ZeppelinConfiguration conf;

  private final InterpreterSettingManager interpreterSettingManager;
  private Gson gson;
  private AngularObjectRegistryListener angularObjectRegistryListener;
  private final RemoteInterpreterProcessListener remoteInterpreterProcessListener;
  private final ApplicationEventListener appEventListener;

  private boolean shiroEnabled;

  // Extra environment variables propagated to remote interpreter processes.
  private Map<String, String> env = new HashMap<>();

  // NOTE(review): never assigned in this class as visible here — possibly dead or
  // set via reflection elsewhere; confirm before removing.
  private Interpreter devInterpreter;

  /**
   * Wires the factory into the setting manager; registers itself as the
   * InterpreterGroupFactory (see TODO below).
   */
  public InterpreterFactory(ZeppelinConfiguration conf,
      AngularObjectRegistryListener angularObjectRegistryListener,
      RemoteInterpreterProcessListener remoteInterpreterProcessListener,
      ApplicationEventListener appEventListener, DependencyResolver depResolver,
      boolean shiroEnabled, InterpreterSettingManager interpreterSettingManager)
      throws InterpreterException, IOException, RepositoryException {
    this.conf = conf;
    this.angularObjectRegistryListener = angularObjectRegistryListener;
    this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
    this.appEventListener = appEventListener;
    this.shiroEnabled = shiroEnabled;

    GsonBuilder builder = new GsonBuilder();
    builder.setPrettyPrinting();
    gson = builder.create();

    this.interpreterSettingManager = interpreterSettingManager;
    //TODO(jl): Fix it not to use InterpreterGroupFactory
    interpreterSettingManager.setInterpreterGroupFactory(this);

    logger.info("shiroEnabled: {}", shiroEnabled);
  }

  /**
   * Creates a new interpreter group with an AngularObjectRegistry appropriate
   * for the group's execution mode (remote vs. local).
   *
   * @param id interpreterGroup id. Combination of interpreterSettingId + noteId/userId/shared
   *           depends on interpreter mode
   */
  @Override
  public InterpreterGroup createInterpreterGroup(String id, InterpreterOption option)
      throws InterpreterException, NullArgumentException {
    //When called from REST API without option we receive NPE
    if (option == null) {
      throw new NullArgumentException("option");
    }

    AngularObjectRegistry angularObjectRegistry;

    InterpreterGroup interpreterGroup = new InterpreterGroup(id);
    if (option.isRemote()) {
      angularObjectRegistry =
          new RemoteAngularObjectRegistry(id, angularObjectRegistryListener, interpreterGroup);
    } else {
      angularObjectRegistry = new AngularObjectRegistry(id, angularObjectRegistryListener);
      // TODO(moon) : create distributed resource pool for local interpreters and set
    }

    interpreterGroup.setAngularObjectRegistry(angularObjectRegistry);

    return interpreterGroup;
  }

  /**
   * Instantiates all interpreters of a setting for the given session key and
   * registers them in the setting's interpreter group. Waits (bounded) for any
   * previous interpreters under the same session key to be removed first.
   */
  public void createInterpretersForNote(InterpreterSetting interpreterSetting, String user,
      String noteId, String interpreterSessionKey) {
    InterpreterGroup interpreterGroup = interpreterSetting.getInterpreterGroup(user, noteId);
    InterpreterOption option = interpreterSetting.getOption();
    Properties properties = (Properties) interpreterSetting.getProperties();

    // if interpreters are already there, wait until they're being removed
    synchronized (interpreterGroup) {
      long interpreterRemovalWaitStart = System.nanoTime();
      // interpreter process supposed to be terminated by RemoteInterpreterProcess.dereference()
      // in ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT msec. However, if termination of the process and
      // removal from interpreter group take too long, throw an error.
      long minTimeout = 10L * 1000 * 1000000; // 10 sec
      // timeout in nanoseconds: at least 10s, otherwise 2x the connect timeout (ms -> ns)
      long interpreterRemovalWaitTimeout = Math.max(minTimeout,
          conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT) * 1000000L * 2);
      while (interpreterGroup.containsKey(interpreterSessionKey)) {
        if (System.nanoTime() - interpreterRemovalWaitStart > interpreterRemovalWaitTimeout) {
          throw new InterpreterException("Can not create interpreter");
        }
        try {
          // wait for removal; notified elsewhere when the group changes
          interpreterGroup.wait(1000);
        } catch (InterruptedException e) {
          logger.debug(e.getMessage(), e);
        }
      }
    }

    logger.info("Create interpreter instance {} for note {}", interpreterSetting.getName(), noteId);

    List<InterpreterInfo> interpreterInfos = interpreterSetting.getInterpreterInfos();
    String path = interpreterSetting.getPath();
    InterpreterRunner runner = interpreterSetting.getInterpreterRunner();
    Interpreter interpreter;
    for (InterpreterInfo info : interpreterInfos) {
      if (option.isRemote()) {
        if (option.isExistingProcess()) {
          // NOTE(review): field access here vs. isUserImpersonate() method call in the
          // branch below — presumably equivalent; confirm against InterpreterOption.
          interpreter =
              connectToRemoteRepl(interpreterSessionKey, info.getClassName(), option.getHost(),
                  option.getPort(), properties, interpreterSetting.getId(), user,
                  option.isUserImpersonate);
        } else {
          interpreter = createRemoteRepl(path, interpreterSessionKey, info.getClassName(),
              properties, interpreterSetting.getId(), user, option.isUserImpersonate(), runner);
        }
      } else {
        interpreter = createRepl(interpreterSetting.getPath(), info.getClassName(), properties);
      }

      synchronized (interpreterGroup) {
        List<Interpreter> interpreters = interpreterGroup.get(interpreterSessionKey);
        if (null == interpreters) {
          interpreters = new ArrayList<>();
          interpreterGroup.put(interpreterSessionKey, interpreters);
        }
        // the default interpreter goes first so index 0 lookups resolve to it
        if (info.isDefaultInterpreter()) {
          interpreters.add(0, interpreter);
        } else {
          interpreters.add(interpreter);
        }
      }
      logger.info("Interpreter {} {} created", interpreter.getClassName(), interpreter.hashCode());
      interpreter.setInterpreterGroup(interpreterGroup);
    }
  }

  /**
   * Instantiates a local (in-process) interpreter by reflection, loading it in
   * a child classloader when the class is not already visible to the server's
   * classloader. The thread context classloader is swapped for the duration of
   * instantiation and always restored in the finally block.
   */
  private Interpreter createRepl(String dirName, String className, Properties property)
      throws InterpreterException {
    logger.info("Create repl {} from {}", className, dirName);

    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    try {
      URLClassLoader ccl = cleanCl.get(dirName);
      if (ccl == null) {
        // classloader fallback
        ccl = URLClassLoader.newInstance(new URL[]{}, oldcl);
      }

      boolean separateCL = true;
      try {
        // check if server's classloader has driver already.
        // (this.getClass().forName is effectively the static Class.forName(className))
        Class cls = this.getClass().forName(className);
        if (cls != null) {
          separateCL = false;
        }
      } catch (Exception e) {
        logger.error("exception checking server classloader driver", e);
      }

      URLClassLoader cl;

      if (separateCL == true) {
        cl = URLClassLoader.newInstance(new URL[]{}, ccl);
      } else {
        cl = ccl;
      }
      Thread.currentThread().setContextClassLoader(cl);

      Class<Interpreter> replClass = (Class<Interpreter>) cl.loadClass(className);
      Constructor<Interpreter> constructor =
          replClass.getConstructor(new Class[]{Properties.class});
      Interpreter repl = constructor.newInstance(property);
      repl.setClassloaderUrls(ccl.getURLs());
      LazyOpenInterpreter intp = new LazyOpenInterpreter(new ClassloaderInterpreter(repl, cl));
      return intp;
    } catch (SecurityException e) {
      throw new InterpreterException(e);
    } catch (NoSuchMethodException e) {
      throw new InterpreterException(e);
    } catch (IllegalArgumentException e) {
      throw new InterpreterException(e);
    } catch (InstantiationException e) {
      throw new InterpreterException(e);
    } catch (IllegalAccessException e) {
      throw new InterpreterException(e);
    } catch (InvocationTargetException e) {
      throw new InterpreterException(e);
    } catch (ClassNotFoundException e) {
      throw new InterpreterException(e);
    } finally {
      // always restore the original context classloader for this thread
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  /**
   * Builds a lazy proxy for an already-running remote interpreter process at
   * the given host/port (no process is spawned).
   */
  private Interpreter connectToRemoteRepl(String interpreterSessionKey, String className,
      String host, int port, Properties property, String interpreterSettingId, String userName,
      Boolean isUserImpersonate) {
    int connectTimeout = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT);
    int maxPoolSize = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_MAX_POOL_SIZE);
    String localRepoPath = conf.getInterpreterLocalRepoPath() + "/" + interpreterSettingId;
    LazyOpenInterpreter intp = new LazyOpenInterpreter(
        new RemoteInterpreter(property, interpreterSessionKey, className, host, port,
            localRepoPath, connectTimeout, maxPoolSize, remoteInterpreterProcessListener,
            appEventListener, userName, isUserImpersonate));
    return intp;
  }

  /**
   * Builds a lazy proxy that will spawn a remote interpreter process using the
   * configured runner script. A relative runner path is resolved against the
   * interpreter's install directory.
   */
  Interpreter createRemoteRepl(String interpreterPath, String interpreterSessionKey,
      String className, Properties property, String interpreterSettingId, String userName,
      Boolean isUserImpersonate, InterpreterRunner interpreterRunner) {
    int connectTimeout = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT);
    String localRepoPath = conf.getInterpreterLocalRepoPath() + "/" + interpreterSettingId;
    int maxPoolSize = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_MAX_POOL_SIZE);

    String interpreterRunnerPath;
    if (null != interpreterRunner) {
      interpreterRunnerPath = interpreterRunner.getPath();
      Path p = Paths.get(interpreterRunnerPath);
      if (!p.isAbsolute()) {
        // relative runner paths are resolved under the interpreter directory
        interpreterRunnerPath = Joiner.on(File.separator)
            .join(interpreterPath, interpreterRunnerPath);
      }
    } else {
      interpreterRunnerPath = conf.getInterpreterRemoteRunnerPath();
    }

    RemoteInterpreter remoteInterpreter =
        new RemoteInterpreter(property, interpreterSessionKey, className, interpreterRunnerPath,
            interpreterPath, localRepoPath, connectTimeout, maxPoolSize,
            remoteInterpreterProcessListener, appEventListener, userName, isUserImpersonate);
    remoteInterpreter.addEnv(env);

    return new LazyOpenInterpreter(remoteInterpreter);
  }

  /**
   * Returns the interpreter list for the session key, creating the
   * interpreters first if the group does not yet contain them. The check and
   * creation happen under the group's monitor.
   */
  private List<Interpreter> createOrGetInterpreterList(String user, String noteId,
      InterpreterSetting setting) {
    InterpreterGroup interpreterGroup = setting.getInterpreterGroup(user, noteId);
    synchronized (interpreterGroup) {
      String interpreterSessionKey =
          interpreterSettingManager.getInterpreterSessionKey(user, noteId, setting);
      if (!interpreterGroup.containsKey(interpreterSessionKey)) {
        createInterpretersForNote(setting, user, noteId, interpreterSessionKey);
      }
      return interpreterGroup.get(interpreterSessionKey);
    }
  }

  /** Finds a setting whose name equals the given group name, or null. */
  private InterpreterSetting getInterpreterSettingByGroup(List<InterpreterSetting> settings,
      String group) {
    Preconditions.checkNotNull(group, "group should be not null");
    for (InterpreterSetting setting : settings) {
      if (group.equals(setting.getName())) {
        return setting;
      }
    }
    return null;
  }

  /** Resolves the interpreter class name registered under {@code name} in a setting, or null. */
  private String getInterpreterClassFromInterpreterSetting(InterpreterSetting setting,
      String name) {
    Preconditions.checkNotNull(name, "name should be not null");
    for (InterpreterInfo info : setting.getInterpreterInfos()) {
      String infoName = info.getName();
      if (null != info.getName() && name.equals(infoName)) {
        return info.getClassName();
      }
    }
    return null;
  }

  /**
   * Returns the interpreter instance of the given name within a setting,
   * creating the setting's interpreters on demand; null when the name is not
   * registered in the setting.
   */
  private Interpreter getInterpreter(String user, String noteId, InterpreterSetting setting,
      String name) {
    Preconditions.checkNotNull(noteId, "noteId should be not null");
    Preconditions.checkNotNull(setting, "setting should be not null");
    Preconditions.checkNotNull(name, "name should be not null");

    String className;
    if (null != (className = getInterpreterClassFromInterpreterSetting(setting, name))) {
      List<Interpreter> interpreterGroup = createOrGetInterpreterList(user, noteId, setting);
      for (Interpreter interpreter : interpreterGroup) {
        if (className.equals(interpreter.getClassName())) {
          return interpreter;
        }
      }
    }
    return null;
  }

  /**
   * Resolves an interpreter for a note from a repl name of the form
   * "group.name", "name", or "group" (tried in that order); an empty/null
   * replName yields the default interpreter.
   *
   * @throws InterpreterException when a fully-qualified "group.name" cannot be resolved
   */
  public Interpreter getInterpreter(String user, String noteId, String replName) {
    List<InterpreterSetting> settings = interpreterSettingManager.getInterpreterSettings(noteId);
    InterpreterSetting setting;
    Interpreter interpreter;

    if (settings == null || settings.size() == 0) {
      return null;
    }

    if (replName == null || replName.trim().length() == 0) {
      // get default settings (first available)
      // TODO(jl): Fix it in case of returning null
      InterpreterSetting defaultSettings =
          interpreterSettingManager.getDefaultInterpreterSetting(settings);
      return createOrGetInterpreterList(user, noteId, defaultSettings).get(0);
    }

    String[] replNameSplit = replName.split("\\.");
    if (replNameSplit.length == 2) {
      String group = null;
      String name = null;
      group = replNameSplit[0];
      name = replNameSplit[1];

      setting = getInterpreterSettingByGroup(settings, group);

      if (null != setting) {
        interpreter = getInterpreter(user, noteId, setting, name);

        if (null != interpreter) {
          return interpreter;
        }
      }

      throw new InterpreterException(replName + " interpreter not found");

    } else {
      // first assume replName is 'name' of interpreter. ('groupName' is omitted)
      // search 'name' from first (default) interpreter group
      // TODO(jl): Handle with noteId to support defaultInterpreter per note.
      setting = interpreterSettingManager.getDefaultInterpreterSetting(settings);

      interpreter = getInterpreter(user, noteId, setting, replName);

      if (null != interpreter) {
        return interpreter;
      }

      // next, assume replName is 'group' of interpreter ('name' is omitted)
      // search interpreter group and return first interpreter.
      setting = getInterpreterSettingByGroup(settings, replName);

      if (null != setting) {
        List<Interpreter> interpreters = createOrGetInterpreterList(user, noteId, setting);
        if (null != interpreters) {
          return interpreters.get(0);
        }
      }

      // Support the legacy way to use it
      for (InterpreterSetting s : settings) {
        if (s.getGroup().equals(replName)) {
          List<Interpreter> interpreters = createOrGetInterpreterList(user, noteId, s);
          if (null != interpreters) {
            return interpreters.get(0);
          }
        }
      }
    }

    return null;
  }

  public Map<String, String> getEnv() {
    return env;
  }

  public void setEnv(Map<String, String> env) {
    this.env = env;
  }
}
/* * IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved. * * http://izpack.org/ * http://izpack.codehaus.org/ * * Copyright 2002 Jan Blok * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.izforge.izpack.panels; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.Arrays; import java.util.HashSet; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import com.coi.tools.os.win.MSWinConstants; import com.coi.tools.os.win.NativeLibException; import com.izforge.izpack.installer.AutomatedInstallData; import com.izforge.izpack.installer.PanelConsole; import com.izforge.izpack.installer.PanelConsoleHelper; import com.izforge.izpack.installer.ScriptParser; import com.izforge.izpack.util.Debug; import com.izforge.izpack.util.FileExecutor; import com.izforge.izpack.util.OsVersion; import com.izforge.izpack.util.VariableSubstitutor; import com.izforge.izpack.util.os.RegistryDefaultHandler; import com.izforge.izpack.util.os.RegistryHandler; /** * The Target panel console helper class. 
* * @author Mounir El Hajj */ public class JDKPathPanelConsoleHelper extends PanelConsoleHelper implements PanelConsole { private String minVersion; private String maxVersion; private String variableName; private String detectedVersion; public boolean runGeneratePropertiesFile(AutomatedInstallData installData,PrintWriter printWriter) { printWriter.println(ScriptParser.INSTALL_PATH + "="); return true; } public boolean runConsoleFromPropertiesFile(AutomatedInstallData installData, Properties p) { String strTargetPath = p.getProperty(ScriptParser.INSTALL_PATH); if (strTargetPath == null || "".equals(strTargetPath.trim())) { System.err.println("Inputting the target path is mandatory!!!!"); return false; } else { VariableSubstitutor vs = new VariableSubstitutor(installData.getVariables()); strTargetPath = vs.substitute(strTargetPath, null); installData.setInstallPath(strTargetPath); return true; } } public boolean runConsole(AutomatedInstallData idata) { minVersion = idata.getVariable("JDKPathPanel.minVersion"); maxVersion = idata.getVariable("JDKPathPanel.maxVersion"); variableName = "JDKPath"; String strPath = ""; String strDefaultPath = idata.getVariable(variableName); if ( strDefaultPath == null ) { if (OsVersion.IS_OSX) { strDefaultPath = JDKPathPanel.OSX_JDK_HOME; } else { // Try the JAVA_HOME as child dir of the jdk path strDefaultPath = (new File(idata.getVariable("JAVA_HOME"))).getParent(); } } if (!pathIsValid(strDefaultPath) || !verifyVersion(minVersion, maxVersion, strDefaultPath)) { strDefaultPath = resolveInRegistry(minVersion, maxVersion); if (!pathIsValid(strDefaultPath) || !verifyVersion(minVersion, maxVersion, strDefaultPath)) { strDefaultPath = ""; } } boolean bKeepAsking = true; while (bKeepAsking) { System.out.println("Select JDK path [" + strDefaultPath + "] "); BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); try { String strIn = br.readLine(); if (!strIn.trim().equals("")) { strPath = strIn.trim(); } else { strPath = 
strDefaultPath; } } catch (IOException e) { e.printStackTrace(); } if ( !pathIsValid(strPath) ) { System.out.println("Path "+strPath+" is not valid."); } else if ( !verifyVersion(minVersion, maxVersion, strPath) ) { System.out.println("The chosen JDK has the wrong version (available: "+detectedVersion+" required: "+minVersion+" - "+maxVersion+")."); System.out.println("Continue anyway? [no]"); br = new BufferedReader(new InputStreamReader(System.in)); try { String strIn = br.readLine(); if ( strIn.trim().toLowerCase().equals("y") || strIn.trim().toLowerCase().equals("yes") ) { bKeepAsking = false; } } catch (IOException e) { e.printStackTrace(); } } else { bKeepAsking = false; } idata.setVariable(variableName, strPath); } int i = askEndOfConsolePanel(); if (i == 1) { return true; } else if (i == 2) { return false; } else { return runConsole(idata); } } /** * Returns whether the chosen path is true or not. If existFiles are not null, the existence of * it under the chosen path are detected. This method can be also implemented in derived * classes to handle special verification of the path. * * @return true if existFiles are exist or not defined, else false */ private static boolean pathIsValid(String strPath) { for (String existFile : JDKPathPanel.testFiles) { File path = new File(strPath, existFile).getAbsoluteFile(); if (!path.exists()) { return false; } } return true; } private boolean verifyVersion(String min, String max, String path) { boolean retval = true; // No min and max, version always ok. if (min == null && max == null) { return (true); } // Now get the version ... // We cannot look to the version of this vm because we should // test the given JDK VM. 
// --- Tail of a version-verification method whose beginning lies above this excerpt. ---
// Builds the command line "<path>/bin/java -version" (via "cmd /c" on Windows), runs it,
// and checks the reported version against the optional min/max bounds.
String[] params;
if ( System.getProperty("os.name").indexOf("Windows") >= 0 )
{
    // Windows: launch through the command interpreter.
    String[] paramsp = { "cmd", "/c", path + File.separator + "bin" + File.separator + "java", "-version" };
    params=paramsp;
}
else
{
    String[] paramsp = { path + File.separator + "bin" + File.separator + "java", "-version" };
    params=paramsp;
}
String[] output = new String[2];
FileExecutor fe = new FileExecutor();
fe.executeCommand(params, output);
// Some VMs write the version banner to stderr, so fall back to output[1]
// when stdout (output[0]) is empty.
String vs = (output[0].length() > 0) ? output[0] : output[1];
if (min != null)
{
    // Detected version must be >= min.
    if (!compareVersions(vs, min, true, 4, 4, "__NO_NOT_IDENTIFIER_"))
    {
        retval = false;
    }
}
if (max != null)
{
    // Detected version must be <= max.
    if (!compareVersions(vs, max, false, 4, 4, "__NO_NOT_IDENTIFIER_"))
    {
        retval = false;
    }
}
return retval;
}

/**
 * Extracts a version number from the raw text {@code in} and compares it against
 * {@code template}.
 * <p>
 * The input is split into whitespace/quote-delimited tokens; the version is searched
 * in a window of {@code 2 * halfRange} tokens starting at
 * {@code assumedPlace - halfRange}. The first token in that window which starts with
 * a decimal digit and does not contain {@code useNotIdentifier} is taken as the
 * version and stored in the {@code detectedVersion} field (side effect). Both the
 * detected version and the template are then split on the separators "._-" and
 * compared part by part, numerically.
 *
 * @param in               raw text (e.g. "java -version" output) containing a version
 * @param template         version to compare against, e.g. "1.4.2_02"
 * @param isMin            if {@code true}, {@code template} is a lower bound,
 *                         otherwise an upper bound
 * @param assumedPlace     token index where the version is expected
 * @param halfRange        half-width of the token window searched around
 *                         {@code assumedPlace}
 * @param useNotIdentifier tokens containing this substring are skipped
 * @return {@code true} if the detected version satisfies the bound; {@code false}
 *         if no version token was found or the bound is violated
 */
private boolean compareVersions(String in, String template, boolean isMin, int assumedPlace, int halfRange, String useNotIdentifier)
{
    StringTokenizer st = new StringTokenizer(in, " \t\n\r\f\"");
    int i;
    int currentRange = 0;
    String[] interestedEntries = new String[halfRange + halfRange];
    for (i = 0; i < assumedPlace - halfRange; ++i)
    {
        if (st.hasMoreTokens())
        {
            st.nextToken(); // Skip tokens before the search window.
        }
    }
    for (i = 0; i < halfRange + halfRange; ++i)
    { // Copy the candidate tokens into an intermediate array.
        if (st.hasMoreTokens())
        {
            interestedEntries[i] = st.nextToken();
            currentRange++;
        }
    }
    // Find the first candidate that looks like a version number.
    for (i = 0; i < currentRange; ++i)
    {
        if (useNotIdentifier != null && interestedEntries[i].indexOf(useNotIdentifier) > -1)
        {
            continue;
        }
        if (Character.getType(interestedEntries[i].charAt(0)) != Character.DECIMAL_DIGIT_NUMBER)
        {
            continue;
        }
        break;
    }
    if (i == currentRange)
    {
        // No version-like token in the window.
        detectedVersion = "<not found>";
        return (false);
    }
    detectedVersion = interestedEntries[i];
    StringTokenizer current = new StringTokenizer(interestedEntries[i], "._-");
    StringTokenizer needed = new StringTokenizer(template, "._-");
    while (needed.hasMoreTokens())
    {
        // "current" can run out of tokens while "needed" has more, if no previous
        // part decided the comparison. E.g. needed 1.4.2_02 vs. current 1.4.2:
        // returning false here is correct. Only for a template like 1.4.2_00 would
        // false be debatable, so zero should not be used as the last version part.
        if (!current.hasMoreTokens())
        {
            return (false);
        }
        String cur = current.nextToken();
        String nee = needed.nextToken();
        int curVal = 0;
        int neededVal = 0;
        try
        {
            curVal = Integer.parseInt(cur);
            neededVal = Integer.parseInt(nee);
        }
        catch (NumberFormatException nfe)
        {
            // A non-numeric part (e.g. 1.5.0_beta) only gets reached when all
            // earlier parts equal the limit; treating it as a failed comparison
            // is then the conservative choice.
            return (false);
        }
        if (curVal < neededVal)
        {
            if (isMin)
            {
                return (false);
            }
            return (true);
        }
        if (curVal > neededVal)
        {
            if (isMin)
            {
                return (true);
            }
            return (false);
        }
        // Parts equal: continue with the next, finer-grained part.
    }
    return (true);
}

/**
 * Returns the path to the needed JDK if found in the registry. If there are more than one JDKs
 * registered, that one with the highest allowed version will be returned. Works only on windows.
 * On Unix an empty string returns.
 *
 * @return the path to the needed JDK if found in the windows registry
 */
private String resolveInRegistry(String min, String max)
{
    String retval = "";
    int oldVal = 0;
    RegistryHandler rh = null;
    // NOTE(review): collected but never read afterwards — presumably meant for
    // reporting bad registry entries; confirm intent.
    Set<String> badRegEntries = new HashSet<String>();
    try
    {
        // Get the default registry handler.
        rh = RegistryDefaultHandler.getInstance();
        if (rh == null)
        // We are on an OS which has no registry, or the needed DLL was not bound to
        // this installation. In both cases we give up resolving the JDK path from
        // the registry.
        {
            return (retval);
        }
        oldVal = rh.getRoot(); // Remember the previous root so it can be restored in finally.
        rh.setRoot(MSWinConstants.HKEY_LOCAL_MACHINE);
        String[] keys = rh.getSubkeys(JDKPathPanel.JDK_ROOT_KEY);
        if (keys == null || keys.length == 0)
        {
            return (retval);
        }
        Arrays.sort(keys);
        int i = keys.length - 1;
        // We search for the highest allowed version, therefore iterate backwards.
        // NOTE(review): "i > 0" never examines keys[0] (the lowest registered
        // version after sorting) — looks like an off-by-one; confirm whether
        // keys[0] should be considered too.
        while (i > 0)
        {
            if ( max == null || compareVersions(keys[i], max, false, 4, 4, "__NO_NOT_IDENTIFIER_"))
            {
                // First allowed version found; now verify that the min bound
                // also allows this version.
                if ( min == null || compareVersions(keys[i], min, true, 4, 4, "__NO_NOT_IDENTIFIER_"))
                {
                    String cv = JDKPathPanel.JDK_ROOT_KEY + "\\" + keys[i];
                    String path = rh.getValue(cv, JDKPathPanel.JDK_VALUE_NAME).getStringData();
                    // Use the entry only if its JavaHome path is actually valid.
                    if (!pathIsValid(path))
                    {
                        badRegEntries.add(keys[i]);
                    }
                    else if ("".equals(retval))
                    {
                        // Keep the first (i.e. highest) valid match.
                        retval = path;
                    }
                }
            }
            i--;
        }
    }
    catch (Exception e)
    {
        // Only happens when the registry handler exists but an operation failed;
        // that is a real error worth surfacing.
        e.printStackTrace();
    }
    finally
    {
        // Restore the registry root we changed above.
        if (rh != null && oldVal != 0)
        {
            try
            {
                rh.setRoot(MSWinConstants.HKEY_LOCAL_MACHINE);
            }
            catch (NativeLibException e)
            {
                e.printStackTrace();
            }
        }
    }
    return (retval);
}
}
/*
 * Copyright 2019 Mark Adamcin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.adamcin.oakpal.maven.mojo;

import net.adamcin.oakpal.core.AbortedScanException;
import net.adamcin.oakpal.core.CheckReport;
import net.adamcin.oakpal.core.CheckSpec;
import net.adamcin.oakpal.core.ForcedRoot;
import net.adamcin.oakpal.core.InstallHookPolicy;
import net.adamcin.oakpal.core.JcrNs;
import net.adamcin.oakpal.api.Nothing;
import net.adamcin.oakpal.core.ReportMapper;
import net.adamcin.oakpal.testing.TestPackageUtil;
import org.apache.commons.io.FileUtils;
import org.apache.maven.plugin.MojoFailureException;
import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static net.adamcin.oakpal.api.Fun.result0;
import static net.adamcin.oakpal.core.InstallHookPolicy.PROHIBIT;
import static net.adamcin.oakpal.api.JavaxJson.key;
import static net.adamcin.oakpal.api.JavaxJson.obj;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@link AbstractITestWithPlanMojo}, covering the plan-builder parameter getters and
 * the {@code performScan} workflow (violations, subpackage silencing, blob store, summary file).
 */
public class AbstractITestWithPlanMojoTest {

    /** Base directory for test output; each test method works in its own subdirectory. */
    private final File testOutBaseDir = new File("target/test-out/AbstractITestWithPlanMojoTest");

    @Before
    public void setUp() throws Exception {
        testOutBaseDir.mkdirs();
    }

    /** Concrete stub so the abstract mojo under test can be instantiated. */
    private static final class AbstractITestWithPlanMojoStub extends AbstractITestWithPlanMojo {
        @Override
        protected boolean isIndividuallySkipped() {
            return false;
        }
    }

    /**
     * Creates a fresh stub mojo with a {@link MockMojoLog} attached so tests can
     * inspect logged messages.
     */
    private static AbstractITestWithPlanMojo newMojo() throws Exception {
        final AbstractITestWithPlanMojo mojo = new AbstractITestWithPlanMojoStub();
        MockMojoLog log = new MockMojoLog();
        mojo.setLog(log);
        return mojo;
    }

    /**
     * Exercises every mojo parameter getter and verifies the values flow through to
     * {@link PlanBuilderParams}.
     */
    @Test
    public void testGetters() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testGetters");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();

        AbstractITestWithPlanMojo mojo = newMojo();

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getPreInstallArtifacts());
        final DependencyFilter preInstallArtifact = new DependencyFilter();
        mojo.preInstallArtifacts.add(preInstallArtifact);
        assertEquals("expect same preInstallArtifacts",
                Collections.singletonList(preInstallArtifact), mojo.getPreInstallArtifacts());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getPreInstallFiles());
        final File preInstallFile = new File(testOutDir, "pre-install-file.zip");
        mojo.preInstallFiles.add(preInstallFile);
        assertEquals("expect same preInstallFiles",
                Collections.singletonList(preInstallFile), mojo.getPreInstallFiles());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getCndNames());
        final String expectCndName = "foo.cnd";
        mojo.cndNames.add(expectCndName);
        assertEquals("expect same cndNames",
                Collections.singletonList(expectCndName), mojo.getCndNames());

        assertFalse("expect false slingNodeTypes by default", mojo.isSlingNodeTypes());
        mojo.slingNodeTypes = true;
        assertTrue("expect true slingNodeTypes", mojo.isSlingNodeTypes());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getJcrNamespaces());
        final JcrNs expectJcrNs = JcrNs.create("foo", "http://foo.com");
        mojo.jcrNamespaces.add(expectJcrNs);
        assertEquals("expect same jcrNamespaces",
                Collections.singletonList(expectJcrNs), mojo.getJcrNamespaces());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getJcrPrivileges());
        final String expectJcrPrivilege = "foo:canDo";
        mojo.jcrPrivileges.add(expectJcrPrivilege);
        assertEquals("expect same jcrPrivileges",
                Collections.singletonList(expectJcrPrivilege), mojo.getJcrPrivileges());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getForcedRoots());
        final ForcedRoot expectForcedRoot = new ForcedRoot().withPath("/foo");
        mojo.forcedRoots.add(expectForcedRoot);
        assertEquals("expect same forcedRoots",
                Collections.singletonList(expectForcedRoot), mojo.getForcedRoots());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getChecks());
        final CheckSpec expectCheck = CheckSpec.fromJson(key("name", "myCheck").get());
        mojo.checks.add(expectCheck);
        assertEquals("expect same checks",
                Collections.singletonList(expectCheck), mojo.getChecks());

        assertEquals("expect empty by default", Collections.emptyList(), mojo.getChecklists());
        final String expectChecklist = "fooChecklist";
        mojo.checklists.add(expectChecklist);
        assertEquals("expect same checklists",
                Collections.singletonList(expectChecklist), mojo.getChecklists());

        assertFalse("expect false enablePreInstallHooks by default", mojo.isEnablePreInstallHooks());
        mojo.enablePreInstallHooks = true;
        assertTrue("expect true enablePreInstallHooks", mojo.isEnablePreInstallHooks());

        assertNull("expect null installHookPolicy by default", mojo.getInstallHookPolicy());
        final InstallHookPolicy expectInstallHookPolicy = PROHIBIT;
        mojo.installHookPolicy = expectInstallHookPolicy;
        assertSame("expect same installHookPolicy", expectInstallHookPolicy, mojo.getInstallHookPolicy());

        // All of the values set above must surface through the derived plan builder params.
        PlanBuilderParams params = mojo.getPlanBuilderParams();
        assertEquals("expect param", Collections.singletonList(preInstallArtifact),
                params.getPreInstallArtifacts());
        assertEquals("expect param", Collections.singletonList(preInstallFile),
                params.getPreInstallFiles());
        assertEquals("expect param", Collections.singletonList(expectCndName),
                params.getCndNames());
        assertTrue("expect param", params.isSlingNodeTypes());
        assertEquals("expect param", Collections.singletonList(expectJcrNs),
                params.getJcrNamespaces());
        assertEquals("expect param", Collections.singletonList(expectJcrPrivilege),
                params.getJcrPrivileges());
        assertEquals("expect param", Collections.singletonList(expectForcedRoot),
                params.getForcedRoots());
        assertEquals("expect param", Collections.singletonList(expectCheck),
                params.getChecks());
        assertEquals("expect param", Collections.singletonList(expectChecklist),
                params.getChecklists());
        assertTrue("expect param", params.isEnablePreInstallHooks());
        assertEquals("expect param", PROHIBIT, params.getInstallHookPolicy());
    }

    /** A package without a workspace filter must abort the scan. */
    @Test(expected = AbortedScanException.class)
    public void testPerformScan_abortedScan() throws Exception {
        final File abortingPackage = TestPackageUtil.prepareTestPackage("unfiltered_package.zip");
        AbstractITestWithPlanMojo mojo = newMojo();
        result0(() -> {
            mojo.performScan(Collections.singletonList(abortingPackage));
            return Nothing.instance;
        }).get().throwCause(AbortedScanException.class);
    }

    /** A check spec referencing a nonexistent impl class must fail the mojo. */
    @Test(expected = MojoFailureException.class)
    public void testPerformScan_invalidCheck() throws Exception {
        // Fixed: directory name now matches the test method (was a copy-paste of
        // "testPerformScan_invalidBlobStore").
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_invalidCheck");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        AbstractITestWithPlanMojo mojo = newMojo();
        final File blobStore = new File(testOutDir, "blobStore");
        FileUtils.touch(blobStore);
        mojo.blobStorePath = blobStore.getAbsolutePath();
        mojo.checks.add(CheckSpec.fromJson(key("impl", "com.example.NotAClass").get()));
        mojo.performScan(Collections.emptyList());
    }

    /** With deferBuildFailure unset, violations must fail the build immediately. */
    @Test(expected = MojoFailureException.class)
    public void testPerformScan_failViolations() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_failViolations");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        final File summaryFile = new File(testOutDir, "summary.json");
        AbstractITestWithPlanMojo mojo = newMojo();
        scanWithViolations(mojo, summaryFile);
    }

    /** Scans a test package with an inline check that reports a major violation per package. */
    static void scanWithViolations(final @NotNull AbstractITestWithPlanMojo mojo,
                                   final @NotNull File summaryFile) throws Exception {
        mojo.summaryFile = summaryFile;
        mojo.checks.add(CheckSpec.fromJson(
                key("inlineScript",
                        "function afterExtract(packageId){ oakpal.majorViolation(\"fail\", packageId);}")
                        .get()));
        final File testPackage = TestPackageUtil.prepareTestPackage("tmp_foo_bar.zip");
        mojo.performScan(Collections.singletonList(testPackage));
    }

    /** Scans a nested test package with an inline check that reports a violation per subpackage. */
    static void scanWithSubpackageViolations(final @NotNull AbstractITestWithPlanMojo mojo,
                                             final @NotNull File summaryFile) throws Exception {
        mojo.summaryFile = summaryFile;
        mojo.checks.add(CheckSpec.fromJson(obj()
                .key("name", "subfailer")
                .key("inlineScript",
                        "function identifySubpackage(subpackageId, parentId){ oakpal.majorViolation(\"fail\", subpackageId);}")
                .get()));
        final File testPackage = TestPackageUtil.prepareTestPackage("subsubtest.zip");
        mojo.performScan(Collections.singletonList(testPackage));
    }

    /**
     * First scan without silencing records subpackage violations; second scan with
     * {@code silenceAllSubpackages} records none.
     */
    @Test
    public void testPerformScan_silenceAllSubpackages() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_silenceAllSubpackages");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        final File summaryWithSubsFile = new File(testOutDir, "summaryWithSubs.json");
        AbstractITestWithPlanMojo mojo = newMojo();
        mojo.deferBuildFailure = true;
        scanWithSubpackageViolations(mojo, summaryWithSubsFile);
        List<CheckReport> reportsWithSubs = ReportMapper.readReportsFromFile(summaryWithSubsFile);
        Optional<CheckReport> checkReportWithSubs = reportsWithSubs.stream()
                .filter(report -> "subfailer".equals(report.getCheckName())).findFirst();
        assertTrue("subfailer is present", checkReportWithSubs.isPresent());
        assertEquals("subfailer violations count", 3,
                checkReportWithSubs.get().getViolations().size());

        // Fixed: the silenced scan now writes to its own summary file (previously it
        // reused "summaryWithSubs.json" and the assertions re-read the first file).
        final File summaryNoSubsFile = new File(testOutDir, "summaryNoSubs.json");
        mojo.silenceAllSubpackages = true;
        scanWithSubpackageViolations(mojo, summaryNoSubsFile);
        List<CheckReport> reportsNoSubs = ReportMapper.readReportsFromFile(summaryNoSubsFile);
        Optional<CheckReport> checkReportNoSubs = reportsNoSubs.stream()
                .filter(report -> "subfailer".equals(report.getCheckName())).findFirst();
        assertTrue("subfailer is present", checkReportNoSubs.isPresent());
        assertEquals("subfailer violations count", 0,
                checkReportNoSubs.get().getViolations().size());
    }

    /** With deferBuildFailure set, violations are logged instead of failing the build. */
    @Test
    public void testPerformScan_deferBuildFailure() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_deferBuildFailure");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        final File summaryFile = new File(testOutDir, "summary.json");
        AbstractITestWithPlanMojo mojo = newMojo();
        mojo.deferBuildFailure = true;
        scanWithViolations(mojo, summaryFile);
        MockMojoLog log = (MockMojoLog) mojo.getLog();
        assertTrue("last message matches",
                log.last().filter(entry -> entry.message.startsWith("Evaluation of check reports")).isPresent());
    }

    /** The blob store directory is only created/populated when storeBlobs is enabled. */
    @Test
    public void testPerformScan_withBlobStorePath() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_withBlobStorePath");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        final File summaryFile = new File(testOutDir, "summary.json");
        AbstractITestWithPlanMojo mojo = newMojo();
        final File blobStore = new File(testOutDir, "blobStore");
        mojo.deferBuildFailure = true;
        mojo.blobStorePath = blobStore.getAbsolutePath();
        scanWithViolations(mojo, summaryFile);
        MockMojoLog log = (MockMojoLog) mojo.getLog();
        assertTrue("last message matches",
                log.last().filter(entry -> entry.message.startsWith("Evaluation of check reports")).isPresent());
        final File[] preChildren = blobStore.listFiles();
        assertNull("blobStore file is not yet a directory", preChildren);
        mojo.storeBlobs = true;
        scanWithViolations(mojo, summaryFile);
        final File[] children = blobStore.listFiles();
        assertNotNull("blobStore file is directory", children);
        assertTrue("blobStore has children", children.length > 0);
    }

    /** Writing the summary must fail when the summary path is an existing directory. */
    @Test(expected = MojoFailureException.class)
    public void testPerformScan_writeSummaryFailure() throws Exception {
        final File testOutDir = new File(testOutBaseDir, "testPerformScan_writeSummaryFailure");
        FileUtils.deleteDirectory(testOutDir);
        testOutDir.mkdirs();
        final File summaryFile = new File(testOutDir, "summarydir");
        summaryFile.mkdirs();
        AbstractITestWithPlanMojo mojo = newMojo();
        mojo.deferBuildFailure = true;
        scanWithViolations(mojo, summaryFile);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.jobmanager; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.BlobServerOptions; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.HighAvailabilityOptions; import org.apache.flink.core.fs.FSDataInputStream; import org.apache.flink.core.fs.Path; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.runtime.akka.ListeningBehaviour; import org.apache.flink.runtime.blob.BlobServer; import org.apache.flink.runtime.checkpoint.CheckpointIDCounter; import org.apache.flink.runtime.checkpoint.CheckpointMetaData; import org.apache.flink.runtime.checkpoint.CheckpointMetrics; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.checkpoint.CheckpointRecoveryFactory; import org.apache.flink.runtime.checkpoint.CompletedCheckpointStore; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.runtime.checkpoint.StandaloneCheckpointIDCounter; import org.apache.flink.runtime.checkpoint.TaskStateSnapshot; import org.apache.flink.runtime.clusterframework.types.ResourceID; 
import org.apache.flink.runtime.execution.librarycache.BlobLibraryCacheManager; import org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders; import org.apache.flink.runtime.executiongraph.restart.FixedDelayRestartStrategy; import org.apache.flink.runtime.executiongraph.restart.RestartStrategyFactory; import org.apache.flink.runtime.highavailability.HighAvailabilityServices; import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices; import org.apache.flink.runtime.instance.ActorGateway; import org.apache.flink.runtime.instance.AkkaActorGateway; import org.apache.flink.runtime.instance.InstanceManager; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration; import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings; import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings; import org.apache.flink.runtime.jobgraph.tasks.StatefulTask; import org.apache.flink.runtime.jobmanager.scheduler.Scheduler; import org.apache.flink.runtime.leaderelection.LeaderElectionService; import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService; import org.apache.flink.runtime.leaderelection.TestingLeaderRetrievalService; import org.apache.flink.runtime.messages.JobManagerMessages; import org.apache.flink.runtime.metrics.NoOpMetricRegistry; import org.apache.flink.runtime.metrics.groups.JobManagerMetricGroup; import org.apache.flink.runtime.state.OperatorStateHandle; import org.apache.flink.runtime.state.memory.ByteStreamStateHandle; import org.apache.flink.runtime.taskmanager.TaskManager; import 
org.apache.flink.runtime.testingUtils.TestingJobManager; import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages; import org.apache.flink.runtime.testingUtils.TestingMessages; import org.apache.flink.runtime.testingUtils.TestingTaskManager; import org.apache.flink.runtime.testingUtils.TestingTaskManagerMessages; import org.apache.flink.runtime.testingUtils.TestingUtils; import org.apache.flink.runtime.testutils.RecoverableCompletedCheckpointStore; import org.apache.flink.runtime.util.TestByteStreamStateHandleDeepCompare; import org.apache.flink.util.InstantiationUtil; import org.apache.flink.util.Preconditions; import org.apache.flink.util.TestLogger; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Identify; import akka.actor.PoisonPill; import akka.actor.Props; import akka.japi.pf.FI; import akka.japi.pf.ReceiveBuilder; import akka.pattern.Patterns; import akka.testkit.CallingThreadDispatcher; import akka.testkit.JavaTestKit; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import scala.Int; import scala.Option; import scala.PartialFunction; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.Deadline; import scala.concurrent.duration.FiniteDuration; import scala.runtime.BoxedUnit; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static 
org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class JobManagerHARecoveryTest extends TestLogger {

    // Shared local actor system for all tests in this class.
    private static ActorSystem system;

    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    @BeforeClass
    public static void setup() {
        system = AkkaUtils.createLocalActorSystem(new Configuration());
    }

    @AfterClass
    public static void teardown() {
        JavaTestKit.shutdownActorSystem(system);
    }

    /**
     * Tests that the persisted job is not removed from the SubmittedJobGraphStore if the JobManager
     * loses its leadership. Furthermore, it tests that the job manager can recover the job from
     * the SubmittedJobGraphStore and checkpoint state is recovered as well.
     */
    @Test
    public void testJobRecoveryWhenLosingLeadership() throws Exception {
        FiniteDuration timeout = new FiniteDuration(30, TimeUnit.SECONDS);
        FiniteDuration jobRecoveryTimeout = new FiniteDuration(3, TimeUnit.SECONDS);
        Deadline deadline = new FiniteDuration(2, TimeUnit.MINUTES).fromNow();
        Configuration flinkConfiguration = new Configuration();
        UUID leaderSessionID = UUID.randomUUID();
        UUID newLeaderSessionID = UUID.randomUUID();
        int slots = 2;
        ActorRef archive = null;
        ActorRef jobManager = null;
        ActorRef taskManager = null;

        // HA mode "zookeeper" makes the JobManager persist and recover submitted jobs.
        flinkConfiguration.setString(HighAvailabilityOptions.HA_MODE, "zookeeper");
        flinkConfiguration.setString(HighAvailabilityOptions.HA_STORAGE_PATH, temporaryFolder.newFolder().toString());
        flinkConfiguration.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, slots);
        flinkConfiguration.setLong(BlobServerOptions.CLEANUP_INTERVAL, 3_600L);

        try {
            Scheduler scheduler = new Scheduler(TestingUtils.defaultExecutionContext());
            MySubmittedJobGraphStore mySubmittedJobGraphStore = new MySubmittedJobGraphStore();
            CompletedCheckpointStore checkpointStore = new RecoverableCompletedCheckpointStore();
            CheckpointIDCounter checkpointCounter = new StandaloneCheckpointIDCounter();
            CheckpointRecoveryFactory checkpointStateFactory = new MyCheckpointRecoveryFactory(checkpointStore, checkpointCounter);
            TestingLeaderElectionService myLeaderElectionService = new TestingLeaderElectionService();
            TestingLeaderRetrievalService myLeaderRetrievalService = new TestingLeaderRetrievalService(
                null,
                null);
            TestingHighAvailabilityServices testingHighAvailabilityServices = new TestingHighAvailabilityServices();

            testingHighAvailabilityServices.setJobMasterLeaderRetriever(HighAvailabilityServices.DEFAULT_JOB_ID, myLeaderRetrievalService);

            InstanceManager instanceManager = new InstanceManager();
            instanceManager.addInstanceListener(scheduler);

            archive = system.actorOf(JobManager.getArchiveProps(MemoryArchivist.class, 10, Option.<Path>empty()));

            BlobServer blobServer = new BlobServer(
                flinkConfiguration,
                testingHighAvailabilityServices.createBlobStore());
            blobServer.start();
            Props jobManagerProps = Props.create(
                TestingJobManager.class,
                flinkConfiguration,
                TestingUtils.defaultExecutor(),
                TestingUtils.defaultExecutor(),
                instanceManager,
                scheduler,
                blobServer,
                new BlobLibraryCacheManager(blobServer, FlinkUserCodeClassLoaders.ResolveOrder.CHILD_FIRST, new String[0]),
                archive,
                new FixedDelayRestartStrategy.FixedDelayRestartStrategyFactory(Int.MaxValue(), 100),
                timeout,
                myLeaderElectionService,
                mySubmittedJobGraphStore,
                checkpointStateFactory,
                jobRecoveryTimeout,
                new JobManagerMetricGroup(new NoOpMetricRegistry(), "localhost"),
                Option.<String>empty());

            jobManager = system.actorOf(jobManagerProps);
            ActorGateway gateway = new AkkaActorGateway(jobManager, leaderSessionID);

            taskManager = TaskManager.startTaskManagerComponentsAndActor(
                flinkConfiguration,
                ResourceID.generate(),
                system,
                testingHighAvailabilityServices,
                new NoOpMetricRegistry(),
                "localhost",
                Option.apply("taskmanager"),
                true,
                TestingTaskManager.class);

            ActorGateway tmGateway = new AkkaActorGateway(taskManager, leaderSessionID);

            Future<Object> tmAlive = tmGateway.ask(TestingMessages.getAlive(), deadline.timeLeft());

            Await.ready(tmAlive, deadline.timeLeft());

            JobVertex sourceJobVertex = new JobVertex("Source");
            sourceJobVertex.setInvokableClass(BlockingStatefulInvokable.class);
            sourceJobVertex.setParallelism(slots);

            JobGraph jobGraph = new JobGraph("TestingJob", sourceJobVertex);

            List<JobVertexID> vertexId = Collections.singletonList(sourceJobVertex.getID());
            jobGraph.setSnapshotSettings(new JobCheckpointingSettings(
                    vertexId,
                    vertexId,
                    vertexId,
                    new CheckpointCoordinatorConfiguration(
                        100L,
                        10L * 60L * 1000L,
                        0L,
                        1,
                        ExternalizedCheckpointSettings.none(),
                        true),
                    null));

            BlockingStatefulInvokable.initializeStaticHelpers(slots);

            Future<Object> isLeader = gateway.ask(
                TestingJobManagerMessages.getNotifyWhenLeader(),
                deadline.timeLeft());

            Future<Object> isConnectedToJobManager = tmGateway.ask(
                new TestingTaskManagerMessages.NotifyWhenRegisteredAtJobManager(jobManager),
                deadline.timeLeft());

            // tell jobManager that he's the leader
            myLeaderElectionService.isLeader(leaderSessionID);
            // tell taskManager who's the leader
            myLeaderRetrievalService.notifyListener(gateway.path(), leaderSessionID);

            Await.ready(isLeader, deadline.timeLeft());
            Await.ready(isConnectedToJobManager, deadline.timeLeft());

            // submit blocking job
            Future<Object> jobSubmitted = gateway.ask(
                new JobManagerMessages.SubmitJob(jobGraph, ListeningBehaviour.DETACHED),
                deadline.timeLeft());

            Await.ready(jobSubmitted, deadline.timeLeft());

            // Wait for some checkpoints to complete
            BlockingStatefulInvokable.awaitCompletedCheckpoints();

            Future<Object> jobRemoved = gateway.ask(new TestingJobManagerMessages.NotifyWhenJobRemoved(jobGraph.getJobID()), deadline.timeLeft());

            // Revoke leadership
            myLeaderElectionService.notLeader();

            // check that the job gets removed from the JobManager
            Await.ready(jobRemoved, deadline.timeLeft());
            // but stays in the submitted job graph store
            assertTrue(mySubmittedJobGraphStore.contains(jobGraph.getJobID()));

            Future<Object> jobRunning = gateway.ask(new TestingJobManagerMessages.NotifyWhenJobStatus(jobGraph.getJobID(), JobStatus.RUNNING), deadline.timeLeft());

            // Make JobManager again a leader
            myLeaderElectionService.isLeader(newLeaderSessionID);
            // tell the TaskManager about it
            myLeaderRetrievalService.notifyListener(gateway.path(), newLeaderSessionID);

            // wait that the job is recovered and reaches state RUNNING
            Await.ready(jobRunning, deadline.timeLeft());

            Future<Object> jobFinished = gateway.ask(new TestingJobManagerMessages.NotifyWhenJobRemoved(jobGraph.getJobID()), deadline.timeLeft());

            BlockingInvokable.unblock();

            // wait til the job has finished
            Await.ready(jobFinished, deadline.timeLeft());

            // check that the job has been removed from the submitted job graph store
            assertFalse(mySubmittedJobGraphStore.contains(jobGraph.getJobID()));

            // Check that state has been recovered
            long[] recoveredStates = BlockingStatefulInvokable.getRecoveredStates();
            for (long state : recoveredStates) {
                boolean isExpected = state >= BlockingStatefulInvokable.NUM_CHECKPOINTS_TO_COMPLETE;
                assertTrue("Did not recover checkpoint state correctly, expecting >= " + BlockingStatefulInvokable.NUM_CHECKPOINTS_TO_COMPLETE + ", but state was " + state, isExpected);
            }
        } finally {
            // Best-effort shutdown of all actors started by this test.
            if (archive != null) {
                archive.tell(PoisonPill.getInstance(), ActorRef.noSender());
            }
            if (jobManager != null) {
                jobManager.tell(PoisonPill.getInstance(), ActorRef.noSender());
            }
            if (taskManager != null) {
                taskManager.tell(PoisonPill.getInstance(), ActorRef.noSender());
            }
        }
    }

    /**
     * Tests that a failing job recovery won't cause other job recoveries to fail.
     */
    @Test
    public void testFailingJobRecovery() throws Exception {
        final FiniteDuration timeout = new FiniteDuration(10, TimeUnit.SECONDS);
        final FiniteDuration jobRecoveryTimeout = new FiniteDuration(0, TimeUnit.SECONDS);
        Deadline deadline = new FiniteDuration(1, TimeUnit.MINUTES).fromNow();
        final Configuration flinkConfiguration = new Configuration();
        UUID leaderSessionID = UUID.randomUUID();
        ActorRef jobManager = null;
        JobID jobId1 = new JobID();
        JobID jobId2 = new JobID();

        // set HA mode to zookeeper so that we try to recover jobs
        flinkConfiguration.setString(HighAvailabilityOptions.HA_MODE, "zookeeper");

        try {
            final SubmittedJobGraphStore submittedJobGraphStore = mock(SubmittedJobGraphStore.class);

            SubmittedJobGraph submittedJobGraph = mock(SubmittedJobGraph.class);
            when(submittedJobGraph.getJobId()).thenReturn(jobId2);

            when(submittedJobGraphStore.getJobIds()).thenReturn(Arrays.asList(jobId1, jobId2));

            // fail the first job recovery
            when(submittedJobGraphStore.recoverJobGraph(eq(jobId1))).thenThrow(new Exception("Test exception"));

            // succeed the second job recovery
            when(submittedJobGraphStore.recoverJobGraph(eq(jobId2))).thenReturn(submittedJobGraph);

            final TestingLeaderElectionService myLeaderElectionService = new TestingLeaderElectionService();

            final Collection<JobID> recoveredJobs = new ArrayList<>(2);

            BlobServer blobServer = mock(BlobServer.class);
            Props jobManagerProps = Props.create(
                TestingFailingHAJobManager.class,
                flinkConfiguration,
                TestingUtils.defaultExecutor(),
                TestingUtils.defaultExecutor(),
                mock(InstanceManager.class),
                mock(Scheduler.class),
                blobServer,
                new BlobLibraryCacheManager(blobServer, FlinkUserCodeClassLoaders.ResolveOrder.CHILD_FIRST, new String[0]),
                ActorRef.noSender(),
                new FixedDelayRestartStrategy.FixedDelayRestartStrategyFactory(Int.MaxValue(), 100),
                timeout,
                myLeaderElectionService,
                submittedJobGraphStore,
                mock(CheckpointRecoveryFactory.class),
                jobRecoveryTimeout,
                new JobManagerMetricGroup(new NoOpMetricRegistry(), "localhost"),
                recoveredJobs).withDispatcher(CallingThreadDispatcher.Id());

            jobManager = system.actorOf(jobManagerProps);

            Future<Object> started = Patterns.ask(jobManager, new Identify(42), deadline.timeLeft().toMillis());

            Await.ready(started, deadline.timeLeft());

            // make the job manager the leader --> this triggers the recovery of all jobs
            myLeaderElectionService.isLeader(leaderSessionID);

            // check that we have successfully recovered the second job
            assertThat(recoveredJobs, containsInAnyOrder(jobId2));
        } finally {
            TestingUtils.stopActor(jobManager);
        }
    }

    // JobManager variant that records which jobs were asked to be recovered instead
    // of actually recovering them; all other messages go to the superclass.
    static class TestingFailingHAJobManager extends JobManager {

        private final Collection<JobID> recoveredJobs;

        public TestingFailingHAJobManager(
            Configuration flinkConfiguration,
            ScheduledExecutorService futureExecutor,
            Executor ioExecutor,
            InstanceManager instanceManager,
            Scheduler scheduler,
            BlobServer blobServer,
            BlobLibraryCacheManager libraryCacheManager,
            ActorRef archive,
            RestartStrategyFactory restartStrategyFactory,
            FiniteDuration timeout,
            LeaderElectionService leaderElectionService,
            SubmittedJobGraphStore submittedJobGraphs,
            CheckpointRecoveryFactory checkpointRecoveryFactory,
            FiniteDuration jobRecoveryTimeout,
            JobManagerMetricGroup jobManagerMetricGroup,
            Collection<JobID> recoveredJobs) {
            super(
                flinkConfiguration,
                futureExecutor,
                ioExecutor,
                instanceManager,
                scheduler,
                blobServer,
                libraryCacheManager,
                archive,
                restartStrategyFactory,
                timeout,
                leaderElectionService,
                submittedJobGraphs,
                checkpointRecoveryFactory,
                jobRecoveryTimeout,
                jobManagerMetricGroup,
                Option.empty());

            this.recoveredJobs = recoveredJobs;
        }

        @Override
        public PartialFunction<Object, BoxedUnit> handleMessage() {
            return ReceiveBuilder.match(
                JobManagerMessages.RecoverSubmittedJob.class,
                new FI.UnitApply<JobManagerMessages.RecoverSubmittedJob>() {
                    @Override
                    public void apply(JobManagerMessages.RecoverSubmittedJob submitJob) throws Exception {
                        recoveredJobs.add(submitJob.submittedJobGraph().getJobId());
                    }
                }).matchAny(new FI.UnitApply<Object>() {
                @Override
                public void apply(Object o) throws Exception {
                    TestingFailingHAJobManager.super.handleMessage().apply(o);
                }
            }).build();
        }
    }

    // Recovery factory that always hands out the store/counter supplied at construction.
    static class MyCheckpointRecoveryFactory implements CheckpointRecoveryFactory {

        private final CompletedCheckpointStore store;
        private final CheckpointIDCounter counter;

        public MyCheckpointRecoveryFactory(CompletedCheckpointStore store, CheckpointIDCounter counter) {
            this.store = store;
            this.counter = counter;
        }

        @Override
        public CompletedCheckpointStore createCheckpointStore(JobID jobId, int maxNumberOfCheckpointsToRetain, ClassLoader userClassLoader) throws Exception {
            return store;
        }

        @Override
        public CheckpointIDCounter createCheckpointIDCounter(JobID jobId) throws Exception {
            return counter;
        }
    }

    // Simple in-memory SubmittedJobGraphStore backed by a HashMap.
    static class MySubmittedJobGraphStore implements SubmittedJobGraphStore {

        Map<JobID, SubmittedJobGraph> storedJobs = new HashMap<>();

        @Override
        public void start(SubmittedJobGraphListener jobGraphListener) throws Exception {
        }

        @Override
        public void stop() throws Exception {
        }

        @Override
        public SubmittedJobGraph recoverJobGraph(JobID jobId) throws Exception {
            if (storedJobs.containsKey(jobId)) {
                return storedJobs.get(jobId);
            } else {
                return null;
            }
        }

        @Override
        public void putJobGraph(SubmittedJobGraph jobGraph) throws Exception {
            storedJobs.put(jobGraph.getJobId(), jobGraph);
        }

        @Override
        public void removeJobGraph(JobID jobId) throws Exception {
            storedJobs.remove(jobId);
        }

        @Override
        public Collection<JobID> getJobIds() throws Exception {
            return storedJobs.keySet();
        }

        boolean contains(JobID jobId) {
            return storedJobs.containsKey(jobId);
        }
    }

    // Invokable that blocks in invoke() until unblock() is called from the test.
    public static class BlockingInvokable extends AbstractInvokable {

        private static boolean blocking = true;
        private static Object lock = new Object();

        @Override
        public void invoke() throws Exception {
            while (blocking) {
                synchronized (lock) {
                    lock.wait();
                }
            }
        }

        public static void unblock() {
            blocking = false;

            synchronized (lock) {
                lock.notifyAll();
            }
        }
    }

    public static class BlockingStatefulInvokable extends
BlockingInvokable implements StatefulTask { private static final int NUM_CHECKPOINTS_TO_COMPLETE = 5; private static volatile CountDownLatch completedCheckpointsLatch = new CountDownLatch(1); private static volatile long[] recoveredStates = new long[0]; private int completedCheckpoints = 0; @Override public void setInitialState( TaskStateSnapshot taskStateHandles) throws Exception { int subtaskIndex = getIndexInSubtaskGroup(); if (subtaskIndex < recoveredStates.length) { OperatorStateHandle operatorStateHandle = extractSingletonOperatorState(taskStateHandles); try (FSDataInputStream in = operatorStateHandle.openInputStream()) { recoveredStates[subtaskIndex] = InstantiationUtil.deserializeObject(in, getUserCodeClassLoader()); } } } @Override public boolean triggerCheckpoint(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) throws Exception { ByteStreamStateHandle byteStreamStateHandle = new TestByteStreamStateHandleDeepCompare( String.valueOf(UUID.randomUUID()), InstantiationUtil.serializeObject(checkpointMetaData.getCheckpointId())); Map<String, OperatorStateHandle.StateMetaInfo> stateNameToPartitionOffsets = new HashMap<>(1); stateNameToPartitionOffsets.put( "test-state", new OperatorStateHandle.StateMetaInfo(new long[]{0L}, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE)); OperatorStateHandle operatorStateHandle = new OperatorStateHandle(stateNameToPartitionOffsets, byteStreamStateHandle); TaskStateSnapshot checkpointStateHandles = new TaskStateSnapshot(); checkpointStateHandles.putSubtaskStateByOperatorID( OperatorID.fromJobVertexID(getEnvironment().getJobVertexId()), new OperatorSubtaskState( Collections.singletonList(operatorStateHandle), Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); getEnvironment().acknowledgeCheckpoint( checkpointMetaData.getCheckpointId(), new CheckpointMetrics(0L, 0L, 0L, 0L), checkpointStateHandles); return true; } @Override public void triggerCheckpointOnBarrier(CheckpointMetaData 
checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) throws Exception { throw new UnsupportedOperationException("should not be called!"); } @Override public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) { throw new UnsupportedOperationException("should not be called!"); } @Override public void notifyCheckpointComplete(long checkpointId) { if (completedCheckpoints++ > NUM_CHECKPOINTS_TO_COMPLETE) { completedCheckpointsLatch.countDown(); } } static void initializeStaticHelpers(int numSubtasks) { completedCheckpointsLatch = new CountDownLatch(numSubtasks); recoveredStates = new long[numSubtasks]; } static void awaitCompletedCheckpoints() throws InterruptedException { completedCheckpointsLatch.await(); } static long[] getRecoveredStates() { return recoveredStates; } private static OperatorStateHandle extractSingletonOperatorState(TaskStateSnapshot taskStateHandles) { Set<Map.Entry<OperatorID, OperatorSubtaskState>> subtaskStateMappings = taskStateHandles.getSubtaskStateMappings(); Preconditions.checkNotNull(subtaskStateMappings); Preconditions.checkState(subtaskStateMappings.size() == 1); OperatorSubtaskState subtaskState = subtaskStateMappings.iterator().next().getValue(); Collection<OperatorStateHandle> managedOperatorState = Preconditions.checkNotNull(subtaskState).getManagedOperatorState(); Preconditions.checkNotNull(managedOperatorState); Preconditions.checkState(managedOperatorState.size() == 1); return managedOperatorState.iterator().next(); } } }
package org.sgiusa.model; import java.io.Serializable; import java.lang.String; import java.util.Date; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import org.sgiusa.model.Organization; import org.sgiusa.model.OrganizationLevel; import org.sgiusa.model.User; /** * Generated by Nam. * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Organization", propOrder = { "id", "organizationId", "type", "level", "name", "abbrv", "zipCodes", "creationDate", "lastUpdate", "parent", "children", "creator", "provider", "accessors" }) @XmlRootElement(name = "organization") public class Organization implements Serializable { public static final long serialVersionUID = 1; @XmlAttribute(name = "id") private Long id; @XmlAttribute(name = "organization-id") private String organizationId; @XmlAttribute(name = "type") private String type; @XmlAttribute(name = "level") private OrganizationLevel level; @XmlAttribute(name = "name") private String name; @XmlAttribute(name = "abbrv") private String abbrv; @XmlAttribute(name = "zip-codes") private List<String> zipCodes; @XmlAttribute(name = "creation-date") private Date creationDate; @XmlAttribute(name = "last-update") private Date lastUpdate; @XmlElement(name = "parent") private Organization parent; @XmlElement(name = "children") private List<Organization> children; @XmlElement(name = "creator") private User creator; @XmlElement(name = "provider") private User provider; @XmlElement(name = "accessors") private List<User> accessors; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getOrganizationId() { return organizationId; } public void setOrganizationId(String organizationId) { this.organizationId = organizationId; } public String 
getType() { return type; } public void setType(String type) { this.type = type; } public OrganizationLevel getLevel() { return level; } public void setLevel(OrganizationLevel level) { this.level = level; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getAbbrv() { return abbrv; } public void setAbbrv(String abbrv) { this.abbrv = abbrv; } public List<String> getZipCodes() { return zipCodes; } public void setZipCodes(List<String> zipCodes) { this.zipCodes = zipCodes; } public Date getCreationDate() { return creationDate; } public void setCreationDate(Date creationDate) { this.creationDate = creationDate; } public Date getLastUpdate() { return lastUpdate; } public void setLastUpdate(Date lastUpdate) { this.lastUpdate = lastUpdate; } public Organization getParent() { return parent; } public void setParent(Organization parent) { this.parent = parent; } public List<Organization> getChildren() { return children; } public void setChildren(List<Organization> children) { this.children = children; } public User getCreator() { return creator; } public void setCreator(User creator) { this.creator = creator; } public User getProvider() { return provider; } public void setProvider(User provider) { this.provider = provider; } public List<User> getAccessors() { return accessors; } public void setAccessors(List<User> accessors) { this.accessors = accessors; } public boolean equals(Object object) { if (object == null) return false; if (!object.getClass().isAssignableFrom(this.getClass())) return false; Organization other = (Organization) object; if (this.getOrganizationId() == null || other.getOrganizationId() == null || this.getId() == null || other.getId() == null) return this == other; if (this.getOrganizationId().equals(other.getOrganizationId()) && this.getId().equals(other.getId())) return false; return true; } public int hashCode() { if (getOrganizationId() != null) return getOrganizationId().hashCode(); return 0; 
} }
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.origin.sqs; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.regions.Regions; import com.amazonaws.services.sqs.AmazonSQS; import com.amazonaws.services.sqs.AmazonSQSAsync; import com.amazonaws.services.sqs.AmazonSQSAsyncClientBuilder; import com.amazonaws.services.sqs.AmazonSQSClientBuilder; import com.amazonaws.services.sqs.model.AmazonSQSException; import com.amazonaws.services.sqs.model.ListQueuesRequest; import com.amazonaws.services.sqs.model.ListQueuesResult; import com.google.common.base.Throwables; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.ToErrorContext; import com.streamsets.pipeline.api.base.BasePushSource; import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService; import com.streamsets.pipeline.stage.common.DefaultErrorRecordHandler; import com.streamsets.pipeline.stage.lib.aws.AWSUtil; import com.streamsets.pipeline.stage.lib.aws.AwsRegion; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CompletableFuture; import 
java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

/**
 * Multithreaded push origin that consumes messages from one or more Amazon SQS queues.
 * Queue URLs are resolved at init time (either given directly or discovered by name prefix),
 * partitioned round-robin across worker threads, and each worker runs an async SQS client.
 */
public class SqsConsumer extends BasePushSource {

  private static final Logger LOG = LoggerFactory.getLogger(SqsConsumer.class);

  private static final String SQS_CONFIG_PREFIX = "sqsConfig.";
  private static final String SQS_THREAD_PREFIX = "SQS Consumer Worker - ";

  private final SqsConsumerConfigBean conf;

  // Hand-off queue for terminal errors; polled in produce() to support OnError STOP_PIPELINE.
  private final BlockingQueue<Throwable> error = new SynchronousQueue<>();

  private ExecutorService executorService;

  private ClientConfiguration clientConfiguration;
  private AWSCredentialsProvider credentials;

  // Maps each resolved queue URL to the prefix (or the URL itself) it was configured under.
  private final Map<String, String> queueUrlToPrefix = new HashMap<>();

  public SqsConsumer(SqsConsumerConfigBean conf) {
    this.conf = conf;
  }

  /**
   * Validates the connection config, builds client configuration and credentials, and
   * resolves the set of queue URLs this origin will consume from. Any failure is reported
   * as a {@link ConfigIssue} and aborts initialization.
   */
  @Override
  protected List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();

    // Region OTHER requires an explicit endpoint.
    if (conf.connection.region == AwsRegion.OTHER
        && (conf.connection.endpoint == null || conf.connection.endpoint.isEmpty())) {
      issues.add(getContext().createConfigIssue(Groups.SQS.name(), SQS_CONFIG_PREFIX + "endpoint", Errors.SQS_01));
      return issues;
    }

    try {
      clientConfiguration = AWSUtil.getClientConfiguration(conf.connection.proxyConfig);
    } catch (StageException e) {
      issues.add(getContext().createConfigIssue(Groups.SQS.name(),
          SQS_CONFIG_PREFIX + "proxyConfig",
          Errors.SQS_10,
          e.getMessage(),
          e
      ));
      return issues;
    }

    AmazonSQSClientBuilder builder = AmazonSQSClientBuilder.standard().withClientConfiguration(clientConfiguration);
    Regions region = Regions.DEFAULT_REGION;
    if (conf.connection.region == AwsRegion.OTHER) {
      builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(conf.connection.endpoint, null));
    } else {
      region = Regions.fromName(conf.connection.region.getId().toLowerCase());
      builder.withRegion(region);
    }

    try {
      credentials = AWSUtil.getCredentialsProvider(
          conf.connection.awsConfig,
          conf.connection.proxyConfig,
          getContext(),
          region
      );
    } catch (StageException e) {
      issues.add(getContext().createConfigIssue(Groups.SQS.name(),
          SQS_CONFIG_PREFIX + "awsConfig",
          Errors.SQS_11,
          e.getMessage(),
          e
      ));
      return issues;
    }

    if (conf.specifyQueueURL) {
      // Explicit URLs: each URL maps to itself.
      conf.queueUrls.forEach(url -> queueUrlToPrefix.put(url, url));
    } else {
      // Prefix mode: list queues per prefix and record which prefix produced each URL.
      AmazonSQS validationClient = builder.withCredentials(credentials).build();
      for (int i = 0; i < conf.queuePrefixes.size(); i++) {
        final String queueNamePrefix = conf.queuePrefixes.get(i);
        ListQueuesResult result = validationClient.listQueues(new ListQueuesRequest(queueNamePrefix));
        if (LOG.isDebugEnabled()) {
          LOG.debug("ListQueuesResult for prefix {}: {}", queueNamePrefix, result);
        }
        if (result.getQueueUrls().isEmpty()) {
          //TODO: set index in issue when API-138 is implemented
          issues.add(getContext().createConfigIssue(Groups.SQS.name(),
              SQS_CONFIG_PREFIX + "queuePrefixes",
              Errors.SQS_02,
              queueNamePrefix
          ));
        }
        result.getQueueUrls().forEach(url -> queueUrlToPrefix.put(url, queueNamePrefix));
      }
    }

    if (queueUrlToPrefix.isEmpty()) {
      issues.add(getContext().createConfigIssue(Groups.SQS.name(), SQS_CONFIG_PREFIX + "queuePrefixes", Errors.SQS_09));
    }

    return issues;
  }

  /** Builds a fresh async SQS client using the credentials/config prepared in {@link #init()}. */
  private AmazonSQSAsync buildAsyncClient() {
    final AmazonSQSAsyncClientBuilder builder = AmazonSQSAsyncClientBuilder.standard();
    if (conf.connection.region == AwsRegion.OTHER) {
      builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(conf.connection.endpoint, null));
    } else {
      builder.withRegion(conf.connection.region.getId());
    }
    builder.setCredentials(credentials);
    builder.setClientConfiguration(clientConfiguration);
    return builder.build();
  }

  @Override
  public void destroy() {
    Optional.ofNullable(executorService).ifPresent(ExecutorService::shutdownNow);
    super.destroy();
  }

  @Override
  public int getNumberOfThreads() {
    return conf.numThreads;
  }

  /**
   * Starts one worker callable per thread, each consuming a round-robin slice of the
   * resolved queue URLs, then supervises them until the pipeline is stopped.
   */
  @Override
  public void produce(Map<String, String> lastOffsets, int maxBatchSize) {
    try {
      int batchSize = Math.min(conf.maxBatchSize, maxBatchSize);
      if (!getContext().isPreview() && conf.maxBatchSize > maxBatchSize) {
        getContext().reportError(Errors.SQS_12, maxBatchSize);
      }

      final int numThreads = getNumberOfThreads();
      executorService = new SafeScheduledExecutorService(numThreads, SQS_THREAD_PREFIX);
      ExecutorCompletionService<Exception> completionService = new ExecutorCompletionService<>(executorService);

      IntStream.range(0, numThreads).forEach(threadNumber -> {
        final List<String> threadQueueUrls = getQueueUrlsForThread(new ArrayList<>(queueUrlToPrefix.keySet()),
            threadNumber,
            numThreads
        );
        final Map<String, String> threadQueueUrlsToNames = new HashMap<>();
        threadQueueUrls.forEach(url -> threadQueueUrlsToNames.put(url, queueUrlToPrefix.get(url)));
        if (threadQueueUrlsToNames.isEmpty()) {
          // More threads than queues: this worker has nothing to do.
          if (LOG.isWarnEnabled()) {
            LOG.warn("No queues available for thread {}, so it will not be run", threadNumber);
          }
        } else {
          SqsConsumerWorkerCallable workerCallable = new SqsConsumerWorkerCallable(buildAsyncClient(),
              getContext(),
              threadQueueUrlsToNames,
              conf.numberOfMessagesPerRequest,
              conf.maxBatchTimeMs,
              batchSize,
              conf.connection.region.getId(),
              conf.sqsAttributesOption,
              new DefaultErrorRecordHandler(getContext(), (ToErrorContext) getContext()),
              conf.pollWaitTimeSeconds,
              conf.sqsMessageAttributeNames
          );
          completionService.submit(workerCallable);
        }
      });

      // Supervise workers until the pipeline is stopped; a worker failure surfaces here
      // as a StageException.
      while (!getContext().isStopped()) {
        checkWorkerStatus(completionService);
      }
    } finally {
      shutdownExecutorIfNeeded();
    }

    // NOTE(review): by the time this runs, getContext().isStopped() is already true, so the
    // inner loop exits immediately and no error can be drained here — this drain loop likely
    // belongs inside the supervision loop above; preserved as-is pending confirmation.
    try {
      CompletableFuture.supplyAsync(() -> {
        while (!getContext().isStopped()) {
          // To handle OnError STOP_PIPELINE we keep checking for an exception thrown
          // by any record processor in order to perform a graceful shutdown.
          try {
            Throwable t = error.poll(100, TimeUnit.MILLISECONDS);
            if (t != null) {
              return Optional.of(t);
            }
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          }
        }
        return Optional.<Throwable>empty();
      }).get().ifPresent(t -> {
        throw Throwables.propagate(t);
      });
    } catch (InterruptedException | ExecutionException e) {
      throw Throwables.propagate(e);
    }
  }

  /** Returns the URLs at indices congruent to {@code threadNumber} modulo {@code maxThreads}. */
  private static List<String> getQueueUrlsForThread(List<String> allUrls, int threadNumber, int maxThreads) {
    final List<String> urls = new LinkedList<>();
    IntStream.range(0, allUrls.size())
        .filter(i -> i % maxThreads == threadNumber)
        .forEach(i -> urls.add(allUrls.get(i)));
    return urls;
  }

  /**
   * Non-blocking poll of the completion service; if a worker finished with an exception,
   * rethrows it as a {@link StageException} to stop the pipeline.
   */
  private void checkWorkerStatus(ExecutorCompletionService<Exception> completionService) {
    Future<Exception> future = completionService.poll();
    if (future != null) {
      try {
        Exception terminatingException = future.get();
        if (terminatingException != null) {
          if (terminatingException instanceof StageException) {
            throw (StageException) terminatingException;
          } else {
            throw new StageException(Errors.SQS_06, terminatingException.getMessage(), terminatingException);
          }
        }
      } catch (InterruptedException e) {
        LOG.error("Thread interrupted", e);
        Thread.currentThread().interrupt();
      } catch (ExecutionException e) {
        Throwable cause = Throwables.getRootCause(e);
        if (cause instanceof StageException) {
          throw (StageException) cause;
        } else if (cause instanceof AmazonSQSException) {
          AmazonSQSException exception = (AmazonSQSException) cause;
          // BUGFIX: the original used a printf-style "%s" placeholder, which SLF4J never
          // substitutes; pass the cause as the throwable argument instead.
          LOG.debug("Error while reading from SQS", cause);
          throw new StageException(
              Errors.SQS_13,
              getQueueName(exception.getLocalizedMessage()),
              exception.getErrorCode()
          );
        } else {
          LOG.error("ExecutionException attempting to get completion service result: {}", e.getMessage(), e);
          throw new StageException(Errors.SQS_03, e.toString(), e);
        }
      }
    }
  }

  /** Extracts the queue name from an AWS error message of the form "... https://host/queue ...". */
  private static String getQueueName(String localizedMessage) {
    return localizedMessage.split("https://")[1].split(" ")[0];
  }

  /** Requests an orderly shutdown of the worker pool if it is still running. */
  private void shutdownExecutorIfNeeded() {
    Optional.ofNullable(executorService).ifPresent(executor -> {
      if (!executor.isTerminated()) {
        LOG.info("Shutting down executor service");
        executor.shutdown();
      }
    });
  }
}
package org.jkiss.dbeaver.ext.exasol.model.security; import java.sql.ResultSet; import java.util.ArrayList; import java.util.HashMap; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.ext.exasol.ExasolMessages; import org.jkiss.dbeaver.ext.exasol.model.ExasolDataSource; import org.jkiss.dbeaver.model.DBPRefreshableObject; import org.jkiss.dbeaver.model.DBPSaveableObject; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.meta.Property; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.utils.CommonUtils; public class ExasolSecurityPolicy implements DBPRefreshableObject, DBPSaveableObject { private ExasolDataSource dataSource; private String name = ExasolMessages.exasol_security_policy_name; private String description = ExasolMessages.exasol_security_policy_description; private Boolean persisted; private Integer minLength; private Integer maxLength; private Integer minLowerCase; private Integer minUpperCase; private Integer minNumericChars; private Integer minSpecialChars; private Integer reusableAfterChanges; private Integer reusableAfterDays; private Integer maxFailedLoginAttempts; private Boolean enabled; public static HashMap<String,Integer> parseInput(String input) { if (input.equals("OFF")) { return new HashMap<String,Integer>(); } String[] parms = input.split(":"); HashMap<String,Integer> ret = new HashMap<String,Integer>(); for (int i = 0; i < parms.length; i++) { String parm = parms[i]; String[] data = parm.split("="); ret.put(data[0], Integer.parseInt(data[1])); } return ret; } private void assignValues(HashMap<String,Integer> values) { if (values.isEmpty()) { enabled=false; } for (String key : values.keySet()) { switch (key) { case "MIN_LENGTH": this.minLength = values.get(key); break; case "MAX_LENGTH": this.maxLength = values.get(key); break; case "MIN_LOWER_CASE": this.minLowerCase = values.get(key); break; case "MIN_UPPER_CASE": 
this.minUpperCase = values.get(key); break; case "MIN_NUMERIC_CHARS": this.minNumericChars = values.get(key); break; case "MIN_SPECIAL_CHARS": this.minSpecialChars = values.get(key); break; case "REUSABLE_AFTER_CHANGES": this.reusableAfterChanges = values.get(key); break; case "REUSABLE_AFTER_DAYS": this.reusableAfterDays = values.get(key); break; case "MAX_FAILED_LOGIN_ATTEMPTS": this.maxFailedLoginAttempts = values.get(key); break; default: break; } } } public ExasolSecurityPolicy(ExasolDataSource dataSource, ResultSet dbResult) { this.persisted = true; this.dataSource = dataSource; String value = JDBCUtils.safeGetString(dbResult, "SYSTEM_VALUE"); if (value.isEmpty() | value.equals("OFF")) { this.enabled = false; } else { assignValues(ExasolSecurityPolicy.parseInput(value)); } } @Override @Property(viewable = true, order = 20, multiline= true) public String getDescription() { return description; } @Override public DBSObject getParentObject() { return dataSource.getContainer(); } @Override public ExasolDataSource getDataSource() { return dataSource; } @Override @Property(viewable = true, order = 10) public String getName() { return name; } @Override public boolean isPersisted() { return persisted; } @Override public void setPersisted(boolean persisted) { this.persisted = persisted; } @Property(viewable = true, editable = true, updatable = true, order = 30) public Integer getMinLength() { return minLength; } public void setMinLength(Integer minLength) { this.minLength = minLength; this.enabled = true; } @Property(viewable = true, editable = true, updatable = true, order = 40) public Integer getMaxLength() { return maxLength; } public void setMaxLength(Integer maxLength) { this.enabled = true; this.maxLength = maxLength; } @Property(viewable = true, editable = true, updatable = true, order = 50) public Integer getMinLowerCase() { return minLowerCase; } public void setMinLowerCase(Integer minLowerCase) { this.minLowerCase = minLowerCase; this.enabled = true; } 
@Property(viewable = true, editable = true, updatable = true, order = 60) public Integer getMinUpperCase() { return minUpperCase; } public void setMinUpperCase(Integer minUpperCase) { this.minUpperCase = minUpperCase; this.enabled = true; } @Property(viewable = true, editable = true, updatable = true, order = 70) public Integer getMinSpecialChars() { return minSpecialChars; } public void setMinSpecialChars(Integer specialChars) { this.enabled = true; this.minSpecialChars = specialChars; } @Property(viewable = true, editable = true, updatable = true, order = 80) public Integer getReusableAfterChanges() { return reusableAfterChanges; } public void setReusableAfterChanges(Integer reusableAfterChanges) { this.enabled = true; this.reusableAfterChanges = reusableAfterChanges; } @Property(viewable = true, editable = true, updatable = true, order = 90) public Integer getReusableAfterDays() { return reusableAfterDays; } public void setReusableAfterDays(Integer reusableAfterDays) { this.enabled = true; this.reusableAfterDays = reusableAfterDays; } @Property(viewable = true, editable = true, updatable = true, order = 100) public Integer getMaxFailedLoginAttempts() { return maxFailedLoginAttempts; } public void setMaxFailedLoginAttempts(Integer maxFailedLoginAttempts) { this.enabled = true; this.maxFailedLoginAttempts = maxFailedLoginAttempts; } @Property(viewable = true, editable = true, updatable = true, order = 110) public Boolean getEnabled() { return enabled; } public void setEnabled(Boolean enabled) { this.enabled = enabled; if (! 
enabled) { this.maxFailedLoginAttempts = null; this.maxLength = null; this.minLength = null; this.minLowerCase = null; this.minUpperCase = null; this.minNumericChars = null; this.reusableAfterChanges = null; this.reusableAfterDays = null; this.minSpecialChars = null; } } @Property(viewable = true, editable = true, updatable = true, order = 75) public Integer getMinNumericChars() { return minNumericChars; } public void setMinNumericChars(Integer minNumericChars) { this.minNumericChars = minNumericChars; } @Override public DBSObject refreshObject(DBRProgressMonitor monitor) throws DBException { return this; } public String getSecurityString() { if (! enabled) { return "OFF"; } ArrayList<String> str = new ArrayList<>(); if (maxFailedLoginAttempts != null) { str.add("MAX_FAILED_LOGIN_ATTEMPTS="+maxFailedLoginAttempts.toString()); } if (minLength != null) { str.add("MIN_LENGTH="+minLength.toString()); } if (maxLength != null) { str.add("MAX_LENGTH="+maxLength.toString()); } if (minLowerCase != null) { str.add("MIN_LOWER_CASE="+minLowerCase.toString()); } if (maxFailedLoginAttempts != null) { str.add("MIN_UPPER_CASE="+minUpperCase.toString()); } if (minNumericChars != null) { str.add("MIN_NUMERIC_CHARS="+minNumericChars.toString()); } if (minSpecialChars != null) { str.add("MIN_SPECIAL_CHARS="+minSpecialChars.toString()); } if (reusableAfterChanges != null) { str.add("REUSABLE_AFTER_CHANGES="+reusableAfterChanges.toString()); } if (reusableAfterDays != null) { str.add("REUSABLE_AFTER_DAYS="+reusableAfterDays.toString()); } return CommonUtils.joinStrings(":", str); } }
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package soamodel.provider;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;

import soamodel.ServiceMethod;
import soamodel.SoamodelFactory;
import soamodel.SoamodelPackage;

/**
 * This is the item provider adapter for a {@link soamodel.ServiceMethod} object.
 * <!-- begin-user-doc -->
 * EMF-generated ({@code @generated}) edit-support class; raw collection types are the
 * pre-generics EMF generator style. Do not hand-edit generated methods — regenerate instead.
 * <!-- end-user-doc -->
 * @generated
 */
public class ServiceMethodItemProvider
	extends ItemProviderAdapter
	implements
		IEditingDomainItemProvider,
		IStructuredItemContentProvider,
		ITreeItemContentProvider,
		IItemLabelProvider,
		IItemPropertySource {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ServiceMethodItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * Descriptors are built lazily on first access and cached in {@code itemPropertyDescriptors}.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public List getPropertyDescriptors(Object object) {
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);

			addDescriptionPropertyDescriptor(object);
			addMethodTypePropertyDescriptor(object);
			addNamePropertyDescriptor(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This adds a property descriptor for the Description feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addDescriptionPropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_ServiceMethod_description_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_ServiceMethod_description_feature", "_UI_ServiceMethod_type"),
				 SoamodelPackage.Literals.SERVICE_METHOD__DESCRIPTION,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This adds a property descriptor for the Method Type feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addMethodTypePropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_ServiceMethod_methodType_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_ServiceMethod_methodType_feature", "_UI_ServiceMethod_type"),
				 SoamodelPackage.Literals.SERVICE_METHOD__METHOD_TYPE,
				 true,
				 false,
				 true,
				 null,
				 null,
				 null));
	}

	/**
	 * This adds a property descriptor for the Name feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addNamePropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_ServiceMethod_name_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_ServiceMethod_name_feature", "_UI_ServiceMethod_type"),
				 SoamodelPackage.Literals.SERVICE_METHOD__NAME,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
	 * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
	 * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Collection getChildrenFeatures(Object object) {
		if (childrenFeatures == null) {
			super.getChildrenFeatures(object);
			childrenFeatures.add(SoamodelPackage.Literals.SERVICE_METHOD__PARAMETERS);
			childrenFeatures.add(SoamodelPackage.Literals.SERVICE_METHOD__POSTCONDITIONS);
			childrenFeatures.add(SoamodelPackage.Literals.SERVICE_METHOD__PRECONDITIONS);
		}
		return childrenFeatures;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EStructuralFeature getChildFeature(Object object, Object child) {
		// Check the type of the specified child object and return the proper feature to use for
		// adding (see {@link AddCommand}) it as a child.

		return super.getChildFeature(object, child);
	}

	/**
	 * This returns ServiceMethod.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/ServiceMethod"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * Falls back to the bare type label when the method has no (or an empty) name.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getText(Object object) {
		String label = ((ServiceMethod)object).getName();
		return label == null || label.length() == 0 ?
			getString("_UI_ServiceMethod_type") :
			getString("_UI_ServiceMethod_type") + " " + label;
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * Label features trigger a label-only refresh; containment features refresh content.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void notifyChanged(Notification notification) {
		updateChildren(notification);

		switch (notification.getFeatureID(ServiceMethod.class)) {
			case SoamodelPackage.SERVICE_METHOD__DESCRIPTION:
			case SoamodelPackage.SERVICE_METHOD__NAME:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
				return;
			case SoamodelPackage.SERVICE_METHOD__PARAMETERS:
			case SoamodelPackage.SERVICE_METHOD__POSTCONDITIONS:
			case SoamodelPackage.SERVICE_METHOD__PRECONDITIONS:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
				return;
		}
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void collectNewChildDescriptors(Collection newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);

		newChildDescriptors.add
			(createChildParameter
				(SoamodelPackage.Literals.SERVICE_METHOD__PARAMETERS,
				 SoamodelFactory.eINSTANCE.createParameter()));

		newChildDescriptors.add
			(createChildParameter
				(SoamodelPackage.Literals.SERVICE_METHOD__POSTCONDITIONS,
				 SoamodelFactory.eINSTANCE.createOCLPostcondition()));

		newChildDescriptors.add
			(createChildParameter
				(SoamodelPackage.Literals.SERVICE_METHOD__PRECONDITIONS,
				 SoamodelFactory.eINSTANCE.createOCLPrecondition()));
	}

	/**
	 * Return the resource locator for this item provider's resources.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ResourceLocator getResourceLocator() {
		return SoaModelEditPlugin.INSTANCE;
	}

}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: hello.proto

package net.dehora.grpc.spike.helloworld;

/**
 * Protobuf type {@code net.dehora.grpc.spike.helloworld.HelloResponse}
 *
 * <p>Generated message with a single string field {@code message}. Do not edit
 * by hand; regenerate from hello.proto instead.
 */
public final class HelloResponse extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:net.dehora.grpc.spike.helloworld.HelloResponse)
    HelloResponseOrBuilder {
  // Use HelloResponse.newBuilder() to construct.
  private HelloResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
  }
  // Default instance: empty message.
  private HelloResponse() {
    message_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    // This generator version does not retain unknown fields.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor, invoked only via PARSER below.
  private HelloResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          default: {
            // Unknown field: skip it (and stop if skipping hits end-of-group).
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1, wire type 2 (length-delimited): the `message` string.
            String s = input.readStringRequireUtf8();

            message_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Checked parse errors are tunneled through RuntimeException here and
      // unwrapped again by PARSER.parsePartialFrom below.
      throw new RuntimeException(e.setUnfinishedMessage(this));
    } catch (java.io.IOException e) {
      throw new RuntimeException(
          new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this));
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return net.dehora.grpc.spike.helloworld.HelloWorldProto.internal_static_net_dehora_grpc_spike_helloworld_HelloResponse_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return net.dehora.grpc.spike.helloworld.HelloWorldProto.internal_static_net_dehora_grpc_spike_helloworld_HelloResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            net.dehora.grpc.spike.helloworld.HelloResponse.class, net.dehora.grpc.spike.helloworld.HelloResponse.Builder.class);
  }

  public static final int MESSAGE_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; the accessors below
  // convert lazily and cache the converted form back into this field.
  private volatile java.lang.Object message_;
  /**
   * <code>optional string message = 1;</code>
   */
  public java.lang.String getMessage() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls avoid re-decoding.
      message_ = s;
      return s;
    }
  }
  /**
   * <code>optional string message = 1;</code>
   */
  public com.google.protobuf.ByteString
      getMessageBytes() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      message_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized tri-state: -1 = not computed, 1 = initialized, 0 = not.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // proto3 semantics: the default (empty) value is not written.
    if (!getMessageBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessage.writeString(output, 1, message_);
    }
  }

  public int getSerializedSize() {
    // memoizedSize is inherited from the superclass; -1 means "not computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getMessageBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessage.computeStringSize(1, message_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static net.dehora.grpc.spike.helloworld.HelloResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(net.dehora.grpc.spike.helloworld.HelloResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // Default instance gets a fresh builder; otherwise seed it with this.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code net.dehora.grpc.spike.helloworld.HelloResponse}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:net.dehora.grpc.spike.helloworld.HelloResponse)
      net.dehora.grpc.spike.helloworld.HelloResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return net.dehora.grpc.spike.helloworld.HelloWorldProto.internal_static_net_dehora_grpc_spike_helloworld_HelloResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return net.dehora.grpc.spike.helloworld.HelloWorldProto.internal_static_net_dehora_grpc_spike_helloworld_HelloResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              net.dehora.grpc.spike.helloworld.HelloResponse.class, net.dehora.grpc.spike.helloworld.HelloResponse.Builder.class);
    }

    // Construct using net.dehora.grpc.spike.helloworld.HelloResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No nested builders to force for this message; body intentionally empty.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      message_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return net.dehora.grpc.spike.helloworld.HelloWorldProto.internal_static_net_dehora_grpc_spike_helloworld_HelloResponse_descriptor;
    }

    public net.dehora.grpc.spike.helloworld.HelloResponse getDefaultInstanceForType() {
      return net.dehora.grpc.spike.helloworld.HelloResponse.getDefaultInstance();
    }

    public net.dehora.grpc.spike.helloworld.HelloResponse build() {
      net.dehora.grpc.spike.helloworld.HelloResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public net.dehora.grpc.spike.helloworld.HelloResponse buildPartial() {
      net.dehora.grpc.spike.helloworld.HelloResponse result = new net.dehora.grpc.spike.helloworld.HelloResponse(this);
      result.message_ = message_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof net.dehora.grpc.spike.helloworld.HelloResponse) {
        return mergeFrom((net.dehora.grpc.spike.helloworld.HelloResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(net.dehora.grpc.spike.helloworld.HelloResponse other) {
      if (other == net.dehora.grpc.spike.helloworld.HelloResponse.getDefaultInstance()) return this;
      if (!other.getMessage().isEmpty()) {
        message_ = other.message_;
        onChanged();
      }
      // NOTE(review): onChanged() also fires unconditionally below, so a merge
      // that copies nothing still notifies parent builders — appears redundant
      // but harmless; confirm against the generator before changing.
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      net.dehora.grpc.spike.helloworld.HelloResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (net.dehora.grpc.spike.helloworld.HelloResponse) e.getUnfinishedMessage();
        throw e;
      } finally {
        // Merge even the partial result so already-read fields are not lost.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Same lazy String/ByteString representation as the message class above.
    private java.lang.Object message_ = "";
    /**
     * <code>optional string message = 1;</code>
     */
    public java.lang.String getMessage() {
      java.lang.Object ref = message_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        message_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public com.google.protobuf.ByteString
        getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        message_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder setMessage(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      message_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder clearMessage() {

      message_ = getDefaultInstance().getMessage();
      onChanged();
      return this;
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder setMessageBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      message_ = value;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Intentional no-op: this generator version discards unknown fields.
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Intentional no-op: this generator version discards unknown fields.
      return this;
    }


    // @@protoc_insertion_point(builder_scope:net.dehora.grpc.spike.helloworld.HelloResponse)
  }

  // @@protoc_insertion_point(class_scope:net.dehora.grpc.spike.helloworld.HelloResponse)
  private static final net.dehora.grpc.spike.helloworld.HelloResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new net.dehora.grpc.spike.helloworld.HelloResponse();
  }

  public static net.dehora.grpc.spike.helloworld.HelloResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<HelloResponse>
      PARSER = new com.google.protobuf.AbstractParser<HelloResponse>() {
    public HelloResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      try {
        return new HelloResponse(input, extensionRegistry);
      } catch (RuntimeException e) {
        // Unwrap the checked parse error tunneled by the parsing constructor.
        if (e.getCause() instanceof
            com.google.protobuf.InvalidProtocolBufferException) {
          throw (com.google.protobuf.InvalidProtocolBufferException)
              e.getCause();
        }
        throw e;
      }
    }
  };

  public static com.google.protobuf.Parser<HelloResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<HelloResponse> getParserForType() {
    return PARSER;
  }

  public net.dehora.grpc.spike.helloworld.HelloResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules.keys; import com.facebook.buck.hashing.FileHashLoader; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.rules.AddsToRuleKey; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.DependencyAggregation; import com.facebook.buck.rules.HasDeclaredAndExtraDeps; import com.facebook.buck.rules.RuleKey; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.keys.hasher.RuleKeyHasher; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.hash.HashCode; import java.io.IOException; import java.nio.file.Path; import java.util.function.Function; /** * A factory for generating input-based {@link RuleKey}s. 
*
* @see SupportsInputBasedRuleKey
*/
public final class InputBasedRuleKeyFactory implements RuleKeyFactory<RuleKey> {

  private final RuleKeyFieldLoader ruleKeyFieldLoader;
  private final FileHashLoader fileHashLoader;
  private final SourcePathResolver pathResolver;
  private final SourcePathRuleFinder ruleFinder;
  private final long inputSizeLimit;

  // Per-build memoization of computed keys, keyed by rule/appendable identity.
  private final SingleBuildRuleKeyCache<Result<RuleKey>> ruleKeyCache =
      new SingleBuildRuleKeyCache<>();

  /**
   * Creates a factory that hashes a rule's inputs (rather than its transitive
   * rule keys) when computing its {@link RuleKey}.
   *
   * @param ruleKeyFieldLoader loader that feeds a rule's annotated fields into a builder
   * @param hashLoader source of file content hashes and sizes
   * @param pathResolver resolves {@link SourcePath}s to filesystem paths
   * @param ruleFinder maps {@link BuildTargetSourcePath}s back to their producing rules
   * @param inputSizeLimit cumulative input-size budget; {@code Long.MAX_VALUE} disables the check
   */
  public InputBasedRuleKeyFactory(
      RuleKeyFieldLoader ruleKeyFieldLoader,
      FileHashLoader hashLoader,
      SourcePathResolver pathResolver,
      SourcePathRuleFinder ruleFinder,
      long inputSizeLimit) {
    this.ruleKeyFieldLoader = ruleKeyFieldLoader;
    this.fileHashLoader = hashLoader;
    this.pathResolver = pathResolver;
    this.ruleFinder = ruleFinder;
    this.inputSizeLimit = inputSizeLimit;
  }

  /** Test-only convenience constructor with no input-size limit. */
  @VisibleForTesting
  public InputBasedRuleKeyFactory(
      int seed,
      FileHashLoader hashLoader,
      SourcePathResolver pathResolver,
      SourcePathRuleFinder ruleFinder) {
    this(new RuleKeyFieldLoader(seed), hashLoader, pathResolver, ruleFinder, Long.MAX_VALUE);
  }

  // Computes the key for a build rule, verifying dep usage along the way.
  private Result<RuleKey> calculateBuildRuleKey(BuildRule buildRule) {
    Builder<HashCode> verifyingBuilder = newVerifyingBuilder(buildRule);
    ruleKeyFieldLoader.setFields(verifyingBuilder, buildRule, RuleKeyType.INPUT);
    return verifyingBuilder.buildResult(RuleKey::new);
  }

  // Computes the sub-key contributed by a rule-key appendable.
  private Result<RuleKey> calculateRuleKeyAppendableKey(AddsToRuleKey appendable) {
    Builder<HashCode> appendableBuilder = new Builder<>(RuleKeyBuilder.createDefaultHasher());
    AlterRuleKeys.amendKey(appendableBuilder, appendable);
    return appendableBuilder.buildResult(RuleKey::new);
  }

  /**
   * Returns the (cached) input-based rule key for {@code buildRule}.
   *
   * <p>If key computation blew the input-size budget, the wrapped
   * {@code SizeLimitException} is rethrown directly rather than the cache's
   * wrapper exception.
   */
  @Override
  public RuleKey build(BuildRule buildRule) {
    try {
      return ruleKeyCache.get(buildRule, this::calculateBuildRuleKey).getRuleKey();
    } catch (RuntimeException e) {
      propagateIfSizeLimitException(e);
      throw e;
    }
  }

  private Result<RuleKey> buildAppendableKey(AddsToRuleKey appendable) {
    return ruleKeyCache.get(appendable, this::calculateRuleKeyAppendableKey);
  }

  // At the moment, it is difficult to make SizeLimitException be a checked exception. Due to how
  // exceptions are currently handled (e.g. LoadingCache wraps them with ExecutionException),
  // we need to iterate through the cause chain to check if a SizeLimitException is wrapped.
  private void propagateIfSizeLimitException(Throwable throwable) {
    for (Throwable cause : Throwables.getCausalChain(throwable)) {
      if (cause instanceof SizeLimiter.SizeLimitException) {
        Throwables.throwIfUnchecked(cause);
        // Mirror findFirst(): only the first matching cause is considered.
        break;
      }
    }
  }

  // Returns a builder whose buildResult() additionally checks that every dep
  // consumed while hashing is a legitimate dependency of `rule`.
  private Builder<HashCode> newVerifyingBuilder(final BuildRule rule) {
    final Iterable<DependencyAggregation> aggregatedRules =
        Iterables.filter(rule.getBuildDeps(), DependencyAggregation.class);
    return new Builder<HashCode>(RuleKeyBuilder.createDefaultHasher()) {

      // True when `dep` is reachable through one of the rule's aggregation deps.
      private boolean hasEffectiveDirectDep(BuildRule dep) {
        for (BuildRule aggregator : aggregatedRules) {
          if (aggregator.getBuildDeps().contains(dep)) {
            return true;
          }
        }
        return false;
      }

      // Construct the rule key, verifying that all the deps we saw when constructing it
      // are explicit dependencies of the rule.
      @Override
      public <RESULT> Result<RESULT> buildResult(Function<HashCode, RESULT> mapper) {
        Result<RESULT> result = super.buildResult(mapper);
        for (BuildRule usedDep : result.getDeps()) {
          // Same short-circuit order as before: direct dep, then aggregated
          // dep, then target-graph-only dep.
          boolean legitimate =
              rule.getBuildDeps().contains(usedDep) || hasEffectiveDirectDep(usedDep);
          if (!legitimate && rule instanceof HasDeclaredAndExtraDeps) {
            legitimate =
                ((HasDeclaredAndExtraDeps) rule).getTargetGraphOnlyDeps().contains(usedDep);
          }
          Preconditions.checkState(
              legitimate,
              "%s: %s not in deps (%s)",
              rule.getBuildTarget(),
              usedDep.getBuildTarget(),
              rule.getBuildDeps());
        }
        return result;
      }
    };
  }

  /**
   * Rule-key builder that records which build rules were consumed while
   * hashing, enforces the input-size budget, and hashes source-path contents
   * directly instead of delegating to dependency rule keys.
   */
  /* package */ class Builder<RULE_KEY> extends RuleKeyBuilder<RULE_KEY> {

    // Each element is the dep set contributed by one appendable/source path.
    private final ImmutableList.Builder<Iterable<BuildRule>> deps = ImmutableList.builder();
    private final SizeLimiter sizeLimiter = new SizeLimiter(inputSizeLimit);

    public Builder(RuleKeyHasher<RULE_KEY> hasher) {
      super(ruleFinder, pathResolver, fileHashLoader, hasher);
    }

    @Override
    protected Builder<RULE_KEY> setAddsToRuleKey(AddsToRuleKey appendable) {
      // Compute (or fetch) the appendable's sub-key, remember its deps, and
      // fold the sub-key into this builder.
      Result<RuleKey> subResult = InputBasedRuleKeyFactory.this.buildAppendableKey(appendable);
      deps.add(subResult.getDeps());
      setAddsToRuleKey(subResult.getRuleKey());
      return this;
    }

    @Override
    public Builder<RULE_KEY> setPath(Path absolutePath, Path ideallyRelative) throws IOException {
      // TODO(plamenko): this check should not be necessary, but otherwise some tests fail due to
      // FileHashLoader throwing NoSuchFileException which doesn't get correctly propagated.
      if (inputSizeLimit != Long.MAX_VALUE) {
        sizeLimiter.add(fileHashLoader.getSize(absolutePath));
      }
      super.setPath(absolutePath, ideallyRelative);
      return this;
    }

    @Override
    protected Builder<RULE_KEY> setPath(ProjectFilesystem filesystem, Path relativePath)
        throws IOException {
      // TODO(plamenko): this check should not be necessary, but otherwise some tests fail due to
      // FileHashLoader throwing NoSuchFileException which doesn't get correctly propagated.
      if (inputSizeLimit != Long.MAX_VALUE) {
        sizeLimiter.add(fileHashLoader.getSize(filesystem, relativePath));
      }
      super.setPath(filesystem, relativePath);
      return this;
    }

    @Override
    protected Builder<RULE_KEY> setNonHashingSourcePath(SourcePath sourcePath) {
      setNonHashingSourcePathDirectly(sourcePath);
      return this;
    }

    // Input-based rule keys are evaluated after all dependencies for a rule are available on
    // disk, and so we can always resolve the `Path` packaged in a `SourcePath`. We hash this,
    // rather than the rule key from it's `BuildRule`.
    @Override
    protected Builder<RULE_KEY> setSourcePath(SourcePath sourcePath) throws IOException {
      if (sourcePath instanceof BuildTargetSourcePath) {
        deps.add(ImmutableSet.of(ruleFinder.getRule((BuildTargetSourcePath) sourcePath)));
        // fall through and call setSourcePathDirectly as well
      }
      setSourcePathDirectly(sourcePath);
      return this;
    }

    // Rules supporting input-based rule keys should be described entirely by their `SourcePath`
    // inputs. If we see a `BuildRule` when generating the rule key, this is likely a break in
    // that contract, so check for that.
    @Override
    protected Builder<RULE_KEY> setBuildRule(BuildRule rule) {
      String complaint =
          String.format(
              "Input-based rule key builders cannot process build rules. "
                  + "Was given %s to add to rule key.",
              rule);
      throw new IllegalStateException(complaint);
    }

    /** Finalizes the key and pairs it with every dep observed while hashing. */
    public <RESULT> Result<RESULT> buildResult(Function<RULE_KEY, RESULT> mapper) {
      return new Result<>(this.build(mapper), Iterables.concat(deps.build()));
    }
  }

  /** Immutable pair of a computed key and the build rules consumed to produce it. */
  protected static class Result<RULE_KEY> {

    private final RULE_KEY ruleKey;
    private final Iterable<BuildRule> deps;

    public Result(RULE_KEY ruleKey, Iterable<BuildRule> deps) {
      this.ruleKey = ruleKey;
      this.deps = deps;
    }

    public RULE_KEY getRuleKey() {
      return ruleKey;
    }

    public Iterable<BuildRule> getDeps() {
      return deps;
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.block.BlockSerdeUtil; import com.facebook.presto.hive.metastore.StorageFormat; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.PageBuilder; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.type.ArrayType; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DateType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Decimals; import com.facebook.presto.spi.type.RowType; import com.facebook.presto.spi.type.SqlDate; import com.facebook.presto.spi.type.SqlDecimal; import com.facebook.presto.spi.type.SqlTimestamp; import com.facebook.presto.spi.type.SqlVarbinary; import com.facebook.presto.spi.type.TimestampType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.testing.MaterializedResult; import com.facebook.presto.testing.MaterializedRow; import com.facebook.presto.tests.StructuralTestUtil; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.airlift.slice.DynamicSliceOutput; import io.airlift.slice.Slice; import io.airlift.slice.SliceOutput; import 
io.airlift.slice.Slices; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveDecimalObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.JobConf; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.testng.annotations.Test; import java.io.File; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.concurrent.TimeUnit; import static com.facebook.presto.hive.HdfsConfigurationUpdater.configureCompression; import static com.facebook.presto.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static com.facebook.presto.hive.HiveColumnHandle.ColumnType.REGULAR; import static com.facebook.presto.hive.HivePartitionKey.HIVE_DEFAULT_DYNAMIC_PARTITION; import 
static com.facebook.presto.hive.HiveTestUtils.SESSION; import static com.facebook.presto.hive.HiveTestUtils.TYPE_MANAGER; import static com.facebook.presto.hive.HiveTestUtils.mapType; import static com.facebook.presto.hive.HiveUtil.isStructuralType; import static com.facebook.presto.hive.util.SerDeUtils.serializeObject; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.CharType.createCharType; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.IntegerType.INTEGER; import static com.facebook.presto.spi.type.RealType.REAL; import static com.facebook.presto.spi.type.SmallintType.SMALLINT; import static com.facebook.presto.spi.type.TinyintType.TINYINT; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType; import static com.facebook.presto.spi.type.VarcharType.createVarcharType; import static com.facebook.presto.spi.type.Varchars.isVarcharType; import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream; import static com.facebook.presto.tests.StructuralTestUtil.arrayBlockOf; import static com.facebook.presto.tests.StructuralTestUtil.decimalArrayBlockOf; import static com.facebook.presto.tests.StructuralTestUtil.decimalMapBlockOf; import static com.facebook.presto.tests.StructuralTestUtil.mapBlockOf; import static com.facebook.presto.tests.StructuralTestUtil.rowBlockOf; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Predicates.not; import static com.google.common.base.Strings.padEnd; import static com.google.common.collect.Iterables.filter; import static com.google.common.collect.Iterables.transform; import static java.lang.Float.intBitsToFloat; import static 
java.nio.charset.StandardCharsets.UTF_8; import static java.util.Arrays.fill; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardListObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardMapObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo;
import static org.joda.time.DateTimeZone.UTC;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Base class for Hive file-format tests: declares one {@link TestColumn} per supported
 * Hive type (both as partition keys, prefixed {@code p_}, and as regular columns,
 * prefixed {@code t_}), writes them through a storage format, and verifies what is
 * read back via {@code checkCursor}/{@code checkPageSource}.
 */
@Test(groups = "hive")
public abstract class AbstractTestHiveFileFormats
{
    // Absolute tolerance used when comparing float/double values read back from a file.
    private static final double EPSILON = 0.001;

    // A fixed calendar date (2011-05-06, midnight UTC) exercised as DATE in several encodings:
    // epoch millis, epoch days (Presto's internal DATE representation), formatted string,
    // and java.sql.Date shifted into the JVM's default zone (what Hive writers expect).
    private static final long DATE_MILLIS_UTC = new DateTime(2011, 5, 6, 0, 0, UTC).getMillis();
    private static final long DATE_DAYS = TimeUnit.MILLISECONDS.toDays(DATE_MILLIS_UTC);
    private static final String DATE_STRING = DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC().print(DATE_MILLIS_UTC);
    private static final Date SQL_DATE = new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), DATE_MILLIS_UTC));

    // A fixed timestamp with millisecond precision, as epoch millis and as a formatted string.
    // NOTE(review): unlike DATE_MILLIS_UTC, this DateTime uses the default chronology/zone — confirm intentional.
    private static final long TIMESTAMP = new DateTime(2011, 5, 6, 7, 8, 9, 123).getMillis();
    private static final String TIMESTAMP_STRING = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS").print(TIMESTAMP);

    // A string of exactly HiveVarchar.MAX_VARCHAR_LENGTH 'a' characters, to test the varchar length boundary.
    private static final String VARCHAR_MAX_LENGTH_STRING;

    static {
        char[] varcharMaxLengthCharArray = new char[HiveVarchar.MAX_VARCHAR_LENGTH];
        fill(varcharMaxLengthCharArray, 'a');
        VARCHAR_MAX_LENGTH_STRING = new String(varcharMaxLengthCharArray);
    }

    // Hive object inspectors for decimal(p, s) at representative precisions
    // (short decimals up to 18 digits, long decimal at 38).
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_2 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(2, 1));
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_4 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(4, 2));
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_8 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(8, 4));
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_17 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(17, 8));
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_18 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(18, 8));
    private static final JavaHiveDecimalObjectInspector DECIMAL_INSPECTOR_PRECISION_38 = new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(38, 16));

    // Matching Presto decimal types (same precision/scale pairs as the inspectors above).
    private static final DecimalType DECIMAL_TYPE_PRECISION_2 = DecimalType.createDecimalType(2, 1);
    private static final DecimalType DECIMAL_TYPE_PRECISION_4 = DecimalType.createDecimalType(4, 2);
    private static final DecimalType DECIMAL_TYPE_PRECISION_8 = DecimalType.createDecimalType(8, 4);
    private static final DecimalType DECIMAL_TYPE_PRECISION_17 = DecimalType.createDecimalType(17, 8);
    private static final DecimalType DECIMAL_TYPE_PRECISION_18 = DecimalType.createDecimalType(18, 8);
    private static final DecimalType DECIMAL_TYPE_PRECISION_38 = DecimalType.createDecimalType(38, 16);

    // Values written through Hive for each decimal precision...
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_2 = HiveDecimal.create(new BigDecimal("-1.2"));
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_4 = HiveDecimal.create(new BigDecimal("12.3"));
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_8 = HiveDecimal.create(new BigDecimal("-1234.5678"));
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_17 = HiveDecimal.create(new BigDecimal("123456789.1234"));
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_18 = HiveDecimal.create(new BigDecimal("-1234567890.12345678"));
    private static final HiveDecimal WRITE_DECIMAL_PRECISION_38 = HiveDecimal.create(new BigDecimal("1234567890123456789012.12345678"));

    // ...and the values expected on read: same numbers, zero-padded to the declared scale.
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_2 = new BigDecimal("-1.2");
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_4 = new BigDecimal("12.30");
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_8 = new BigDecimal("-1234.5678");
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_17 = new BigDecimal("123456789.12340000");
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_18 = new BigDecimal("-1234567890.12345678");
    private static final BigDecimal EXPECTED_DECIMAL_PRECISION_38 = new BigDecimal("1234567890123456789012.1234567800000000");

    private static final JavaHiveCharObjectInspector CHAR_INSPECTOR_LENGTH_10 = new JavaHiveCharObjectInspector(getCharTypeInfo(10));

    // TODO: support null values and determine if timestamp and binary are allowed as partition keys
    public static final List<TestColumn> TEST_COLUMNS = ImmutableList.<TestColumn>builder()
            // partition-key columns (p_*): write value is the partition-key string form
            .add(new TestColumn("p_empty_string", javaStringObjectInspector, "", Slices.EMPTY_SLICE, true))
            .add(new TestColumn("p_string", javaStringObjectInspector, "test", Slices.utf8Slice("test"), true))
            .add(new TestColumn("p_empty_varchar", javaHiveVarcharObjectInspector, "", Slices.EMPTY_SLICE, true))
            .add(new TestColumn("p_varchar", javaHiveVarcharObjectInspector, "test", Slices.utf8Slice("test"), true))
            .add(new TestColumn("p_varchar_max_length", javaHiveVarcharObjectInspector, VARCHAR_MAX_LENGTH_STRING, Slices.utf8Slice(VARCHAR_MAX_LENGTH_STRING), true))
            .add(new TestColumn("p_char_10", CHAR_INSPECTOR_LENGTH_10, "test", Slices.utf8Slice("test"), true))
            .add(new TestColumn("p_tinyint", javaByteObjectInspector, "1", (byte) 1, true))
            .add(new TestColumn("p_smallint", javaShortObjectInspector, "2", (short) 2, true))
            .add(new TestColumn("p_int", javaIntObjectInspector, "3", 3, true))
            .add(new TestColumn("p_bigint", javaLongObjectInspector, "4", 4L, true))
            .add(new TestColumn("p_float", javaFloatObjectInspector, "5.1", 5.1f, true))
            .add(new TestColumn("p_double", javaDoubleObjectInspector, "6.2", 6.2, true))
            .add(new TestColumn("p_boolean", javaBooleanObjectInspector, "true", true, true))
            .add(new TestColumn("p_date", javaDateObjectInspector, DATE_STRING, DATE_DAYS, true))
            .add(new TestColumn("p_timestamp", javaTimestampObjectInspector, TIMESTAMP_STRING, TIMESTAMP, true))
            .add(new TestColumn("p_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, WRITE_DECIMAL_PRECISION_2.toString(), EXPECTED_DECIMAL_PRECISION_2, true))
            .add(new TestColumn("p_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, WRITE_DECIMAL_PRECISION_4.toString(), EXPECTED_DECIMAL_PRECISION_4, true))
            .add(new TestColumn("p_decimal_precision_8", DECIMAL_INSPECTOR_PRECISION_8, WRITE_DECIMAL_PRECISION_8.toString(), EXPECTED_DECIMAL_PRECISION_8, true))
            .add(new TestColumn("p_decimal_precision_17", DECIMAL_INSPECTOR_PRECISION_17, WRITE_DECIMAL_PRECISION_17.toString(), EXPECTED_DECIMAL_PRECISION_17, true))
            .add(new TestColumn("p_decimal_precision_18", DECIMAL_INSPECTOR_PRECISION_18, WRITE_DECIMAL_PRECISION_18.toString(), EXPECTED_DECIMAL_PRECISION_18, true))
            .add(new TestColumn("p_decimal_precision_38", DECIMAL_INSPECTOR_PRECISION_38, WRITE_DECIMAL_PRECISION_38.toString() + "BD", EXPECTED_DECIMAL_PRECISION_38, true))
            // .add(new TestColumn("p_binary", javaByteArrayObjectInspector, "test2", Slices.utf8Slice("test2"), true))
            // null partition keys: Hive encodes a null key as the default-dynamic-partition sentinel string
            .add(new TestColumn("p_null_string", javaStringObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_varchar", javaHiveVarcharObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_char", CHAR_INSPECTOR_LENGTH_10, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_tinyint", javaByteObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_smallint", javaShortObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_int", javaIntObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_bigint", javaLongObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_float", javaFloatObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_double", javaDoubleObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_boolean", javaBooleanObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_date", javaDateObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_timestamp", javaTimestampObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_8", DECIMAL_INSPECTOR_PRECISION_8, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_17", DECIMAL_INSPECTOR_PRECISION_17, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_18", DECIMAL_INSPECTOR_PRECISION_18, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            .add(new TestColumn("p_null_decimal_precision_38", DECIMAL_INSPECTOR_PRECISION_38, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            // .add(new TestColumn("p_null_binary", javaByteArrayObjectInspector, HIVE_DEFAULT_DYNAMIC_PARTITION, null, true))
            // regular columns (t_*) with null write values
            .add(new TestColumn("t_null_string", javaStringObjectInspector, null, null))
            .add(new TestColumn("t_null_varchar", javaHiveVarcharObjectInspector, null, null))
            .add(new TestColumn("t_null_char", CHAR_INSPECTOR_LENGTH_10, null, null))
            .add(new TestColumn("t_null_array_int", getStandardListObjectInspector(javaIntObjectInspector), null, null))
            .add(new TestColumn("t_null_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, null, null))
            .add(new TestColumn("t_null_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, null, null))
            .add(new TestColumn("t_null_decimal_precision_8", DECIMAL_INSPECTOR_PRECISION_8, null, null))
            .add(new TestColumn("t_null_decimal_precision_17", DECIMAL_INSPECTOR_PRECISION_17, null, null))
            .add(new TestColumn("t_null_decimal_precision_18", DECIMAL_INSPECTOR_PRECISION_18, null, null))
            .add(new TestColumn("t_null_decimal_precision_38", DECIMAL_INSPECTOR_PRECISION_38, null, null))
            // regular scalar columns
            .add(new TestColumn("t_empty_string", javaStringObjectInspector, "", Slices.EMPTY_SLICE))
            .add(new TestColumn("t_string", javaStringObjectInspector, "test", Slices.utf8Slice("test")))
            .add(new TestColumn("t_empty_varchar", javaHiveVarcharObjectInspector, new HiveVarchar("", HiveVarchar.MAX_VARCHAR_LENGTH), Slices.EMPTY_SLICE))
            .add(new TestColumn("t_varchar", javaHiveVarcharObjectInspector, new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH), Slices.utf8Slice("test")))
            .add(new TestColumn("t_varchar_max_length", javaHiveVarcharObjectInspector, new HiveVarchar(VARCHAR_MAX_LENGTH_STRING, HiveVarchar.MAX_VARCHAR_LENGTH), Slices.utf8Slice(VARCHAR_MAX_LENGTH_STRING)))
            // NOTE(review): "t_char" is declared with partitionKey=true despite the t_ prefix — confirm intentional
            .add(new TestColumn("t_char", CHAR_INSPECTOR_LENGTH_10, "test", Slices.utf8Slice("test"), true))
            .add(new TestColumn("t_tinyint", javaByteObjectInspector, (byte) 1, (byte) 1))
            .add(new TestColumn("t_smallint", javaShortObjectInspector, (short) 2, (short) 2))
            .add(new TestColumn("t_int", javaIntObjectInspector, 3, 3))
            .add(new TestColumn("t_bigint", javaLongObjectInspector, 4L, 4L))
            .add(new TestColumn("t_float", javaFloatObjectInspector, 5.1f, 5.1f))
            .add(new TestColumn("t_double", javaDoubleObjectInspector, 6.2, 6.2))
            .add(new TestColumn("t_boolean_true", javaBooleanObjectInspector, true, true))
            .add(new TestColumn("t_boolean_false", javaBooleanObjectInspector, false, false))
            .add(new TestColumn("t_date", javaDateObjectInspector, SQL_DATE, DATE_DAYS))
            .add(new TestColumn("t_timestamp", javaTimestampObjectInspector, new Timestamp(TIMESTAMP), TIMESTAMP))
            .add(new TestColumn("t_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, WRITE_DECIMAL_PRECISION_2, EXPECTED_DECIMAL_PRECISION_2))
            .add(new TestColumn("t_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, WRITE_DECIMAL_PRECISION_4, EXPECTED_DECIMAL_PRECISION_4))
            .add(new TestColumn("t_decimal_precision_8", DECIMAL_INSPECTOR_PRECISION_8, WRITE_DECIMAL_PRECISION_8, EXPECTED_DECIMAL_PRECISION_8))
            .add(new TestColumn("t_decimal_precision_17", DECIMAL_INSPECTOR_PRECISION_17, WRITE_DECIMAL_PRECISION_17, EXPECTED_DECIMAL_PRECISION_17))
            .add(new TestColumn("t_decimal_precision_18", DECIMAL_INSPECTOR_PRECISION_18, WRITE_DECIMAL_PRECISION_18, EXPECTED_DECIMAL_PRECISION_18))
            .add(new TestColumn("t_decimal_precision_38", DECIMAL_INSPECTOR_PRECISION_38, WRITE_DECIMAL_PRECISION_38, EXPECTED_DECIMAL_PRECISION_38))
            .add(new TestColumn("t_binary", javaByteArrayObjectInspector, Slices.utf8Slice("test2").getBytes(), Slices.utf8Slice("test2")))
            // map columns
            .add(new TestColumn("t_map_string", getStandardMapObjectInspector(javaStringObjectInspector, javaStringObjectInspector), ImmutableMap.of("test", "test"), mapBlockOf(createUnboundedVarcharType(), createUnboundedVarcharType(), "test", "test")))
            .add(new TestColumn("t_map_tinyint", getStandardMapObjectInspector(javaByteObjectInspector, javaByteObjectInspector), ImmutableMap.of((byte) 1, (byte) 1), mapBlockOf(TINYINT, TINYINT, (byte) 1, (byte) 1)))
            .add(new TestColumn("t_map_varchar", getStandardMapObjectInspector(javaHiveVarcharObjectInspector, javaHiveVarcharObjectInspector), ImmutableMap.of(new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH), new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH)), mapBlockOf(createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), "test", "test")))
            .add(new TestColumn("t_map_char", getStandardMapObjectInspector(CHAR_INSPECTOR_LENGTH_10, CHAR_INSPECTOR_LENGTH_10), ImmutableMap.of(new HiveChar("test", 10), new HiveChar("test", 10)), mapBlockOf(createCharType(10), createCharType(10), "test", "test")))
            .add(new TestColumn("t_map_smallint", getStandardMapObjectInspector(javaShortObjectInspector, javaShortObjectInspector), ImmutableMap.of((short) 2, (short) 2), mapBlockOf(SMALLINT, SMALLINT, (short) 2, (short) 2)))
            // a null map key is dropped on read, so only the non-null entry is expected back
            .add(new TestColumn("t_map_null_key", getStandardMapObjectInspector(javaLongObjectInspector, javaLongObjectInspector), asMap(new Long[] {null, 2L}, new Long[] {0L, 3L}), mapBlockOf(BIGINT, BIGINT, 2, 3)))
            .add(new TestColumn("t_map_int", getStandardMapObjectInspector(javaIntObjectInspector, javaIntObjectInspector), ImmutableMap.of(3, 3), mapBlockOf(INTEGER, INTEGER, 3, 3)))
            .add(new TestColumn("t_map_bigint", getStandardMapObjectInspector(javaLongObjectInspector, javaLongObjectInspector), ImmutableMap.of(4L, 4L), mapBlockOf(BIGINT, BIGINT, 4L, 4L)))
            .add(new TestColumn("t_map_float", getStandardMapObjectInspector(javaFloatObjectInspector, javaFloatObjectInspector), ImmutableMap.of(5.0f, 5.0f), mapBlockOf(REAL, REAL, 5.0f, 5.0f)))
            .add(new TestColumn("t_map_double", getStandardMapObjectInspector(javaDoubleObjectInspector, javaDoubleObjectInspector), ImmutableMap.of(6.0, 6.0), mapBlockOf(DOUBLE, DOUBLE, 6.0, 6.0)))
            .add(new TestColumn("t_map_boolean", getStandardMapObjectInspector(javaBooleanObjectInspector, javaBooleanObjectInspector), ImmutableMap.of(true, true), mapBlockOf(BOOLEAN, BOOLEAN, true, true)))
            .add(new TestColumn("t_map_date", getStandardMapObjectInspector(javaDateObjectInspector, javaDateObjectInspector), ImmutableMap.of(SQL_DATE, SQL_DATE), mapBlockOf(DateType.DATE, DateType.DATE, DATE_DAYS, DATE_DAYS)))
            .add(new TestColumn("t_map_timestamp", getStandardMapObjectInspector(javaTimestampObjectInspector, javaTimestampObjectInspector), ImmutableMap.of(new Timestamp(TIMESTAMP), new Timestamp(TIMESTAMP)), mapBlockOf(TimestampType.TIMESTAMP, TimestampType.TIMESTAMP, TIMESTAMP, TIMESTAMP)))
            .add(new TestColumn("t_map_decimal_precision_2", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_2, DECIMAL_INSPECTOR_PRECISION_2), ImmutableMap.of(WRITE_DECIMAL_PRECISION_2, WRITE_DECIMAL_PRECISION_2), StructuralTestUtil.decimalMapBlockOf(DECIMAL_TYPE_PRECISION_2, EXPECTED_DECIMAL_PRECISION_2)))
            .add(new TestColumn("t_map_decimal_precision_4", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_4, DECIMAL_INSPECTOR_PRECISION_4), ImmutableMap.of(WRITE_DECIMAL_PRECISION_4, WRITE_DECIMAL_PRECISION_4), decimalMapBlockOf(DECIMAL_TYPE_PRECISION_4, EXPECTED_DECIMAL_PRECISION_4)))
            .add(new TestColumn("t_map_decimal_precision_8", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_8, DECIMAL_INSPECTOR_PRECISION_8), ImmutableMap.of(WRITE_DECIMAL_PRECISION_8, WRITE_DECIMAL_PRECISION_8), decimalMapBlockOf(DECIMAL_TYPE_PRECISION_8, EXPECTED_DECIMAL_PRECISION_8)))
            .add(new TestColumn("t_map_decimal_precision_17", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_17, DECIMAL_INSPECTOR_PRECISION_17), ImmutableMap.of(WRITE_DECIMAL_PRECISION_17, WRITE_DECIMAL_PRECISION_17), decimalMapBlockOf(DECIMAL_TYPE_PRECISION_17, EXPECTED_DECIMAL_PRECISION_17)))
            .add(new TestColumn("t_map_decimal_precision_18", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_18, DECIMAL_INSPECTOR_PRECISION_18), ImmutableMap.of(WRITE_DECIMAL_PRECISION_18, WRITE_DECIMAL_PRECISION_18), decimalMapBlockOf(DECIMAL_TYPE_PRECISION_18, EXPECTED_DECIMAL_PRECISION_18)))
            .add(new TestColumn("t_map_decimal_precision_38", getStandardMapObjectInspector(DECIMAL_INSPECTOR_PRECISION_38, DECIMAL_INSPECTOR_PRECISION_38), ImmutableMap.of(WRITE_DECIMAL_PRECISION_38, WRITE_DECIMAL_PRECISION_38), decimalMapBlockOf(DECIMAL_TYPE_PRECISION_38, EXPECTED_DECIMAL_PRECISION_38)))
            // array columns
            .add(new TestColumn("t_array_empty", getStandardListObjectInspector(javaStringObjectInspector), ImmutableList.of(), arrayBlockOf(createUnboundedVarcharType())))
            .add(new TestColumn("t_array_string", getStandardListObjectInspector(javaStringObjectInspector), ImmutableList.of("test"), arrayBlockOf(createUnboundedVarcharType(), "test")))
            .add(new TestColumn("t_array_tinyint", getStandardListObjectInspector(javaByteObjectInspector), ImmutableList.of((byte) 1), arrayBlockOf(TINYINT, (byte) 1)))
            .add(new TestColumn("t_array_smallint", getStandardListObjectInspector(javaShortObjectInspector), ImmutableList.of((short) 2), arrayBlockOf(SMALLINT, (short) 2)))
            .add(new TestColumn("t_array_int", getStandardListObjectInspector(javaIntObjectInspector), ImmutableList.of(3), arrayBlockOf(INTEGER, 3)))
            .add(new TestColumn("t_array_bigint", getStandardListObjectInspector(javaLongObjectInspector), ImmutableList.of(4L), arrayBlockOf(BIGINT, 4L)))
            .add(new TestColumn("t_array_float", getStandardListObjectInspector(javaFloatObjectInspector), ImmutableList.of(5.0f), arrayBlockOf(REAL, 5.0f)))
            .add(new TestColumn("t_array_double", getStandardListObjectInspector(javaDoubleObjectInspector), ImmutableList.of(6.0), StructuralTestUtil.arrayBlockOf(DOUBLE, 6.0)))
            .add(new TestColumn("t_array_boolean", getStandardListObjectInspector(javaBooleanObjectInspector), ImmutableList.of(true), arrayBlockOf(BOOLEAN, true)))
            .add(new TestColumn(
                    "t_array_varchar",
                    getStandardListObjectInspector(javaHiveVarcharObjectInspector),
                    ImmutableList.of(new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH)),
                    arrayBlockOf(createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), "test")))
            .add(new TestColumn(
                    "t_array_char",
                    getStandardListObjectInspector(CHAR_INSPECTOR_LENGTH_10),
                    ImmutableList.of(new HiveChar("test", 10)),
                    arrayBlockOf(createCharType(10), "test")))
            .add(new TestColumn("t_array_date", getStandardListObjectInspector(javaDateObjectInspector), ImmutableList.of(SQL_DATE), arrayBlockOf(DateType.DATE, DATE_DAYS)))
            .add(new TestColumn("t_array_timestamp", getStandardListObjectInspector(javaTimestampObjectInspector), ImmutableList.of(new Timestamp(TIMESTAMP)), StructuralTestUtil.arrayBlockOf(TimestampType.TIMESTAMP, TIMESTAMP)))
            .add(new TestColumn("t_array_decimal_precision_2", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_2), ImmutableList.of(WRITE_DECIMAL_PRECISION_2), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_2, EXPECTED_DECIMAL_PRECISION_2)))
            .add(new TestColumn("t_array_decimal_precision_4", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_4), ImmutableList.of(WRITE_DECIMAL_PRECISION_4), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_4, EXPECTED_DECIMAL_PRECISION_4)))
            .add(new TestColumn("t_array_decimal_precision_8", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_8), ImmutableList.of(WRITE_DECIMAL_PRECISION_8), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_8, EXPECTED_DECIMAL_PRECISION_8)))
            .add(new TestColumn("t_array_decimal_precision_17", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_17), ImmutableList.of(WRITE_DECIMAL_PRECISION_17), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_17, EXPECTED_DECIMAL_PRECISION_17)))
            .add(new TestColumn("t_array_decimal_precision_18", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_18), ImmutableList.of(WRITE_DECIMAL_PRECISION_18), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_18, EXPECTED_DECIMAL_PRECISION_18)))
            .add(new TestColumn("t_array_decimal_precision_38", getStandardListObjectInspector(DECIMAL_INSPECTOR_PRECISION_38), ImmutableList.of(WRITE_DECIMAL_PRECISION_38), decimalArrayBlockOf(DECIMAL_TYPE_PRECISION_38, EXPECTED_DECIMAL_PRECISION_38)))
            // struct and nested columns
            .add(new TestColumn("t_struct_bigint", getStandardStructObjectInspector(ImmutableList.of("s_bigint"), ImmutableList.of(javaLongObjectInspector)), new Long[] {1L}, rowBlockOf(ImmutableList.of(BIGINT), 1)))
            .add(new TestColumn("t_complex",
                    getStandardMapObjectInspector(
                            javaStringObjectInspector,
                            getStandardListObjectInspector(
                                    getStandardStructObjectInspector(
                                            ImmutableList.of("s_int"),
                                            ImmutableList.of(javaIntObjectInspector)))),
                    ImmutableMap.of("test", ImmutableList.<Object>of(new Integer[] {1})),
                    mapBlockOf(createUnboundedVarcharType(), new ArrayType(RowType.anonymous(ImmutableList.of(INTEGER))),
                            "test", arrayBlockOf(RowType.anonymous(ImmutableList.of(INTEGER)), rowBlockOf(ImmutableList.of(INTEGER), 1L)))))
            .add(new TestColumn("t_map_null_key_complex_value",
                    getStandardMapObjectInspector(
                            javaStringObjectInspector,
                            getStandardMapObjectInspector(javaLongObjectInspector, javaBooleanObjectInspector)),
                    asMap(new String[] {null, "k"}, new ImmutableMap[] {ImmutableMap.of(15L, true), ImmutableMap.of(16L, false)}),
                    mapBlockOf(createUnboundedVarcharType(), mapType(BIGINT, BOOLEAN), "k", mapBlockOf(BIGINT, BOOLEAN, 16L, false))))
            .add(new TestColumn("t_map_null_key_complex_key_value",
                    getStandardMapObjectInspector(
                            getStandardListObjectInspector(javaStringObjectInspector),
                            getStandardMapObjectInspector(javaLongObjectInspector, javaBooleanObjectInspector)),
                    asMap(new ImmutableList[] {null, ImmutableList.of("k", "ka")}, new ImmutableMap[] {ImmutableMap.of(15L, true), ImmutableMap.of(16L, false)}),
                    mapBlockOf(new ArrayType(createUnboundedVarcharType()), mapType(BIGINT, BOOLEAN), arrayBlockOf(createUnboundedVarcharType(), "k", "ka"), mapBlockOf(BIGINT, BOOLEAN, 16L, false))))
            .add(new TestColumn("t_struct_nested", getStandardStructObjectInspector(ImmutableList.of("struct_field"), ImmutableList.of(getStandardListObjectInspector(javaStringObjectInspector))), ImmutableList.of(ImmutableList.of("1", "2", "3")), rowBlockOf(ImmutableList.of(new ArrayType(createUnboundedVarcharType())), arrayBlockOf(createUnboundedVarcharType(), "1", "2", "3"))))
            .add(new TestColumn("t_struct_null", getStandardStructObjectInspector(ImmutableList.of("struct_field_null", "struct_field_null2"), ImmutableList.of(javaStringObjectInspector, javaStringObjectInspector)), Arrays.asList(null, null), rowBlockOf(ImmutableList.of(createUnboundedVarcharType(), createUnboundedVarcharType()), null, null)))
            .add(new TestColumn("t_struct_non_nulls_after_nulls", getStandardStructObjectInspector(ImmutableList.of("struct_non_nulls_after_nulls1", "struct_non_nulls_after_nulls2"), ImmutableList.of(javaIntObjectInspector, javaStringObjectInspector)), Arrays.asList(null, "some string"), rowBlockOf(ImmutableList.of(INTEGER, createUnboundedVarcharType()), null, "some string")))
            // NOTE(review): "strict_field3" looks like a typo for "struct_field3"; the name is only used within this test, so it is left as-is
            .add(new TestColumn("t_nested_struct_non_nulls_after_nulls",
                    getStandardStructObjectInspector(
                            ImmutableList.of("struct_field1", "struct_field2", "strict_field3"),
                            ImmutableList.of(
                                    javaIntObjectInspector,
                                    javaStringObjectInspector,
                                    getStandardStructObjectInspector(
                                            ImmutableList.of("nested_struct_field1", "nested_struct_field2"),
                                            ImmutableList.of(javaIntObjectInspector, javaStringObjectInspector)))),
                    Arrays.asList(null, "some string", Arrays.asList(null, "nested_string2")),
                    rowBlockOf(
                            ImmutableList.of(
                                    INTEGER,
                                    createUnboundedVarcharType(),
                                    RowType.anonymous(ImmutableList.of(INTEGER, createUnboundedVarcharType()))),
                            null, "some string", rowBlockOf(ImmutableList.of(INTEGER, createUnboundedVarcharType()), null, "nested_string2"))))
            .add(new TestColumn("t_map_null_value", getStandardMapObjectInspector(javaStringObjectInspector, javaStringObjectInspector), asMap(new String[] {"k1", "k2", "k3"}, new String[] {"v1", null, "v3"}), mapBlockOf(createUnboundedVarcharType(), createUnboundedVarcharType(), new String[] {"k1", "k2", "k3"}, new String[] {"v1", null, "v3"})))
            .add(new TestColumn("t_array_string_starting_with_nulls", getStandardListObjectInspector(javaStringObjectInspector), Arrays.asList(null, "test"), arrayBlockOf(createUnboundedVarcharType(), null, "test")))
            .add(new TestColumn("t_array_string_with_nulls_in_between", getStandardListObjectInspector(javaStringObjectInspector), Arrays.asList("test-1", null, "test-2"), arrayBlockOf(createUnboundedVarcharType(), "test-1", null, "test-2")))
            .add(new TestColumn("t_array_string_ending_with_nulls", getStandardListObjectInspector(javaStringObjectInspector), Arrays.asList("test", null), arrayBlockOf(createUnboundedVarcharType(), "test", null)))
            .add(new TestColumn("t_array_string_all_nulls", getStandardListObjectInspector(javaStringObjectInspector), Arrays.asList(null, null, null), arrayBlockOf(createUnboundedVarcharType(), null, null, null)))
            .build();

    /**
     * Builds a {@link HashMap} from parallel key/value arrays. Unlike ImmutableMap,
     * this permits null keys and null values, which several test columns require.
     *
     * @throws IllegalArgumentException if the arrays differ in length
     */
    private static <K, V> Map<K, V> asMap(K[] keys, V[] values)
    {
        checkArgument(keys.length == values.length, "array lengths don't match");
        Map<K, V> map = new HashMap<>();
        int len = keys.length;
        for (int i = 0; i < len; i++) {
            map.put(keys[i], values[i]);
        }
        return map;
    }
protected List<HiveColumnHandle> getColumnHandles(List<TestColumn> testColumns) { List<HiveColumnHandle> columns = new ArrayList<>(); int nextHiveColumnIndex = 0; for (int i = 0; i < testColumns.size(); i++) { TestColumn testColumn = testColumns.get(i); int columnIndex = testColumn.isPartitionKey() ? -1 : nextHiveColumnIndex++; HiveType hiveType = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()); columns.add(new HiveColumnHandle(testColumn.getName(), hiveType, hiveType.getTypeSignature(), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty())); } return columns; } public static FileSplit createTestFile( String filePath, HiveStorageFormat storageFormat, HiveCompressionCodec compressionCodec, List<TestColumn> testColumns, ConnectorSession session, int numRows, HiveFileWriterFactory fileWriterFactory) { // filter out partition keys, which are not written to the file testColumns = ImmutableList.copyOf(filter(testColumns, not(TestColumn::isPartitionKey))); List<Type> types = testColumns.stream() .map(TestColumn::getType) .map(HiveType::valueOf) .map(type -> type.getType(TYPE_MANAGER)) .collect(toList()); PageBuilder pageBuilder = new PageBuilder(types); for (int rowNumber = 0; rowNumber < numRows; rowNumber++) { pageBuilder.declarePosition(); for (int columnNumber = 0; columnNumber < testColumns.size(); columnNumber++) { serializeObject( types.get(columnNumber), pageBuilder.getBlockBuilder(columnNumber), testColumns.get(columnNumber).getWriteValue(), testColumns.get(columnNumber).getObjectInspector(), false); } } Page page = pageBuilder.build(); JobConf jobConf = new JobConf(); configureCompression(jobConf, compressionCodec); Properties tableProperties = new Properties(); tableProperties.setProperty("columns", Joiner.on(',').join(transform(testColumns, TestColumn::getName))); tableProperties.setProperty("columns.types", Joiner.on(',').join(transform(testColumns, TestColumn::getType))); Optional<HiveFileWriter> fileWriter = 
fileWriterFactory.createFileWriter( new Path(filePath), testColumns.stream() .map(TestColumn::getName) .collect(toList()), StorageFormat.fromHiveStorageFormat(storageFormat), tableProperties, jobConf, session); HiveFileWriter hiveFileWriter = fileWriter.orElseThrow(() -> new IllegalArgumentException("fileWriterFactory")); hiveFileWriter.appendRows(page); hiveFileWriter.commit(); return new FileSplit(new Path(filePath), 0, new File(filePath).length(), new String[0]); } public static FileSplit createTestFile( String filePath, HiveStorageFormat storageFormat, HiveCompressionCodec compressionCodec, List<TestColumn> testColumns, int numRows) throws Exception { HiveOutputFormat<?, ?> outputFormat = newInstance(storageFormat.getOutputFormat(), HiveOutputFormat.class); @SuppressWarnings("deprecation") SerDe serDe = newInstance(storageFormat.getSerDe(), SerDe.class); // filter out partition keys, which are not written to the file testColumns = ImmutableList.copyOf(filter(testColumns, not(TestColumn::isPartitionKey))); Properties tableProperties = new Properties(); tableProperties.setProperty("columns", Joiner.on(',').join(transform(testColumns, TestColumn::getName))); tableProperties.setProperty("columns.types", Joiner.on(',').join(transform(testColumns, TestColumn::getType))); serDe.initialize(new Configuration(), tableProperties); JobConf jobConf = new JobConf(); configureCompression(jobConf, compressionCodec); RecordWriter recordWriter = outputFormat.getHiveRecordWriter( jobConf, new Path(filePath), Text.class, compressionCodec != HiveCompressionCodec.NONE, tableProperties, () -> {}); try { serDe.initialize(new Configuration(), tableProperties); SettableStructObjectInspector objectInspector = getStandardStructObjectInspector( ImmutableList.copyOf(transform(testColumns, TestColumn::getName)), ImmutableList.copyOf(transform(testColumns, TestColumn::getObjectInspector))); Object row = objectInspector.create(); List<StructField> fields = 
ImmutableList.copyOf(objectInspector.getAllStructFieldRefs()); for (int rowNumber = 0; rowNumber < numRows; rowNumber++) { for (int i = 0; i < testColumns.size(); i++) { Object writeValue = testColumns.get(i).getWriteValue(); if (writeValue instanceof Slice) { writeValue = ((Slice) writeValue).getBytes(); } objectInspector.setStructFieldData(row, fields.get(i), writeValue); } Writable record = serDe.serialize(row, objectInspector); recordWriter.write(record); } } finally { recordWriter.close(false); } // todo to test with compression, the file must be renamed with the compression extension Path path = new Path(filePath); path.getFileSystem(new Configuration()).setVerifyChecksum(true); File file = new File(filePath); return new FileSplit(path, 0, file.length(), new String[0]); } private static <T> T newInstance(String className, Class<T> superType) throws ReflectiveOperationException { return HiveStorageFormat.class.getClassLoader().loadClass(className).asSubclass(superType).newInstance(); } protected void checkCursor(RecordCursor cursor, List<TestColumn> testColumns, int rowCount) { for (int row = 0; row < rowCount; row++) { assertTrue(cursor.advanceNextPosition()); for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) { TestColumn testColumn = testColumns.get(i); Object fieldFromCursor; Type type = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()).getType(TYPE_MANAGER); if (cursor.isNull(i)) { fieldFromCursor = null; } else if (BOOLEAN.equals(type)) { fieldFromCursor = cursor.getBoolean(i); } else if (TINYINT.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (SMALLINT.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (INTEGER.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (BIGINT.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (REAL.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (DOUBLE.equals(type)) { fieldFromCursor = cursor.getDouble(i); } else if 
(isVarcharType(type)) { fieldFromCursor = cursor.getSlice(i); } else if (isCharType(type)) { fieldFromCursor = cursor.getSlice(i); } else if (VARBINARY.equals(type)) { fieldFromCursor = cursor.getSlice(i); } else if (DateType.DATE.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (TimestampType.TIMESTAMP.equals(type)) { fieldFromCursor = cursor.getLong(i); } else if (isStructuralType(type)) { fieldFromCursor = cursor.getObject(i); } else if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; if (decimalType.isShort()) { fieldFromCursor = new BigDecimal(BigInteger.valueOf(cursor.getLong(i)), decimalType.getScale()); } else { fieldFromCursor = new BigDecimal(Decimals.decodeUnscaledValue(cursor.getSlice(i)), decimalType.getScale()); } } else { throw new RuntimeException("unknown type"); } if (fieldFromCursor == null) { assertEquals(null, testColumn.getExpectedValue(), String.format("Expected null for column %s", testColumn.getName())); } else if (testColumn.getObjectInspector().getTypeName().equals("float")) { int intBits = (int) ((long) fieldFromCursor); assertEquals(intBitsToFloat(intBits), (float) testColumn.getExpectedValue(), (float) EPSILON); } else if (testColumn.getObjectInspector().getTypeName().equals("double")) { assertEquals((double) fieldFromCursor, (double) testColumn.getExpectedValue(), EPSILON); } else if (testColumn.getObjectInspector().getTypeName().equals("tinyint")) { assertEquals(((Number) fieldFromCursor).byteValue(), testColumn.getExpectedValue()); } else if (testColumn.getObjectInspector().getTypeName().equals("smallint")) { assertEquals(((Number) fieldFromCursor).shortValue(), testColumn.getExpectedValue()); } else if (testColumn.getObjectInspector().getTypeName().equals("int")) { assertEquals(((Number) fieldFromCursor).intValue(), testColumn.getExpectedValue()); } else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) { assertEquals(fieldFromCursor, testColumn.getExpectedValue(), 
String.format("Wrong value for column %s", testColumn.getName()));
                }
                else {
                    // Block-typed (structural) values cannot be compared with equals();
                    // compare their serialized forms instead (see assertBlockEquals below).
                    Block expected = (Block) testColumn.getExpectedValue();
                    Block actual = (Block) fieldFromCursor;
                    assertBlockEquals(actual, expected, String.format("Wrong value for column %s", testColumn.getName()));
                }
            }
        }
        // Once every expected row has been consumed the cursor must be exhausted.
        assertFalse(cursor.advanceNextPosition());
    }

    /**
     * Materializes every row produced by {@code pageSource} and asserts that each column value
     * matches the expected value recorded in the corresponding {@link TestColumn}.
     *
     * <p>The comparison strategy is selected per column from the Hive ObjectInspector type name:
     * float/double values are compared with an {@code EPSILON} tolerance, date and timestamp
     * values are normalized to {@code SqlDate}/{@code SqlTimestamp}, char columns are space-padded
     * to their declared length, remaining primitives are normalized to comparable JDK types
     * (Slice to String, SqlVarbinary to UTF-8 String, SqlDecimal to BigDecimal), and any other
     * (structural) type is round-tripped through the Presto {@code Type} to obtain a comparable
     * object value.
     *
     * @param pageSource  the page source under test; always closed before returning
     * @param testColumns expected columns, index-aligned with {@code types}
     * @param types       Presto types of the columns, index-aligned with {@code testColumns}
     * @param rowCount    expected number of materialized rows
     * @throws IOException if closing the page source fails
     */
    protected void checkPageSource(ConnectorPageSource pageSource, List<TestColumn> testColumns, List<Type> types, int rowCount)
            throws IOException
    {
        try {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, types);
            assertEquals(result.getMaterializedRows().size(), rowCount);
            for (MaterializedRow row : result) {
                for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
                    TestColumn testColumn = testColumns.get(i);
                    Type type = types.get(i);

                    Object actualValue = row.getField(i);
                    Object expectedValue = testColumn.getExpectedValue();
                    // Expected string values are recorded as Slices; unwrap before comparing.
                    if (expectedValue instanceof Slice) {
                        expectedValue = ((Slice) expectedValue).toStringUtf8();
                    }

                    if (actualValue == null || expectedValue == null) {
                        assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().equals("float")) {
                        assertEquals((float) actualValue, (float) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().equals("double")) {
                        assertEquals((double) actualValue, (double) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().equals("date")) {
                        // Expected dates are stored as epoch-day longs; narrow to SqlDate for comparison.
                        SqlDate expectedDate = new SqlDate(((Long) expectedValue).intValue());
                        assertEquals(actualValue, expectedDate, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().equals("int") ||
                            testColumn.getObjectInspector().getTypeName().equals("smallint") ||
                            testColumn.getObjectInspector().getTypeName().equals("tinyint")) {
                        // Fix: include the column name in the failure message, consistent with
                        // every other branch of this comparison chain.
                        assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().equals("timestamp")) {
                        // Expected timestamps are epoch millis; attach the session time zone.
                        SqlTimestamp expectedTimestamp = new SqlTimestamp((Long) expectedValue, SESSION.getTimeZoneKey());
                        assertEquals(actualValue, expectedTimestamp, "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getTypeName().startsWith("char")) {
                        // CHAR(n) values come back space-padded to the declared length.
                        assertEquals(actualValue, padEnd((String) expectedValue, ((CharType) type).getLength(), ' '), "Wrong value for column " + testColumn.getName());
                    }
                    else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) {
                        // Normalize both sides to plain JDK types before comparing.
                        // (The Slice unwrap of expectedValue already happened above; the check
                        // here is kept for safety and costs nothing.)
                        if (expectedValue instanceof Slice) {
                            expectedValue = ((Slice) expectedValue).toStringUtf8();
                        }
                        if (actualValue instanceof Slice) {
                            actualValue = ((Slice) actualValue).toStringUtf8();
                        }
                        if (actualValue instanceof SqlVarbinary) {
                            actualValue = new String(((SqlVarbinary) actualValue).getBytes(), UTF_8);
                        }
                        if (actualValue instanceof SqlDecimal) {
                            actualValue = new BigDecimal(actualValue.toString());
                        }
                        assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                    }
                    else {
                        // Structural types: write the expected value into a one-position block and
                        // read it back as an object value so both sides are in the same form.
                        BlockBuilder builder = type.createBlockBuilder(null, 1);
                        type.writeObject(builder, expectedValue);
                        expectedValue = type.getObjectValue(SESSION, builder.build(), 0);
                        assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                    }
                }
            }
        }
        finally {
            pageSource.close();
        }
    }

    /**
     * Asserts that two Blocks are equal by comparing their serialized representations.
     */
    private static void assertBlockEquals(Block actual, Block expected, String message)
    {
        assertEquals(blockToSlice(actual), blockToSlice(expected), message);
    }

    /**
     * Serializes a Block into a Slice so it can be compared byte-wise.
     */
    private static Slice blockToSlice(Block block)
    {
        // This function is strictly for testing use only
        SliceOutput sliceOutput = new DynamicSliceOutput(1000);
        BlockSerdeUtil.writeBlock(sliceOutput, block);
        return sliceOutput.slice();
    }

    /**
     * Describes a single test column: its Hive ObjectInspector (type), the value to write, the
     * value expected back on read, and whether it is a partition key.
     */
    public static final class TestColumn
    {
        private final String name;
        private final ObjectInspector objectInspector;
        private final Object writeValue;
        private final Object expectedValue;
        private final boolean partitionKey;

        /** Convenience constructor for a non-partition-key column. */
        public TestColumn(String name, ObjectInspector objectInspector, Object writeValue, Object expectedValue)
        {
            this(name, objectInspector, writeValue, expectedValue, false);
        }

        public TestColumn(String name, ObjectInspector objectInspector, Object writeValue, Object expectedValue, boolean partitionKey)
        {
            this.name = requireNonNull(name, "name is null");
            this.objectInspector = requireNonNull(objectInspector, "objectInspector is null");
            this.writeValue = writeValue;
            this.expectedValue = expectedValue;
            this.partitionKey = partitionKey;
        }

        public String getName()
        {
            return name;
        }

        /** Returns the Hive type name as reported by the ObjectInspector. */
        public String getType()
        {
            return objectInspector.getTypeName();
        }

        public ObjectInspector getObjectInspector()
        {
            return objectInspector;
        }

        public Object getWriteValue()
        {
            return writeValue;
        }

        public Object getExpectedValue()
        {
            return expectedValue;
        }

        public boolean isPartitionKey()
        {
            return partitionKey;
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder("TestColumn{");
            sb.append("name='").append(name).append('\'');
            sb.append(", objectInspector=").append(objectInspector);
            sb.append(", writeValue=").append(writeValue);
            sb.append(", expectedValue=").append(expectedValue);
            sb.append(", partitionKey=").append(partitionKey);
            sb.append('}');
            return sb.toString();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.runtime.operators.python.aggregate;

import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.core.memory.ManagedMemoryUseCase;
import org.apache.flink.fnexecution.v1.FlinkFnApi;
import org.apache.flink.python.PythonFunctionRunner;
import org.apache.flink.python.PythonOptions;
import org.apache.flink.streaming.api.operators.python.AbstractOneInputPythonFunctionOperator;
import org.apache.flink.streaming.api.utils.PythonOperatorUtils;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.functions.python.PythonAggregateFunctionInfo;
import org.apache.flink.table.functions.python.PythonEnv;
import org.apache.flink.table.planner.plan.utils.KeySelectorUtil;
import org.apache.flink.table.planner.typeutils.DataViewUtils;
import org.apache.flink.table.runtime.keyselector.RowDataKeySelector;
import org.apache.flink.table.runtime.operators.python.utils.StreamRecordRowDataWrappingCollector;
import org.apache.flink.table.runtime.runners.python.beam.BeamTableStatefulPythonFunctionRunner;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.runtime.typeutils.PythonTypeUtils;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.util.Preconditions;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

import static org.apache.flink.table.runtime.typeutils.PythonTypeUtils.toProtoType;

/**
 * Base class for {@link AbstractPythonStreamGroupAggregateOperator} and {@link
 * PythonStreamGroupWindowAggregateOperator}.
 */
@Internal
public abstract class AbstractPythonStreamAggregateOperator
        extends AbstractOneInputPythonFunctionOperator<RowData, RowData> {

    private static final long serialVersionUID = 1L;

    // Coder URN telling the Python side how to (de)serialize aggregate-function rows.
    @VisibleForTesting
    protected static final String FLINK_AGGREGATE_FUNCTION_SCHEMA_CODER_URN =
            "flink:coder:schema:aggregate_function:v1";

    // Record-type tags prepended to each element sent to the Python worker:
    // a normal data record vs. a timer firing.
    @VisibleForTesting static final byte NORMAL_RECORD = 0;

    @VisibleForTesting static final byte TRIGGER_TIMER = 1;

    private final PythonAggregateFunctionInfo[] aggregateFunctions;

    // Per-aggregate DataView specs; indexed in parallel with aggregateFunctions
    // (entries beyond dataViewSpecs.length are treated as "no specs" in
    // getUserDefinedFunctionsProto).
    private final DataViewUtils.DataViewSpec[][] dataViewSpecs;

    /** The input logical type. */
    protected final RowType inputType;

    /** The output logical type. */
    protected final RowType outputType;

    /** The options used to configure the Python worker process. */
    private final Map<String, String> jobOptions;

    /** The array of the key indexes. */
    private final int[] grouping;

    /** The index of a count aggregate used to calculate the number of accumulated rows. */
    private final int indexOfCountStar;

    /** Generate retract messages if true. */
    private final boolean generateUpdateBefore;

    /** The maximum NUMBER of the states cached in Python side. */
    private final int stateCacheSize;

    /** The maximum number of cached entries in a single Python MapState. */
    private final int mapStateReadCacheSize;

    private final int mapStateWriteCacheSize;

    private final String coderUrn;

    private final FlinkFnApi.CoderParam.OutputMode outputMode;

    // Key captured by setCurrentKey()/getCurrentKey(); the state backend's current key is
    // NOT changed here because the beam state gRPC service accesses it concurrently
    // (see the setCurrentKey javadoc below).
    private transient Object keyForTimerService;

    /** The user-defined function input logical type. */
    protected transient RowType userDefinedFunctionInputType;

    /** The user-defined function output logical type. */
    protected transient RowType userDefinedFunctionOutputType;

    /** The TypeSerializer for udf execution results. */
    transient TypeSerializer<RowData> udfOutputTypeSerializer;

    /** The TypeSerializer for udf input elements. */
    transient TypeSerializer<RowData> udfInputTypeSerializer;

    /** Reusable InputStream used to holding the execution results to be deserialized. */
    protected transient ByteArrayInputStreamWithPos bais;

    /** InputStream Wrapper. */
    protected transient DataInputViewStreamWrapper baisWrapper;

    /** Reusable OutputStream used to holding the serialized input elements. */
    protected transient ByteArrayOutputStreamWithPos baos;

    /** OutputStream Wrapper. */
    protected transient DataOutputViewStreamWrapper baosWrapper;

    /** The collector used to collect records. */
    protected transient StreamRecordRowDataWrappingCollector rowDataWrapper;

    public AbstractPythonStreamAggregateOperator(
            Configuration config,
            RowType inputType,
            RowType outputType,
            PythonAggregateFunctionInfo[] aggregateFunctions,
            DataViewUtils.DataViewSpec[][] dataViewSpecs,
            int[] grouping,
            int indexOfCountStar,
            boolean generateUpdateBefore,
            String coderUrn,
            FlinkFnApi.CoderParam.OutputMode outputMode) {
        super(config);
        this.inputType = Preconditions.checkNotNull(inputType);
        this.outputType = Preconditions.checkNotNull(outputType);
        this.aggregateFunctions = aggregateFunctions;
        this.dataViewSpecs = dataViewSpecs;
        this.jobOptions = buildJobOptions(config);
        this.grouping = grouping;
        this.indexOfCountStar = indexOfCountStar;
        this.generateUpdateBefore = generateUpdateBefore;
        this.coderUrn = coderUrn;
        this.outputMode = outputMode;
        // Cache sizes are read eagerly from the config so they can be shipped to the
        // Python side via the UDF proto (see getUserDefinedFunctionsProto).
        this.stateCacheSize = config.get(PythonOptions.STATE_CACHE_SIZE);
        this.mapStateReadCacheSize = config.get(PythonOptions.MAP_STATE_READ_CACHE_SIZE);
        this.mapStateWriteCacheSize = config.get(PythonOptions.MAP_STATE_WRITE_CACHE_SIZE);
    }

    /**
     * Initializes the reusable (de)serialization buffers, the UDF input/output serializers and
     * the output collector. Serializers must be ready before super.open() starts the Python
     * function runner, hence the ordering here.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void open() throws Exception {
        bais = new ByteArrayInputStreamWithPos();
        baisWrapper = new DataInputViewStreamWrapper(bais);
        baos = new ByteArrayOutputStreamWithPos();
        baosWrapper = new DataOutputViewStreamWrapper(baos);
        userDefinedFunctionInputType = getUserDefinedFunctionInputType();
        udfInputTypeSerializer =
                PythonTypeUtils.toBlinkTypeSerializer(userDefinedFunctionInputType);
        userDefinedFunctionOutputType = getUserDefinedFunctionOutputType();
        udfOutputTypeSerializer =
                PythonTypeUtils.toBlinkTypeSerializer(userDefinedFunctionOutputType);
        rowDataWrapper = new StreamRecordRowDataWrappingCollector(output);
        super.open();
    }

    /**
     * Forwards the element to the subclass-specific serialization logic, then applies the
     * per-bundle bookkeeping inherited from the base operator and emits any results already
     * returned by the Python worker.
     */
    @Override
    public void processElement(StreamRecord<RowData> element) throws Exception {
        RowData value = element.getValue();
        processElementInternal(value);
        elementCount++;
        checkInvokeFinishBundleByCount();
        emitResults();
    }

    /** Builds the stateful Beam runner that executes the Python aggregate functions. */
    @Override
    public PythonFunctionRunner createPythonFunctionRunner() throws Exception {
        return new BeamTableStatefulPythonFunctionRunner(
                getRuntimeContext().getTaskName(),
                createPythonEnvironmentManager(),
                userDefinedFunctionInputType,
                userDefinedFunctionOutputType,
                getFunctionUrn(),
                getUserDefinedFunctionsProto(),
                coderUrn,
                jobOptions,
                getFlinkMetricContainer(),
                getKeyedStateBackend(),
                getKeySerializer(),
                getWindowSerializer(),
                getContainingTask().getEnvironment().getMemoryManager(),
                getOperatorConfig()
                        .getManagedMemoryFractionOperatorUseCaseOfSlot(
                                ManagedMemoryUseCase.PYTHON,
                                getContainingTask()
                                        .getEnvironment()
                                        .getTaskManagerInfo()
                                        .getConfiguration(),
                                getContainingTask()
                                        .getEnvironment()
                                        .getUserCodeClassLoader()
                                        .asClassLoader()),
                outputMode);
    }

    /**
     * As the beam state gRPC service will access the KeyedStateBackend in parallel with this
     * operator, we must override this method to prevent changing the current key of the
     * KeyedStateBackend while the beam service is handling requests.
     */
    @Override
    public void setCurrentKey(Object key) {
        keyForTimerService = key;
    }

    @Override
    public Object getCurrentKey() {
        return keyForTimerService;
    }

    /**
     * Returns the Python environment of the first aggregate function; all functions of one
     * operator are expected to share the same environment (assumption implied by this lookup —
     * not verified here).
     */
    @Override
    public PythonEnv getPythonEnv() {
        return aggregateFunctions[0].getPythonFunction().getPythonEnv();
    }

    @VisibleForTesting
    TypeSerializer getKeySerializer() {
        return PythonTypeUtils.toBlinkTypeSerializer(getKeyType());
    }

    /** Derives the grouping-key row type by projecting the input type on the key indexes. */
    protected RowType getKeyType() {
        RowDataKeySelector selector =
                KeySelectorUtil.getRowDataSelector(grouping, InternalTypeInfo.of(inputType));
        return selector.getProducedType().toRowType();
    }

    // No window serializer for non-windowed aggregates; window subclasses override this.
    TypeSerializer getWindowSerializer() {
        return null;
    }

    /**
     * Gets the proto representation of the Python user-defined aggregate functions to be executed.
     */
    protected FlinkFnApi.UserDefinedAggregateFunctions getUserDefinedFunctionsProto() {
        FlinkFnApi.UserDefinedAggregateFunctions.Builder builder =
                FlinkFnApi.UserDefinedAggregateFunctions.newBuilder();
        builder.setMetricEnabled(getPythonConfig().isMetricEnabled());
        builder.addAllGrouping(Arrays.stream(grouping).boxed().collect(Collectors.toList()));
        builder.setGenerateUpdateBefore(generateUpdateBefore);
        builder.setIndexOfCountStar(indexOfCountStar);
        builder.setKeyType(toProtoType(getKeyType()));
        builder.setStateCacheSize(stateCacheSize);
        builder.setMapStateReadCacheSize(mapStateReadCacheSize);
        builder.setMapStateWriteCacheSize(mapStateWriteCacheSize);
        for (int i = 0; i < aggregateFunctions.length; i++) {
            // An aggregate beyond dataViewSpecs.length simply has no DataView specs.
            DataViewUtils.DataViewSpec[] specs = null;
            if (i < dataViewSpecs.length) {
                specs = dataViewSpecs[i];
            }
            builder.addUdfs(
                    PythonOperatorUtils.getUserDefinedAggregateFunctionProto(
                            aggregateFunctions[i], specs));
        }
        return builder.build();
    }

    /** URN identifying the Python-side transform to run. */
    public abstract String getFunctionUrn();

    /** Serializes and forwards one input row to the Python worker. */
    public abstract void processElementInternal(RowData value) throws Exception;

    /** Logical row type of the data sent to the Python UDFs. */
    public abstract RowType getUserDefinedFunctionInputType();

    /** Logical row type of the data produced by the Python UDFs. */
    public abstract RowType getUserDefinedFunctionOutputType();

    /**
     * Collects the config entries that must be forwarded to the Python worker process as job
     * options (session time zone and state-cache tuning).
     */
    private Map<String, String> buildJobOptions(Configuration config) {
        Map<String, String> jobOptions = new HashMap<>();
        if (config.containsKey("table.exec.timezone")) {
            jobOptions.put("table.exec.timezone", config.getString("table.exec.timezone", null));
        }
        jobOptions.put(
                PythonOptions.STATE_CACHE_SIZE.key(),
                String.valueOf(config.get(PythonOptions.STATE_CACHE_SIZE)));
        jobOptions.put(
                PythonOptions.MAP_STATE_ITERATE_RESPONSE_BATCH_SIZE.key(),
                String.valueOf(config.get(PythonOptions.MAP_STATE_ITERATE_RESPONSE_BATCH_SIZE)));
        return jobOptions;
    }
}
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Bits;

import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;

/**
 * Exposes {@link TermsEnum} API, merged from {@link TermsEnum} API of sub-segments.
 * This does a merge sort, by term text, of the sub-readers.
 *
 * @lucene.experimental
 */
public final class MultiTermsEnum extends TermsEnum {

  // Min-heap ordered by current term (then by slice start) used for the merge sort.
  private final TermMergeQueue queue;
  private final TermsEnumWithSlice[] subs;        // all of our subs (one per sub-reader)
  private final TermsEnumWithSlice[] currentSubs; // current subs that have at least one term for this field
  // Subs whose current term equals the merged enum's current term; first numTop entries valid.
  private final TermsEnumWithSlice[] top;
  private final MultiDocsEnum.EnumWithSlice[] subDocs;
  private final MultiDocsAndPositionsEnum.EnumWithSlice[] subDocsAndPositions;

  // Last term passed to seekCeil/seekExact; enables the LUCENE-2130 re-seek shortcut.
  private BytesRef lastSeek;
  private boolean lastSeekExact;
  private final BytesRef lastSeekScratch = new BytesRef();

  private int numTop;
  private int numSubs;
  private BytesRef current;
  private Comparator<BytesRef> termComp;

  // Pairs a sub-reader's TermsEnum with its slice index; used to (re)initialize this enum.
  static class TermsEnumIndex {
    public final static TermsEnumIndex[] EMPTY_ARRAY = new TermsEnumIndex[0];
    final int subIndex;
    final TermsEnum termsEnum;

    public TermsEnumIndex(TermsEnum termsEnum, int subIndex) {
      this.termsEnum = termsEnum;
      this.subIndex = subIndex;
    }
  }

  /** Returns how many sub-reader slices contain the current
   *  term.  @see #getMatchArray */
  public int getMatchCount() {
    return numTop;
  }

  /** Returns sub-reader slices positioned to the current term. */
  public TermsEnumWithSlice[] getMatchArray() {
    return top;
  }

  /** Sole constructor.
   *  @param slices Which sub-reader slices we should
   *  merge. */
  public MultiTermsEnum(ReaderSlice[] slices) {
    queue = new TermMergeQueue(slices.length);
    top = new TermsEnumWithSlice[slices.length];
    subs = new TermsEnumWithSlice[slices.length];
    subDocs = new MultiDocsEnum.EnumWithSlice[slices.length];
    subDocsAndPositions = new MultiDocsAndPositionsEnum.EnumWithSlice[slices.length];
    for(int i=0;i<slices.length;i++) {
      subs[i] = new TermsEnumWithSlice(i, slices[i]);
      subDocs[i] = new MultiDocsEnum.EnumWithSlice();
      subDocs[i].slice = slices[i];
      subDocsAndPositions[i] = new MultiDocsAndPositionsEnum.EnumWithSlice();
      subDocsAndPositions[i].slice = slices[i];
    }
    currentSubs = new TermsEnumWithSlice[slices.length];
  }

  @Override
  public BytesRef term() {
    return current;
  }

  @Override
  public Comparator<BytesRef> getComparator() {
    return termComp;
  }

  /** The terms array must be newly created TermsEnum, ie
   *  {@link TermsEnum#next} has not yet been called. */
  public TermsEnum reset(TermsEnumIndex[] termsEnumsIndex) throws IOException {
    assert termsEnumsIndex.length <= top.length;
    numSubs = 0;
    numTop = 0;
    termComp = null;
    queue.clear();
    for(int i=0;i<termsEnumsIndex.length;i++) {

      final TermsEnumIndex termsEnumIndex = termsEnumsIndex[i];
      assert termsEnumIndex != null;

      // init our term comp
      if (termComp == null) {
        queue.termComp = termComp = termsEnumIndex.termsEnum.getComparator();
      } else {
        // We cannot merge sub-readers that have
        // different TermComps
        final Comparator<BytesRef> subTermComp = termsEnumIndex.termsEnum.getComparator();
        if (subTermComp != null && !subTermComp.equals(termComp)) {
          throw new IllegalStateException("sub-readers have different BytesRef.Comparators: " + subTermComp + " vs " + termComp + "; cannot merge");
        }
      }

      // Prime each sub with its first term; subs with no terms are simply skipped.
      final BytesRef term = termsEnumIndex.termsEnum.next();
      if (term != null) {
        final TermsEnumWithSlice entry = subs[termsEnumIndex.subIndex];
        entry.reset(termsEnumIndex.termsEnum, term);
        queue.add(entry);
        currentSubs[numSubs++] = entry;
      } else {
        // field has no terms
      }
    }

    if (queue.size() == 0) {
      return TermsEnum.EMPTY;
    } else {
      return this;
    }
  }

  @Override
  public boolean seekExact(BytesRef term) throws IOException {
    queue.clear();
    numTop = 0;

    boolean seekOpt = false;
    if (lastSeek != null && termComp.compare(lastSeek, term) <= 0) {
      seekOpt = true;
    }

    lastSeek = null;
    lastSeekExact = true;

    for(int i=0;i<numSubs;i++) {
      final boolean status;
      // LUCENE-2130: if we had just seek'd already, prior
      // to this seek, and the new seek term is after the
      // previous one, don't try to re-seek this sub if its
      // current term is already beyond this new seek term.
      // Doing so is a waste because this sub will simply
      // seek to the same spot.
      if (seekOpt) {
        final BytesRef curTerm = currentSubs[i].current;
        if (curTerm != null) {
          final int cmp = termComp.compare(term, curTerm);
          if (cmp == 0) {
            status = true;
          } else if (cmp < 0) {
            status = false;
          } else {
            status = currentSubs[i].terms.seekExact(term);
          }
        } else {
          status = false;
        }
      } else {
        status = currentSubs[i].terms.seekExact(term);
      }

      if (status) {
        top[numTop++] = currentSubs[i];
        current = currentSubs[i].current = currentSubs[i].terms.term();
        assert term.equals(currentSubs[i].current);
      }
    }

    // if at least one sub had exact match to the requested
    // term then we found match
    return numTop > 0;
  }

  @Override
  public SeekStatus seekCeil(BytesRef term) throws IOException {
    queue.clear();
    numTop = 0;
    lastSeekExact = false;

    boolean seekOpt = false;
    if (lastSeek != null && termComp.compare(lastSeek, term) <= 0) {
      seekOpt = true;
    }

    // Remember this seek target via a reusable scratch ref (term may be mutated by caller).
    lastSeekScratch.copyBytes(term);
    lastSeek = lastSeekScratch;

    for(int i=0;i<numSubs;i++) {
      final SeekStatus status;
      // LUCENE-2130: if we had just seek'd already, prior
      // to this seek, and the new seek term is after the
      // previous one, don't try to re-seek this sub if its
      // current term is already beyond this new seek term.
      // Doing so is a waste because this sub will simply
      // seek to the same spot.
      if (seekOpt) {
        final BytesRef curTerm = currentSubs[i].current;
        if (curTerm != null) {
          final int cmp = termComp.compare(term, curTerm);
          if (cmp == 0) {
            status = SeekStatus.FOUND;
          } else if (cmp < 0) {
            status = SeekStatus.NOT_FOUND;
          } else {
            status = currentSubs[i].terms.seekCeil(term);
          }
        } else {
          status = SeekStatus.END;
        }
      } else {
        status = currentSubs[i].terms.seekCeil(term);
      }

      if (status == SeekStatus.FOUND) {
        top[numTop++] = currentSubs[i];
        current = currentSubs[i].current = currentSubs[i].terms.term();
      } else {
        if (status == SeekStatus.NOT_FOUND) {
          // Sub landed on a term after the target: keep it in the queue for pullTop().
          currentSubs[i].current = currentSubs[i].terms.term();
          assert currentSubs[i].current != null;
          queue.add(currentSubs[i]);
        } else {
          // enum exhausted
          currentSubs[i].current = null;
        }
      }
    }

    if (numTop > 0) {
      // at least one sub had exact match to the requested term
      return SeekStatus.FOUND;
    } else if (queue.size() > 0) {
      // no sub had exact match, but at least one sub found
      // a term after the requested term -- advance to that
      // next term:
      pullTop();
      return SeekStatus.NOT_FOUND;
    } else {
      return SeekStatus.END;
    }
  }

  @Override
  public void seekExact(long ord) {
    // Ord-based access is not defined for a merged enum.
    throw new UnsupportedOperationException();
  }

  @Override
  public long ord() {
    throw new UnsupportedOperationException();
  }

  private void pullTop() {
    // extract all subs from the queue that have the same
    // top term
    assert numTop == 0;
    while(true) {
      top[numTop++] = queue.pop();
      if (queue.size() == 0 || !(queue.top()).current.bytesEquals(top[0].current)) {
        break;
      }
    }
    current = top[0].current;
  }

  private void pushTop() throws IOException {
    // call next() on each top, and put back into queue
    for(int i=0;i<numTop;i++) {
      top[i].current = top[i].terms.next();
      if (top[i].current != null) {
        queue.add(top[i]);
      } else {
        // no more fields in this reader
      }
    }
    numTop = 0;
  }

  @Override
  public BytesRef next() throws IOException {
    if (lastSeekExact) {
      // Must seekCeil at this point, so those subs that
      // didn't have the term can find the following term.
      // NOTE: we could save some CPU by only seekCeil the
      // subs that didn't match the last exact seek... but
      // most impls short-circuit if you seekCeil to term
      // they are already on.
      final SeekStatus status = seekCeil(current);
      assert status == SeekStatus.FOUND;
      lastSeekExact = false;
    }
    lastSeek = null;

    // restore queue
    pushTop();

    // gather equal top fields
    if (queue.size() > 0) {
      pullTop();
    } else {
      current = null;
    }

    return current;
  }

  @Override
  public int docFreq() throws IOException {
    // Sum doc freq across all subs positioned on the current term.
    int sum = 0;
    for(int i=0;i<numTop;i++) {
      sum += top[i].terms.docFreq();
    }
    return sum;
  }

  @Override
  public long totalTermFreq() throws IOException {
    long sum = 0;
    for(int i=0;i<numTop;i++) {
      final long v = top[i].terms.totalTermFreq();
      // -1 means "not available" for some sub, so it is for the merged enum too.
      if (v == -1) {
        return v;
      }
      sum += v;
    }
    return sum;
  }

  @Override
  public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
    MultiDocsEnum docsEnum;
    // Can only reuse if incoming enum is also a MultiDocsEnum
    if (reuse != null && reuse instanceof MultiDocsEnum) {
      docsEnum = (MultiDocsEnum) reuse;
      // ... and was previously created w/ this MultiTermsEnum:
      if (!docsEnum.canReuse(this)) {
        docsEnum = new MultiDocsEnum(this, subs.length);
      }
    } else {
      docsEnum = new MultiDocsEnum(this, subs.length);
    }

    final MultiBits multiLiveDocs;
    if (liveDocs instanceof MultiBits) {
      multiLiveDocs = (MultiBits) liveDocs;
    } else {
      multiLiveDocs = null;
    }

    int upto = 0;

    for(int i=0;i<numTop;i++) {

      final TermsEnumWithSlice entry = top[i];

      final Bits b;

      if (multiLiveDocs != null) {
        // optimize for common case: requested skip docs is a
        // congruent sub-slice of MultiBits: in this case, we
        // just pull the liveDocs from the sub reader, rather
        // than making the inefficient
        // Slice(Multi(sub-readers)):
        final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(entry.subSlice);
        if (sub.matches) {
          b = sub.result;
        } else {
          // custom case: requested skip docs is foreign:
          // must slice it on every access
          b = new BitsSlice(liveDocs, entry.subSlice);
        }
      } else if (liveDocs != null) {
        b = new BitsSlice(liveDocs, entry.subSlice);
      } else {
        // no deletions
        b = null;
      }

      assert entry.index < docsEnum.subDocsEnum.length: entry.index + " vs " + docsEnum.subDocsEnum.length + "; " + subs.length;
      final DocsEnum subDocsEnum = entry.terms.docs(b, docsEnum.subDocsEnum[entry.index], flags);
      if (subDocsEnum != null) {
        docsEnum.subDocsEnum[entry.index] = subDocsEnum;
        subDocs[upto].docsEnum = subDocsEnum;
        subDocs[upto].slice = entry.subSlice;
        upto++;
      } else {
        // should this be an error?
        assert false : "One of our subs cannot provide a docsenum";
      }
    }

    if (upto == 0) {
      return null;
    } else {
      return docsEnum.reset(subDocs, upto);
    }
  }

  @Override
  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
    MultiDocsAndPositionsEnum docsAndPositionsEnum;
    // Can only reuse if incoming enum is also a MultiDocsAndPositionsEnum
    if (reuse != null && reuse instanceof MultiDocsAndPositionsEnum) {
      docsAndPositionsEnum = (MultiDocsAndPositionsEnum) reuse;
      // ... and was previously created w/ this MultiTermsEnum:
      if (!docsAndPositionsEnum.canReuse(this)) {
        docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.length);
      }
    } else {
      docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.length);
    }

    final MultiBits multiLiveDocs;
    if (liveDocs instanceof MultiBits) {
      multiLiveDocs = (MultiBits) liveDocs;
    } else {
      multiLiveDocs = null;
    }

    int upto = 0;

    for(int i=0;i<numTop;i++) {

      final TermsEnumWithSlice entry = top[i];

      final Bits b;

      if (multiLiveDocs != null) {
        // Optimize for common case: requested skip docs is a
        // congruent sub-slice of MultiBits: in this case, we
        // just pull the liveDocs from the sub reader, rather
        // than making the inefficient
        // Slice(Multi(sub-readers)):
        final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(top[i].subSlice);
        if (sub.matches) {
          b = sub.result;
        } else {
          // custom case: requested skip docs is foreign:
          // must slice it on every access (very
          // inefficient)
          b = new BitsSlice(liveDocs, top[i].subSlice);
        }
      } else if (liveDocs != null) {
        b = new BitsSlice(liveDocs, top[i].subSlice);
      } else {
        // no deletions
        b = null;
      }

      assert entry.index < docsAndPositionsEnum.subDocsAndPositionsEnum.length: entry.index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.length + "; " + subs.length;
      final DocsAndPositionsEnum subPostings = entry.terms.docsAndPositions(b, docsAndPositionsEnum.subDocsAndPositionsEnum[entry.index], flags);

      if (subPostings != null) {
        docsAndPositionsEnum.subDocsAndPositionsEnum[entry.index] = subPostings;
        subDocsAndPositions[upto].docsAndPositionsEnum = subPostings;
        subDocsAndPositions[upto].slice = entry.subSlice;
        upto++;
      } else {
        if (entry.terms.docs(b, null, DocsEnum.FLAG_NONE) != null) {
          // At least one of our subs does not store
          // offsets or positions -- we can't correctly
          // produce a MultiDocsAndPositions enum
          return null;
        }
      }
    }

    if (upto == 0) {
      return null;
    } else {
      return docsAndPositionsEnum.reset(subDocsAndPositions, upto);
    }
  }

  // A single sub-reader's TermsEnum plus the slice it covers and its position in `subs`.
  final static class TermsEnumWithSlice {
    private final ReaderSlice subSlice;
    TermsEnum terms;
    public BytesRef current;
    final int index;

    public TermsEnumWithSlice(int index, ReaderSlice subSlice) {
      this.subSlice = subSlice;
      this.index = index;
      assert subSlice.length >= 0: "length=" + subSlice.length;
    }

    public void reset(TermsEnum terms, BytesRef term) {
      this.terms = terms;
      current = term;
    }

    @Override
    public String toString() {
      return subSlice.toString()+":"+terms;
    }
  }

  // Priority queue ordering subs by current term, breaking ties by slice start
  // so merged postings come back in docID order.
  private final static class TermMergeQueue extends PriorityQueue<TermsEnumWithSlice> {
    Comparator<BytesRef> termComp;
    TermMergeQueue(int size) {
      super(size);
    }

    @Override
    protected boolean lessThan(TermsEnumWithSlice termsA, TermsEnumWithSlice termsB) {
      final int cmp = termComp.compare(termsA.current, termsB.current);
      if (cmp != 0) {
        return cmp < 0;
      } else {
        return termsA.subSlice.start < termsB.subSlice.start;
      }
    }
  }

  @Override
  public String toString() {
    return "MultiTermsEnum(" + Arrays.toString(subs) + ")";
  }
}
package com.uet.beman.fragment;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.ToggleButton;

import com.uet.beman.R;
import com.uet.beman.object.SentenceNode;
import com.uet.beman.support.BM_MessageCardHandler;
import com.uet.beman.support.BM_StorageHandler;

import java.util.Arrays;
import java.util.HashMap;

/**
 * A simple {@link Fragment} subclass.
 * Activities that contain this fragment must implement the
 * {@link BM_FragmentMessageDialog.OnFragmentInteractionListener} interface
 * to handle interaction events.
 * Use the {@link BM_FragmentMessageDialog#newInstance} factory method to
 * create an instance of this fragment.
 *
 * Dialog for editing a scheduled message ({@link SentenceNode}): its text, the days of the
 * week it is active (7 toggle buttons, index 0 = Monday), and an on/off switch.
 */
public class BM_FragmentMessageDialog extends DialogFragment implements CompoundButton.OnCheckedChangeListener{
    // TODO: Rename parameter arguments, choose names that match
    // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
    private static final String ARG_PARAM1 = "param1";
    private static final String ARG_PARAM2 = "param2";

    // Node being edited; set via setNode() BEFORE the dialog is shown.
    private SentenceNode currentNode;
    // true when the node already exists in storage (edit), false for a brand-new node (create).
    private Boolean nodeInit = true;
    private String msg;
    private String id;
    // Day flags Mon..Sun as '1'/'0' chars; persisted via SentenceNode.setDays().
    private char[] charArray = new char[7];
    private BM_StorageHandler storageHandler;
    private BM_MessageCardHandler messageCardHandler;
    // Maps day index ("0".."6") to its ToggleButton.
    private HashMap<String, ToggleButton> btnSet;
    // Mirrors the enabled switch; also updated as a side effect by enableDisableView().
    Boolean activate;
    View content, title;
    ToggleButton monBtn, tueBtn, wedBtn, thuBtn, friBtn, satBtn, sunBtn;
    Switch enabled;

    // TODO: Rename and change types of parameters
    private String mParam1;
    private String mParam2;

    MessageDialogListener mListener;

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment BM_FragmentMessageDialog.
     */
    // TODO: Rename and change types and number of parameters
    public static BM_FragmentMessageDialog newInstance(String param1, String param2) {
        BM_FragmentMessageDialog fragment = new BM_FragmentMessageDialog();
        Bundle args = new Bundle();
        args.putString(ARG_PARAM1, param1);
        args.putString(ARG_PARAM2, param2);
        fragment.setArguments(args);
        return fragment;
    }

    public BM_FragmentMessageDialog() {
        // Required empty public constructor
    }

    /**
     * Loads the node to edit into this dialog's working state.
     * A node with no days set defaults to all days enabled; a node with no enabled flag is
     * treated as enabled AND marks this dialog as creating a new node (nodeInit = false).
     */
    public void setNode(SentenceNode sentenceNode) {
        currentNode = sentenceNode;
        msg = sentenceNode.getMessage();
        id = sentenceNode.getId();
        if (sentenceNode.getDays() == null) Arrays.fill(charArray, '1');
        else charArray = sentenceNode.getDays().toCharArray();
        activate = (sentenceNode.getEnabled() == null || (sentenceNode.getEnabled().equals("1")));
        if (sentenceNode.getEnabled() == null) nodeInit = false;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            mParam1 = getArguments().getString(ARG_PARAM1);
            mParam2 = getArguments().getString(ARG_PARAM2);
        }
//        Arrays.fill(charArray, '0');
        btnSet = new HashMap<>();
        storageHandler = BM_StorageHandler.getInstance();
        messageCardHandler = BM_MessageCardHandler.getInstance();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // The UI is built entirely in onCreateDialog(); no fragment view is needed.
        return null;
    }

    /**
     * Builds the AlertDialog: custom title (with enabled switch), content (message text +
     * day toggles), and Save/Cancel buttons that report back through MessageDialogListener.
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
//        getActivity().requestWindowFeature(Window.FEATURE_NO_TITLE);
//        getActivity().setContentView(R.layout.dialog_message_info_title);
        // Build the dialog and set up the button click handlers
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        LayoutInflater inflater = getActivity().getLayoutInflater();
        content = inflater.inflate(R.layout.dialog_message_info_content, null);
        title = inflater.inflate(R.layout.dialog_message_info_title, null);
        final EditText message = (EditText) content.findViewById(R.id.dialog_message_content);
        message.setText(msg);

        // Set title and content layout for dialog, as well as set up listener
        builder.setCustomTitle(title);
        builder.setView(content);
        setButtonDaysListener(content);
        setSwitchListener(title);

        builder.setPositiveButton(R.string.dialog_action_save, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int id) {
                // Send the positive button event back to the host activity
                msg = message.getText().toString();
                currentNode.setEnabled(activate ? "1" : "0");
                currentNode.setMessage(msg);
                currentNode.setDays(String.valueOf(charArray));
                if(nodeInit) {
                    storageHandler.updateItemInMessageSet(currentNode.getLabel(), currentNode);
                } else {
                    // New node: force id "0" and enabled "1" before persisting.
                    // NOTE(review): this overrides the switch state chosen by the user for
                    // brand-new nodes — confirm this is intended.
                    currentNode.setId("0");
                    currentNode.setEnabled("1");
                    storageHandler.addItemInMessageSet(currentNode);
                }
                mListener.onDialogPositiveClick(BM_FragmentMessageDialog.this, currentNode);
            }
        })
                .setNegativeButton(R.string.dialog_action_cancel, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        // Send the negative button event back to the host activity
                        mListener.onDialogNegativeClick(BM_FragmentMessageDialog.this);
                    }
                });
        return builder.create();
    }

    /* The activity that creates an instance of this dialog fragment must
     * implement this interface in order to receive event callbacks.
     * Each method passes the DialogFragment in case the host needs to query it. */
    public interface MessageDialogListener {
        public void onDialogPositiveClick(DialogFragment dialog, SentenceNode currentNode);
        public void onDialogNegativeClick(DialogFragment dialog);
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            // The host activity is required to implement the callback interface.
            mListener = (MessageDialogListener) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString()
                    + " must implement MessageDialogListener");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mListener = null;
    }

    /**
     * Wires the enabled Switch in the title view: syncs its initial state from `activate`
     * and greys out / re-enables both title and content views when toggled.
     */
    private void setSwitchListener(View view) {
        enabled = (Switch) view.findViewById(R.id.dialog_message_switch);
        if(activate) {
            enabled.setChecked(true);
            enableDisableView(content, true);
            enableDisableView(title, true);
        } else {
            enabled.setChecked(false);
            enableDisableView(content, false);
            enableDisableView(title, false);
        }
        enabled.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if(isChecked) {
                    //do stuff when Switch is ON
                    enableDisableView(content, true);
                    enableDisableView(title, true);
                } else {
                    //do stuff when Switch if OFF
                    enableDisableView(content, false);
                    enableDisableView(title, false);
                }
            }
        });
    }

    /**
     * Recursively enables/disables a view tree. The title view is special-cased: only its
     * text label is toggled. NOTE(review): this also assigns `activate = enabled` as a side
     * effect on every non-title call — the dialog's saved on/off state depends on it.
     */
    private void enableDisableView(View view, boolean enabled) {
        if(view == title) {
            TextView textView = (TextView) view.findViewById(R.id.dialog_message_title_text);
            textView.setEnabled(enabled);
            return;
        } else {
            view.setEnabled(enabled);
        }
        activate = enabled;
        if ( view instanceof ViewGroup ) {
            ViewGroup group = (ViewGroup)view;

            for ( int idx = 0 ; idx < group.getChildCount() ; idx++ ) {
                View child = group.getChildAt(idx);
                if ( child instanceof Button) {
                    Button button = (Button)child;
                    button.setEnabled(enabled);
                }
                enableDisableView(child, enabled);
            }
        }
    }

    /**
     * Looks up the seven day ToggleButtons, checks those whose day flag is '1', and registers
     * this fragment as their change listener (see onCheckedChanged).
     */
    private void setButtonDaysListener(View view) {
        monBtn = (ToggleButton) view.findViewById(R.id.dialog_monBtn);
        tueBtn = (ToggleButton) view.findViewById(R.id.dialog_tueBtn);
        wedBtn = (ToggleButton) view.findViewById(R.id.dialog_wedBtn);
        thuBtn = (ToggleButton) view.findViewById(R.id.dialog_thuBtn);
        friBtn = (ToggleButton) view.findViewById(R.id.dialog_friBtn);
        satBtn = (ToggleButton) view.findViewById(R.id.dialog_satBtn);
        sunBtn = (ToggleButton) view.findViewById(R.id.dialog_sunBtn);

        btnSet.put("0", monBtn);
        btnSet.put("1", tueBtn);
        btnSet.put("2", wedBtn);
        btnSet.put("3", thuBtn);
        btnSet.put("4", friBtn);
        btnSet.put("5", satBtn);
        btnSet.put("6", sunBtn);

        for(int i = 0; i < 7; i++) {
            if(charArray[i] == '1') {
                String idx = String.valueOf(i);
                ToggleButton btn = btnSet.get(idx);
                btn.setChecked(true);
                btnSet.put(idx, btn);
            }
        }

        monBtn.setOnCheckedChangeListener(this);
        tueBtn.setOnCheckedChangeListener(this);
        wedBtn.setOnCheckedChangeListener(this);
        thuBtn.setOnCheckedChangeListener(this);
        friBtn.setOnCheckedChangeListener(this);
        satBtn.setOnCheckedChangeListener(this);
        sunBtn.setOnCheckedChangeListener(this);
    }

    /** Maps each day ToggleButton to its index in charArray and records the new state. */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
//        Button btn = (Button) view.findViewById(view.getId());
        switch(buttonView.getId()) {
            case R.id.dialog_monBtn:
                customValue(buttonView, 0, isChecked);
                break;
            case R.id.dialog_tueBtn:
                customValue(buttonView, 1, isChecked);
                break;
            case R.id.dialog_wedBtn:
                customValue(buttonView, 2, isChecked);
                break;
            case R.id.dialog_thuBtn:
                customValue(buttonView, 3, isChecked);
                break;
            case R.id.dialog_friBtn:
                customValue(buttonView, 4, isChecked);
                break;
            case R.id.dialog_satBtn:
                customValue(buttonView, 5, isChecked);
                break;
            case R.id.dialog_sunBtn:
                customValue(buttonView, 6, isChecked);
                break;
        }
    }

    /** Stores the toggle state of one day into the day-flags array ('1' = active). */
    private void customValue(CompoundButton btn, int index, boolean isChecked) {
        if(isChecked) {
            charArray[index] = '1';
        } else {
            charArray[index] = '0';
        }
    }

    /**
     * This interface must be implemented by activities that contain this
     * fragment to allow an interaction in this fragment to be communicated
     * to the activity and potentially other fragments contained in that
     * activity.
     * <p/>
     * See the Android Training lesson <a href=
     * "http://developer.android.com/training/basics/fragments/communicating.html"
     * >Communicating with Other Fragments</a> for more information.
     */
    public interface OnFragmentInteractionListener {
        // TODO: Update argument type and name
        public void onFragmentInteraction(Uri uri);
    }

}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl;

import java.util.Map;

import org.camunda.bpm.engine.AuthorizationService;
import org.camunda.bpm.engine.CaseService;
import org.camunda.bpm.engine.DecisionService;
import org.camunda.bpm.engine.ExternalTaskService;
import org.camunda.bpm.engine.FilterService;
import org.camunda.bpm.engine.FormService;
import org.camunda.bpm.engine.HistoryService;
import org.camunda.bpm.engine.IdentityService;
import org.camunda.bpm.engine.ManagementService;
import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.ProcessEngines;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.cfg.TransactionContextFactory;
import org.camunda.bpm.engine.impl.el.ExpressionManager;
import org.camunda.bpm.engine.impl.history.HistoryLevel;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.impl.interceptor.SessionFactory;
import org.camunda.bpm.engine.impl.jobexecutor.JobExecutor;
import org.camunda.bpm.engine.impl.metrics.reporter.DbMetricsReporter;

/**
 * Default {@link ProcessEngine} implementation. On construction it caches the
 * service facades and infrastructure objects already built by the supplied
 * {@link ProcessEngineConfigurationImpl}, executes the database schema
 * operations, registers itself globally and with the job executor, and starts
 * the metrics reporter when metrics are enabled.
 *
 * @author Tom Baeyens
 */
public class ProcessEngineImpl implements ProcessEngine {

  private final static ProcessEngineLogger LOG = ProcessEngineLogger.INSTANCE;

  protected String name;

  // Service facades resolved from the configuration.
  protected RepositoryService repositoryService;
  protected RuntimeService runtimeService;
  protected HistoryService historicDataService;
  protected IdentityService identityService;
  protected TaskService taskService;
  protected FormService formService;
  protected ManagementService managementService;
  protected AuthorizationService authorizationService;
  protected CaseService caseService;
  protected FilterService filterService;
  protected ExternalTaskService externalTaskService;
  protected DecisionService decisionService;

  // Infrastructure resolved from the configuration.
  protected String databaseSchemaUpdate;
  protected JobExecutor jobExecutor;
  protected CommandExecutor commandExecutor;
  protected CommandExecutor commandExecutorSchemaOperations;
  protected Map<Class<?>, SessionFactory> sessionFactories;
  protected ExpressionManager expressionManager;
  protected HistoryLevel historyLevel;
  protected TransactionContextFactory transactionContextFactory;
  protected ProcessEngineConfigurationImpl processEngineConfiguration;

  /**
   * Wires the engine from a fully initialized configuration, then performs the
   * startup sequence: schema operations, global registration, job-executor
   * registration, and (optionally) metrics reporting.
   *
   * @param processEngineConfiguration the fully built engine configuration
   */
  public ProcessEngineImpl(ProcessEngineConfigurationImpl processEngineConfiguration) {
    this.processEngineConfiguration = processEngineConfiguration;

    name = processEngineConfiguration.getProcessEngineName();

    repositoryService = processEngineConfiguration.getRepositoryService();
    runtimeService = processEngineConfiguration.getRuntimeService();
    historicDataService = processEngineConfiguration.getHistoryService();
    identityService = processEngineConfiguration.getIdentityService();
    taskService = processEngineConfiguration.getTaskService();
    formService = processEngineConfiguration.getFormService();
    managementService = processEngineConfiguration.getManagementService();
    authorizationService = processEngineConfiguration.getAuthorizationService();
    caseService = processEngineConfiguration.getCaseService();
    filterService = processEngineConfiguration.getFilterService();
    externalTaskService = processEngineConfiguration.getExternalTaskService();
    decisionService = processEngineConfiguration.getDecisionService();

    databaseSchemaUpdate = processEngineConfiguration.getDatabaseSchemaUpdate();
    jobExecutor = processEngineConfiguration.getJobExecutor();
    commandExecutor = processEngineConfiguration.getCommandExecutorTxRequired();
    commandExecutorSchemaOperations = processEngineConfiguration.getCommandExecutorSchemaOperations();
    sessionFactories = processEngineConfiguration.getSessionFactories();
    historyLevel = processEngineConfiguration.getHistoryLevel();
    transactionContextFactory = processEngineConfiguration.getTransactionContextFactory();

    // Create/upgrade the database schema before anything else touches it.
    executeSchemaOperations();

    LOG.processEngineCreated(name == null ? "default" : name);

    ProcessEngines.registerProcessEngine(this);

    if (jobExecutor != null) {
      // make the job executor aware of this engine
      jobExecutor.registerProcessEngine(this);
    }

    if (processEngineConfiguration.isMetricsEnabled()) {
      String reporterId = processEngineConfiguration.getMetricsReporterIdProvider().provideId(this);
      DbMetricsReporter metricsReporter = processEngineConfiguration.getDbMetricsReporter();
      metricsReporter.setReporterId(reporterId);

      if (processEngineConfiguration.isDbMetricsReporterActivate()) {
        metricsReporter.start();
      }
    }
  }

  /** Runs the schema build command on the dedicated schema-operations executor. */
  protected void executeSchemaOperations() {
    commandExecutorSchemaOperations.execute(new SchemaOperationsProcessEngineBuild());
  }

  /**
   * Shuts the engine down in reverse order of construction: deregisters it
   * globally, stops metrics reporting, detaches the job executor, runs the
   * schema close operation, and finally closes the configuration.
   */
  public void close() {
    ProcessEngines.unregister(this);

    if (processEngineConfiguration.isMetricsEnabled()) {
      processEngineConfiguration.getDbMetricsReporter().stop();
    }

    if (jobExecutor != null) {
      // detach this engine from the job executor again
      jobExecutor.unregisterProcessEngine(this);
    }

    commandExecutorSchemaOperations.execute(new SchemaOperationProcessEngineClose());

    processEngineConfiguration.close();
  }

  // getters and setters //////////////////////////////////////////////////////

  public String getName() { return name; }

  public IdentityService getIdentityService() { return identityService; }

  public ManagementService getManagementService() { return managementService; }

  public TaskService getTaskService() { return taskService; }

  public HistoryService getHistoryService() { return historicDataService; }

  public RuntimeService getRuntimeService() { return runtimeService; }

  public RepositoryService getRepositoryService() { return repositoryService; }

  public FormService getFormService() { return formService; }

  public AuthorizationService getAuthorizationService() { return authorizationService; }

  public CaseService getCaseService() { return caseService; }

  public FilterService getFilterService() { return filterService; }

  public ExternalTaskService getExternalTaskService() { return externalTaskService; }

  public DecisionService getDecisionService() { return decisionService; }

  public ProcessEngineConfigurationImpl getProcessEngineConfiguration() { return processEngineConfiguration; }

}
package AST;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.io.File;
import java.util.*;
import beaver.*;
import java.util.ArrayList;
import java.util.zip.*;
import java.io.*;
import java.util.Stack;
import java.util.regex.Pattern;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.Transformer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Document;
import java.util.HashMap;
import java.util.Map.Entry;
import javax.xml.transform.TransformerException;
import javax.xml.parsers.ParserConfigurationException;
import java.util.Collection;
// NOTE(review): GENERATED CODE (JastAdd, from java.ast / .jrag aspects — see
// @declaredat tags below). Do not hand-edit; regenerate from the grammar instead.
// UnknownType is the placeholder type node; its type predicates below all
// return true and subtype()/instanceOf() always succeed — presumably so that
// an unresolved type matches any context and errors do not cascade (inferred
// from the compute bodies; confirm against the JastAddJ aspects).
/**
 * @ast node
 * @declaredat java.ast:48
 */
public class UnknownType extends ClassDecl implements Cloneable {
  /**
   * @apilvl low-level
   */
  public void flushCache() {
    super.flushCache();
    // Clear the memoized attribute caches declared on this node.
    instanceOf_TypeDecl_values = null;
    typeDescriptor_computed = false;
    typeDescriptor_value = null;
    subtype_TypeDecl_values = null;
  }
  /**
   * @apilvl internal
   */
  public void flushCollectionCache() {
    super.flushCollectionCache();
  }
  /**
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public UnknownType clone() throws CloneNotSupportedException {
    // Cloned node starts with empty attribute caches and cleared circularity flags.
    UnknownType node = (UnknownType)super.clone();
    node.instanceOf_TypeDecl_values = null;
    node.typeDescriptor_computed = false;
    node.typeDescriptor_value = null;
    node.subtype_TypeDecl_values = null;
    node.in$Circle(false);
    node.is$Final(false);
    return node;
  }
  /**
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public UnknownType copy() {
    // Shallow copy: shares child nodes but duplicates the children array.
    try {
      UnknownType node = (UnknownType)clone();
      if(children != null) node.children = (ASTNode[])children.clone();
      return node;
    } catch (CloneNotSupportedException e) {
    }
    System.err.println("Error: Could not clone node of type " + getClass().getName() + "!");
    return null;
  }
  /**
   * Deep copy: recursively copies the whole subtree.
   * @apilvl low-level
   */
  @SuppressWarnings({"unchecked", "cast"})
  public UnknownType fullCopy() {
    UnknownType res = (UnknownType)copy();
    for(int i = 0; i < getNumChildNoTransform(); i++) {
      ASTNode node = getChildNoTransform(i);
      if(node != null) node = node.fullCopy();
      res.setChild(node, i);
    }
    return res;
  }
  /**
   * Child layout: 0 = Modifiers, 1 = SuperClassAccessOpt, 2 = ImplementsList, 3 = BodyDeclList.
   * @ast method
   * @declaredat java.ast:1
   */
  public UnknownType() {
    super();
    setChild(new Opt(), 1);
    setChild(new List(), 2);
    setChild(new List(), 3);
  }
  /**
   * @ast method
   * @declaredat java.ast:10
   */
  public UnknownType(Modifiers p0, String p1, Opt<Access> p2, List<Access> p3, List<BodyDecl> p4) {
    setChild(p0, 0);
    setID(p1);
    setChild(p2, 1);
    setChild(p3, 2);
    setChild(p4, 3);
  }
  /**
   * @ast method
   * @declaredat java.ast:17
   */
  public UnknownType(Modifiers p0, beaver.Symbol p1, Opt<Access> p2, List<Access> p3, List<BodyDecl> p4) {
    setChild(p0, 0);
    setID(p1);
    setChild(p2, 1);
    setChild(p3, 2);
    setChild(p4, 3);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:27
   */
  protected int numChildren() {
    return 4;
  }
  /**
   * @apilvl internal
   * @ast method
   * @declaredat java.ast:33
   */
  public boolean mayHaveRewrite() {
    return true;
  }
  /**
   * Setter for Modifiers
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setModifiers(Modifiers node) {
    setChild(node, 0);
  }
  /**
   * Getter for Modifiers
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:12
   */
  public Modifiers getModifiers() {
    return (Modifiers)getChild(0);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:18
   */
  public Modifiers getModifiersNoTransform() {
    return (Modifiers)getChildNoTransform(0);
  }
  /**
   * Setter for lexeme ID
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setID(String value) {
    tokenString_ID = value;
  }
  /**
   * Sets the ID from a parser symbol, recording source start/end positions.
   * @ast method
   * @declaredat java.ast:8
   */
  public void setID(beaver.Symbol symbol) {
    if(symbol.value != null && !(symbol.value instanceof String))
      throw new UnsupportedOperationException("setID is only valid for String lexemes");
    tokenString_ID = (String)symbol.value;
    IDstart = symbol.getStart();
    IDend = symbol.getEnd();
  }
  /**
   * Getter for lexeme ID (never null; empty string when unset)
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:19
   */
  public String getID() {
    return tokenString_ID != null ? tokenString_ID : "";
  }
  /**
   * Setter for SuperClassAccessOpt
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setSuperClassAccessOpt(Opt<Access> opt) {
    setChild(opt, 1);
  }
  /**
   * Does this node have a SuperClassAccess child?
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:12
   */
  public boolean hasSuperClassAccess() {
    return getSuperClassAccessOpt().getNumChild() != 0;
  }
  /**
   * Getter for optional child SuperClassAccess
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:19
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Access getSuperClassAccess() {
    return (Access)getSuperClassAccessOpt().getChild(0);
  }
  /**
   * Setter for optional child SuperClassAccess
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:27
   */
  public void setSuperClassAccess(Access node) {
    getSuperClassAccessOpt().setChild(node, 0);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:37
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Opt<Access> getSuperClassAccessOpt() {
    return (Opt<Access>)getChild(1);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:44
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Opt<Access> getSuperClassAccessOptNoTransform() {
    return (Opt<Access>)getChildNoTransform(1);
  }
  /**
   * Setter for ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setImplementsList(List<Access> list) {
    setChild(list, 2);
  }
  /**
   * @return number of children in ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:12
   */
  public int getNumImplements() {
    return getImplementsList().getNumChild();
  }
  /**
   * Getter for child in list ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:19
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Access getImplements(int i) {
    return (Access)getImplementsList().getChild(i);
  }
  /**
   * Add element to list ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:27
   */
  public void addImplements(Access node) {
    // Before the node is attached to a tree (no parent/state), skip rewrites.
    List<Access> list = (parent == null || state == null) ? getImplementsListNoTransform() : getImplementsList();
    list.addChild(node);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:34
   */
  public void addImplementsNoTransform(Access node) {
    List<Access> list = getImplementsListNoTransform();
    list.addChild(node);
  }
  /**
   * Setter for child in list ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:42
   */
  public void setImplements(Access node, int i) {
    List<Access> list = getImplementsList();
    list.setChild(node, i);
  }
  /**
   * Getter for Implements list.
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:50
   */
  public List<Access> getImplementss() {
    return getImplementsList();
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:56
   */
  public List<Access> getImplementssNoTransform() {
    return getImplementsListNoTransform();
  }
  /**
   * Getter for list ImplementsList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:63
   */
  @SuppressWarnings({"unchecked", "cast"})
  public List<Access> getImplementsList() {
    List<Access> list = (List<Access>)getChild(2);
    list.getNumChild();
    return list;
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:72
   */
  @SuppressWarnings({"unchecked", "cast"})
  public List<Access> getImplementsListNoTransform() {
    return (List<Access>)getChildNoTransform(2);
  }
  /**
   * Setter for BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setBodyDeclList(List<BodyDecl> list) {
    setChild(list, 3);
  }
  /**
   * @return number of children in BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:12
   */
  public int getNumBodyDecl() {
    return getBodyDeclList().getNumChild();
  }
  /**
   * Getter for child in list BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:19
   */
  @SuppressWarnings({"unchecked", "cast"})
  public BodyDecl getBodyDecl(int i) {
    return (BodyDecl)getBodyDeclList().getChild(i);
  }
  /**
   * Add element to list BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:27
   */
  public void addBodyDecl(BodyDecl node) {
    // Before the node is attached to a tree (no parent/state), skip rewrites.
    List<BodyDecl> list = (parent == null || state == null) ? getBodyDeclListNoTransform() : getBodyDeclList();
    list.addChild(node);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:34
   */
  public void addBodyDeclNoTransform(BodyDecl node) {
    List<BodyDecl> list = getBodyDeclListNoTransform();
    list.addChild(node);
  }
  /**
   * Setter for child in list BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:42
   */
  public void setBodyDecl(BodyDecl node, int i) {
    List<BodyDecl> list = getBodyDeclList();
    list.setChild(node, i);
  }
  /**
   * Getter for BodyDecl list.
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:50
   */
  public List<BodyDecl> getBodyDecls() {
    return getBodyDeclList();
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:56
   */
  public List<BodyDecl> getBodyDeclsNoTransform() {
    return getBodyDeclListNoTransform();
  }
  /**
   * Getter for list BodyDeclList
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:63
   */
  @SuppressWarnings({"unchecked", "cast"})
  public List<BodyDecl> getBodyDeclList() {
    List<BodyDecl> list = (List<BodyDecl>)getChild(3);
    list.getNumChild();
    return list;
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:72
   */
  @SuppressWarnings({"unchecked", "cast"})
  public List<BodyDecl> getBodyDeclListNoTransform() {
    return (List<BodyDecl>)getChildNoTransform(3);
  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:167
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isReferenceType() {
    ASTNode$State state = state();
    boolean isReferenceType_value = isReferenceType_compute();
    return isReferenceType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isReferenceType_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:170
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isPrimitiveType() {
    ASTNode$State state = state();
    boolean isPrimitiveType_value = isPrimitiveType_compute();
    return isPrimitiveType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isPrimitiveType_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:175
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isNumericType() {
    ASTNode$State state = state();
    boolean isNumericType_value = isNumericType_compute();
    return isNumericType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isNumericType_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:179
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isIntegralType() {
    ASTNode$State state = state();
    boolean isIntegralType_value = isIntegralType_compute();
    return isIntegralType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isIntegralType_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:183
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isBoolean() {
    ASTNode$State state = state();
    boolean isBoolean_value = isBoolean_compute();
    return isBoolean_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isBoolean_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:193
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isInt() {
    ASTNode$State state = state();
    boolean isInt_value = isInt_compute();
    return isInt_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isInt_compute() {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:233
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isUnknown() {
    ASTNode$State state = state();
    boolean isUnknown_value = isUnknown_compute();
    return isUnknown_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isUnknown_compute() {  return true;  }
  /**
   * Memoized per-argument: results are cached once attribute evaluation is final.
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:393
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean instanceOf(TypeDecl type) {
    Object _parameters = type;
    if(instanceOf_TypeDecl_values == null) instanceOf_TypeDecl_values = new java.util.HashMap(4);
    if(instanceOf_TypeDecl_values.containsKey(_parameters)) {
      return ((Boolean)instanceOf_TypeDecl_values.get(_parameters)).booleanValue();
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    boolean instanceOf_TypeDecl_value = instanceOf_compute(type);
    if(isFinal && num == state().boundariesCrossed) instanceOf_TypeDecl_values.put(_parameters, Boolean.valueOf(instanceOf_TypeDecl_value));
    return instanceOf_TypeDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean instanceOf_compute(TypeDecl type) {  return subtype(type);  }
  /**
   * @attribute syn
   * @aspect TypeWideningAndIdentity
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:537
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isSupertypeOfClassDecl(ClassDecl type) {
    ASTNode$State state = state();
    boolean isSupertypeOfClassDecl_ClassDecl_value = isSupertypeOfClassDecl_compute(type);
    return isSupertypeOfClassDecl_ClassDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isSupertypeOfClassDecl_compute(ClassDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeWideningAndIdentity
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:538
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isSupertypeOfInterfaceDecl(InterfaceDecl type) {
    ASTNode$State state = state();
    boolean isSupertypeOfInterfaceDecl_InterfaceDecl_value = isSupertypeOfInterfaceDecl_compute(type);
    return isSupertypeOfInterfaceDecl_InterfaceDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isSupertypeOfInterfaceDecl_compute(InterfaceDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeWideningAndIdentity
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:539
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isSupertypeOfArrayDecl(ArrayDecl type) {
    ASTNode$State state = state();
    boolean isSupertypeOfArrayDecl_ArrayDecl_value = isSupertypeOfArrayDecl_compute(type);
    return isSupertypeOfArrayDecl_ArrayDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isSupertypeOfArrayDecl_compute(ArrayDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeWideningAndIdentity
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:540
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isSupertypeOfPrimitiveType(PrimitiveType type) {
    ASTNode$State state = state();
    boolean isSupertypeOfPrimitiveType_PrimitiveType_value = isSupertypeOfPrimitiveType_compute(type);
    return isSupertypeOfPrimitiveType_PrimitiveType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isSupertypeOfPrimitiveType_compute(PrimitiveType type) {  return true;  }
  /**
   * @attribute syn
   * @aspect TypeWideningAndIdentity
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:541
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isSupertypeOfNullType(NullType type) {
    ASTNode$State state = state();
    boolean isSupertypeOfNullType_NullType_value = isSupertypeOfNullType_compute(type);
    return isSupertypeOfNullType_NullType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isSupertypeOfNullType_compute(NullType type) {  return true;  }
  /**
   * Memoized; the compute body always throws, so a bytecode descriptor can
   * never be produced for the Unknown type.
   * @attribute syn
   * @aspect ConstantPoolNames
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\ConstantPoolNames.jrag:32
   */
  @SuppressWarnings({"unchecked", "cast"})
  public String typeDescriptor() {
    if(typeDescriptor_computed) {
      return typeDescriptor_value;
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    typeDescriptor_value = typeDescriptor_compute();
    if(isFinal && num == state().boundariesCrossed) typeDescriptor_computed = true;
    return typeDescriptor_value;
  }
  /**
   * @apilvl internal
   */
  private String typeDescriptor_compute() {
    throw new Error("Trying to make a typeDescriptor() of Unknown");
  }
  /**
   * @attribute syn
   * @aspect AutoBoxing
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\AutoBoxing.jrag:171
   */
  @SuppressWarnings({"unchecked", "cast"})
  public TypeDecl unaryNumericPromotion() {
    ASTNode$State state = state();
    TypeDecl unaryNumericPromotion_value = unaryNumericPromotion_compute();
    return unaryNumericPromotion_value;
  }
  /**
   * @apilvl internal
   */
  private TypeDecl unaryNumericPromotion_compute() {  return this;  }
  /**
   * @attribute syn
   * @aspect AutoBoxing
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\AutoBoxing.jrag:194
   */
  @SuppressWarnings({"unchecked", "cast"})
  public TypeDecl binaryNumericPromotion(TypeDecl type) {
    ASTNode$State state = state();
    TypeDecl binaryNumericPromotion_TypeDecl_value = binaryNumericPromotion_compute(type);
    return binaryNumericPromotion_TypeDecl_value;
  }
  /**
   * @apilvl internal
   */
  private TypeDecl binaryNumericPromotion_compute(TypeDecl type) {  return this;  }
  /**
   * Circular attribute: JastAdd fixed-point evaluation machinery — iterates
   * until the value stabilizes, caching per-argument when final.
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:414
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean subtype(TypeDecl type) {
    Object _parameters = type;
    if(subtype_TypeDecl_values == null) subtype_TypeDecl_values = new java.util.HashMap(4);
    ASTNode$State.CircularValue _value;
    if(subtype_TypeDecl_values.containsKey(_parameters)) {
      Object _o = subtype_TypeDecl_values.get(_parameters);
      if(!(_o instanceof ASTNode$State.CircularValue)) {
        return ((Boolean)_o).booleanValue();
      }
      else
        _value = (ASTNode$State.CircularValue)_o;
    }
    else {
      _value = new ASTNode$State.CircularValue();
      subtype_TypeDecl_values.put(_parameters, _value);
      _value.value = Boolean.valueOf(true);
    }
    ASTNode$State state = state();
    if (!state.IN_CIRCLE) {
      // Not currently in a fixed-point iteration: drive one to completion here.
      state.IN_CIRCLE = true;
      int num = state.boundariesCrossed;
      boolean isFinal = this.is$Final();
      boolean new_subtype_TypeDecl_value;
      do {
        _value.visited = new Integer(state.CIRCLE_INDEX);
        state.CHANGE = false;
        new_subtype_TypeDecl_value = subtype_compute(type);
        if (new_subtype_TypeDecl_value!=((Boolean)_value.value).booleanValue()) {
          state.CHANGE = true;
          _value.value = Boolean.valueOf(new_subtype_TypeDecl_value);
        }
        state.CIRCLE_INDEX++;
      } while (state.CHANGE);
      if(isFinal && num == state().boundariesCrossed) {
        subtype_TypeDecl_values.put(_parameters, new_subtype_TypeDecl_value);
      }
      else {
        // Cannot cache: discard and reset any partially evaluated cycle.
        subtype_TypeDecl_values.remove(_parameters);
        state.RESET_CYCLE = true;
        subtype_compute(type);
        state.RESET_CYCLE = false;
      }
      state.IN_CIRCLE = false;
      return new_subtype_TypeDecl_value;
    }
    if(!new Integer(state.CIRCLE_INDEX).equals(_value.visited)) {
      // Already inside an enclosing fixed-point iteration: contribute one step.
      _value.visited = new Integer(state.CIRCLE_INDEX);
      boolean new_subtype_TypeDecl_value = subtype_compute(type);
      if (state.RESET_CYCLE) {
        subtype_TypeDecl_values.remove(_parameters);
      }
      else if (new_subtype_TypeDecl_value!=((Boolean)_value.value).booleanValue()) {
        state.CHANGE = true;
        _value.value = new_subtype_TypeDecl_value;
      }
      return new_subtype_TypeDecl_value;
    }
    return ((Boolean)_value.value).booleanValue();
  }
  /**
   * @apilvl internal
   */
  private boolean subtype_compute(TypeDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:415
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean supertypeClassDecl(ClassDecl type) {
    ASTNode$State state = state();
    boolean supertypeClassDecl_ClassDecl_value = supertypeClassDecl_compute(type);
    return supertypeClassDecl_ClassDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean supertypeClassDecl_compute(ClassDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:416
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean supertypeInterfaceDecl(InterfaceDecl type) {
    ASTNode$State state = state();
    boolean supertypeInterfaceDecl_InterfaceDecl_value = supertypeInterfaceDecl_compute(type);
    return supertypeInterfaceDecl_InterfaceDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean supertypeInterfaceDecl_compute(InterfaceDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:417
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean supertypeArrayDecl(ArrayDecl type) {
    ASTNode$State state = state();
    boolean supertypeArrayDecl_ArrayDecl_value = supertypeArrayDecl_compute(type);
    return supertypeArrayDecl_ArrayDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean supertypeArrayDecl_compute(ArrayDecl type) {  return true;  }
  /**
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:418
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean supertypePrimitiveType(PrimitiveType type) {
    ASTNode$State state = state();
    boolean supertypePrimitiveType_PrimitiveType_value = supertypePrimitiveType_compute(type);
    return supertypePrimitiveType_PrimitiveType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean supertypePrimitiveType_compute(PrimitiveType type) {  return true;  }
  /**
   * @attribute syn
   * @aspect GenericsSubtype
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\GenericsSubtype.jrag:419
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean supertypeNullType(NullType type) {
    ASTNode$State state = state();
    boolean supertypeNullType_NullType_value = supertypeNullType_compute(type);
    return supertypeNullType_NullType_value;
  }
  /**
   * @apilvl internal
   */
  private boolean supertypeNullType_compute(NullType type) {  return true;  }
  /**
   * @apilvl internal
   */
  public ASTNode rewriteTo() {
    return super.rewriteTo();
  }
}
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugin.common;

import android.util.Log;
import io.flutter.plugin.common.BinaryMessenger.BinaryMessageHandler;
import io.flutter.plugin.common.BinaryMessenger.BinaryReply;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

/**
 * A named channel for communicating with the Flutter application using asynchronous
 * event streams.
 *
 * <p>Incoming requests for event stream setup are decoded from binary on receipt, and
 * Java responses and events are encoded into binary before being transmitted back
 * to Flutter. The {@link MethodCodec} used must be compatible with the one used by
 * the Flutter application. This can be achieved by creating an
 * <a href="https://docs.flutter.io/flutter/services/EventChannel-class.html">EventChannel</a>
 * counterpart of this channel on the Dart side. The Java type of stream configuration arguments,
 * events, and error details is {@code Object}, but only values supported by the specified
 * {@link MethodCodec} can be used.</p>
 *
 * <p>The logical identity of the channel is given by its name. Identically named channels will
 * interfere with each other's communication.</p>
 */
public final class EventChannel {
  // Prefix for the per-channel log tag; the channel name is appended when logging.
  private static final String TAG = "EventChannel#";

  private final BinaryMessenger messenger;
  private final String name;
  private final MethodCodec codec;

  /**
   * Creates a new channel associated with the specified {@link BinaryMessenger}
   * and with the specified name and the standard {@link MethodCodec}.
   *
   * @param messenger a {@link BinaryMessenger}.
   * @param name a channel name String.
   */
  public EventChannel(BinaryMessenger messenger, String name) {
    this(messenger, name, StandardMethodCodec.INSTANCE);
  }

  /**
   * Creates a new channel associated with the specified {@link BinaryMessenger}
   * and with the specified name and {@link MethodCodec}.
   *
   * @param messenger a {@link BinaryMessenger}.
   * @param name a channel name String.
   * @param codec a {@link MethodCodec}.
   */
  public EventChannel(BinaryMessenger messenger, String name, MethodCodec codec) {
    // assert (not exceptions) matches the construction contract used elsewhere in
    // this package; these only fire when -ea is enabled.
    assert messenger != null;
    assert name != null;
    assert codec != null;
    this.messenger = messenger;
    this.name = name;
    this.codec = codec;
  }

  /**
   * Registers a stream handler on this channel.
   *
   * <p>Overrides any existing handler registration for (the name of) this channel.</p>
   *
   * <p>If no handler has been registered, any incoming stream setup requests will be handled
   * silently by providing an empty stream.</p>
   *
   * @param handler a {@link StreamHandler}, or null to deregister.
   */
  public void setStreamHandler(final StreamHandler handler) {
    messenger.setMessageHandler(name,
        handler == null ? null : new IncomingStreamRequestHandler(handler));
  }

  /**
   * Handler of stream setup and tear-down requests.
   *
   * <p>Implementations must be prepared to accept sequences of alternating calls to
   * {@link #onListen(Object, EventSink)} and {@link #onCancel(Object)}. Implementations
   * should ideally consume no resources when the last such call is not {@code onListen}.
   * In typical situations, this means that the implementation should register itself
   * with platform-specific event sources {@code onListen} and deregister again
   * {@code onCancel}.</p>
   */
  public interface StreamHandler {
    /**
     * Handles a request to set up an event stream.
     *
     * <p>Any uncaught exception thrown by this method will be caught by the channel
     * implementation and logged. An error result message will be sent back to Flutter.</p>
     *
     * @param arguments stream configuration arguments, possibly null.
     * @param events an {@link EventSink} for emitting events to the Flutter receiver.
     */
    void onListen(Object arguments, EventSink events);

    /**
     * Handles a request to tear down the most recently created event stream.
     *
     * <p>Any uncaught exception thrown by this method will be caught by the channel
     * implementation and logged. An error result message will be sent back to Flutter.</p>
     *
     * <p>The channel implementation may call this method with null arguments
     * to separate a pair of two consecutive set up requests. Such request pairs
     * may occur during Flutter hot restart. Any uncaught exception thrown
     * in this situation will be logged without notifying Flutter.</p>
     *
     * @param arguments stream configuration arguments, possibly null.
     */
    void onCancel(Object arguments);
  }

  /**
   * Event callback. Supports dual use: Producers of events to be sent to Flutter
   * act as clients of this interface for sending events. Consumers of events sent
   * from Flutter implement this interface for handling received events (the latter
   * facility has not been implemented yet).
   */
  public interface EventSink {
    /**
     * Consumes a successful event.
     *
     * @param event the event, possibly null.
     */
    void success(Object event);

    /**
     * Consumes an error event.
     *
     * @param errorCode an error code String.
     * @param errorMessage a human-readable error message String, possibly null.
     * @param errorDetails error details, possibly null
     */
    void error(String errorCode, String errorMessage, Object errorDetails);

    /**
     * Consumes end of stream. Ensuing calls to {@link #success(Object)} or
     * {@link #error(String, String, Object)}, if any, are ignored.
     */
    void endOfStream();
  }

  /**
   * Decodes incoming "listen"/"cancel" method calls from the Flutter side and
   * routes them to the registered {@link StreamHandler}.
   */
  private final class IncomingStreamRequestHandler implements BinaryMessageHandler {
    private final StreamHandler handler;
    // Holds the sink of the currently active stream; identity of the stored sink is
    // used below to ignore events emitted by stale (replaced or cancelled) sinks.
    private final AtomicReference<EventSink> activeSink = new AtomicReference<>(null);

    IncomingStreamRequestHandler(StreamHandler handler) {
      this.handler = handler;
    }

    @Override
    public void onMessage(ByteBuffer message, final BinaryReply reply) {
      final MethodCall call = codec.decodeMethodCall(message);
      if (call.method.equals("listen")) {
        onListen(call.arguments, reply);
      } else if (call.method.equals("cancel")) {
        onCancel(call.arguments, reply);
      } else {
        // Unknown method: a null reply signals "not implemented" to the caller.
        reply.reply(null);
      }
    }

    private void onListen(Object arguments, BinaryReply callback) {
      final EventSink eventSink = new EventSinkImplementation();
      final EventSink oldSink = activeSink.getAndSet(eventSink);
      if (oldSink != null) {
        // Repeated calls to onListen may happen during hot restart.
        // We separate them with a call to onCancel.
        try {
          handler.onCancel(null);
        } catch (RuntimeException e) {
          Log.e(TAG + name, "Failed to close existing event stream", e);
        }
      }
      try {
        handler.onListen(arguments, eventSink);
        callback.reply(codec.encodeSuccessEnvelope(null));
      } catch (RuntimeException e) {
        // Roll back the sink registration so a failed setup leaves no active stream.
        activeSink.set(null);
        Log.e(TAG + name, "Failed to open event stream", e);
        callback.reply(codec.encodeErrorEnvelope("error", e.getMessage(), null));
      }
    }

    private void onCancel(Object arguments, BinaryReply callback) {
      final EventSink oldSink = activeSink.getAndSet(null);
      if (oldSink != null) {
        try {
          handler.onCancel(arguments);
          callback.reply(codec.encodeSuccessEnvelope(null));
        } catch (RuntimeException e) {
          Log.e(TAG + name, "Failed to close event stream", e);
          callback.reply(codec.encodeErrorEnvelope("error", e.getMessage(), null));
        }
      } else {
        callback.reply(codec.encodeErrorEnvelope("error", "No active stream to cancel", null));
      }
    }

    /**
     * The sink handed to {@link StreamHandler#onListen}. Events are dropped once the
     * stream has ended or this sink is no longer the active one.
     */
    private final class EventSinkImplementation implements EventSink {
      // Set once by endOfStream(); subsequent success/error calls are ignored.
      final AtomicBoolean hasEnded = new AtomicBoolean(false);

      @Override
      public void success(Object event) {
        if (hasEnded.get() || activeSink.get() != this) {
          return;
        }
        EventChannel.this.messenger.send(name, codec.encodeSuccessEnvelope(event));
      }

      @Override
      public void error(String errorCode, String errorMessage, Object errorDetails) {
        if (hasEnded.get() || activeSink.get() != this) {
          return;
        }
        EventChannel.this.messenger.send(
            name, codec.encodeErrorEnvelope(errorCode, errorMessage, errorDetails));
      }

      @Override
      public void endOfStream() {
        if (hasEnded.getAndSet(true) || activeSink.get() != this) {
          return;
        }
        // A null message is sent to mark the end of the stream.
        EventChannel.this.messenger.send(name, null);
      }
    }
  }
}
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.weblayer.test;

import static org.chromium.base.test.util.Restriction.RESTRICTION_TYPE_NON_LOW_END_DEVICE;
import static org.chromium.content_public.browser.test.util.TestThreadUtils.runOnUiThreadBlocking;

import android.support.test.InstrumentationRegistry;
import android.view.View;

import androidx.test.filters.LargeTest;

import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.FlakyTest;
import org.chromium.base.test.util.Restriction;
import org.chromium.content_public.browser.test.util.ClickUtils;
import org.chromium.content_public.browser.test.util.TestTouchUtils;
import org.chromium.content_public.common.ContentSwitches;
import org.chromium.ui.test.util.UiRestriction;
import org.chromium.weblayer.Browser;
import org.chromium.weblayer.Tab;
import org.chromium.weblayer.TestWebLayer;
import org.chromium.weblayer.shell.InstrumentationActivity;

/**
 * Tests of the Presentation API.
 *
 * <p>Each test drives a page (media_router/basic_test.html) through javascript helpers and
 * polls a page-global {@code lastExecutionResult} variable to observe asynchronous outcomes.
 * The media route provider is mocked via {@link TestWebLayer}.
 */
@MinWebLayerVersion(88)
@RunWith(WebLayerJUnit4ClassRunner.class)
@CommandLineFlags.Add({ContentSwitches.DISABLE_GESTURE_REQUIREMENT_FOR_PRESENTATION})
public class MediaRouterTest {
    @Rule
    public InstrumentationActivityTestRule mActivityTestRule =
            new InstrumentationActivityTestRule();

    private InstrumentationActivity mActivity;

    private static final String TEST_PAGE = "media_router/basic_test.html";
    // Polling budget for asynchronous javascript results.
    private static final int SCRIPT_TIMEOUT_MS = 10000;
    private static final int SCRIPT_RETRY_MS = 150;
    // Sink name registered by the mock media route provider.
    private static final String TEST_SINK_NAME = "test-sink-1";

    // Javascript snippets.
    private static final String WAIT_DEVICE_SCRIPT = "waitUntilDeviceAvailable();";
    private static final String START_PRESENTATION_SCRIPT = "startPresentation();";
    private static final String TERMINATE_CONNECTION_SCRIPT =
            "terminateConnectionAndWaitForStateChange();";

    @Before
    public void setUp() {
        mActivity = mActivityTestRule.launchShellWithUrl("about:blank");
    }

    private TestWebLayer getTestWebLayer() {
        return TestWebLayer.getTestWebLayer(mActivity.getApplicationContext());
    }

    /**
     * Runs {@code script} in the page and polls until the page sets
     * {@code lastExecutionResult} to "passed" (or the timeout elapses).
     */
    private void executeScriptAndWaitForResult(String script) throws Exception {
        // Reset the result flag first so a stale "passed" from a previous call
        // cannot satisfy the poll below.
        mActivityTestRule.executeScriptSync("lastExecutionResult = null", false);
        mActivityTestRule.executeScriptSync(script, false);
        CriteriaHelper.pollInstrumentationThread(() -> {
            String result = mActivityTestRule.executeScriptAndExtractString(
                    "lastExecutionResult", false);
            Criteria.checkThat(result, Matchers.is("passed"));
        }, SCRIPT_TIMEOUT_MS, SCRIPT_RETRY_MS);
    }

    /**
     * Navigates to the test page, requests a presentation, and clicks the mock
     * sink's button in the route selection dialog.
     */
    private void startPresentationAndSelectRoute() throws Exception {
        // Request a presentation.
        mActivityTestRule.navigateAndWait(mActivityTestRule.getTestDataURL(TEST_PAGE));
        executeScriptAndWaitForResult(WAIT_DEVICE_SCRIPT);
        executeScriptAndWaitForResult(START_PRESENTATION_SCRIPT);

        // Verify the route selection dialog is showing and make a selection.
        View testRouteButton = getTestWebLayer().getMediaRouteButton(TEST_SINK_NAME);
        Assert.assertNotNull(testRouteButton);
        ClickUtils.mouseSingleClickView(
                InstrumentationRegistry.getInstrumentation(), testRouteButton);
    }

    /**
     * Verifies in javascript that a presentation connection has started.
     *
     * @return the id of the started connection.
     */
    private String verifyPresentationStarted() throws Exception {
        // Verify in javascript that a presentation has started.
        executeScriptAndWaitForResult("checkConnection();");
        String connectionId =
                mActivityTestRule.executeScriptAndExtractString("startedConnection.id", false);
        Assert.assertFalse(connectionId.isEmpty());
        String defaultRequestConnectionId = mActivityTestRule.executeScriptAndExtractString(
                "defaultRequestConnectionId", false);
        Assert.assertEquals(connectionId, defaultRequestConnectionId);
        return connectionId;
    }

    /** Asserts that starting the presentation failed with the given error name/message. */
    void checkStartFailed(String errorName, String errorMessageSubstring) throws Exception {
        String script = String.format(
                "checkStartFailed('%s', '%s');", errorName, errorMessageSubstring);
        executeScriptAndWaitForResult(script);
    }

    /**
     * Basic test where the page requests a route, the user selects a route, and a connection is
     * started.
     */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testBasic() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        verifyPresentationStarted();
        executeScriptAndWaitForResult(TERMINATE_CONNECTION_SCRIPT);
    }

    /** Test of PresentationConnection.onmessage.
     */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testSendAndOnMessage() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        verifyPresentationStarted();
        executeScriptAndWaitForResult("sendMessageAndExpectResponse('foo');");
    }

    /** Test of PresentationConnection.onclose. */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testOnClose() throws Exception {
        // closeRouteWithErrorOnSend makes the mock provider close the route when a
        // message is sent, which should surface as an onclose event in the page.
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/true,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        verifyPresentationStarted();
        executeScriptAndWaitForResult("sendMessageAndExpectConnectionCloseOnError()");
    }

    /**
     * Test that starting the presentation fails when there are no providers that support the given
     * source.
     */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testFailNoProvider() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/true, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        checkStartFailed("UnknownError", "No provider supports createRoute with source");
    }

    /** Tests route creation failure.
     */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    @FlakyTest(message = "https://crbug.com/1181337")
    public void testFailCreateRoute() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/"Unknown sink",
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        checkStartFailed("UnknownError", "Unknown sink");
    }

    /** Tests reconnecting to a presentation (joining a route) from a new tab. */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testJoinRoute() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        startPresentationAndSelectRoute();
        String connectionId = verifyPresentationStarted();

        Tab firstTab = mActivity.getTab();
        // Create a second tab and make it active; the reconnect happens from there.
        Tab secondTab = runOnUiThreadBlocking(() -> {
            Browser browser = mActivity.getTab().getBrowser();
            Tab tab = browser.createTab();
            browser.setActiveTab(tab);
            return tab;
        });

        mActivityTestRule.navigateAndWait(
                secondTab, mActivityTestRule.getTestDataURL(TEST_PAGE), true);
        executeScriptAndWaitForResult(String.format("reconnectConnection(\'%s\');", connectionId));
        String reconnectedConnectionId =
                mActivityTestRule.executeScriptAndExtractString("reconnectedConnection.id", false);
        Assert.assertEquals(connectionId, reconnectedConnectionId);

        // Switch back to the first tab before terminating the connection there.
        runOnUiThreadBlocking(() -> { firstTab.getBrowser().setActiveTab(firstTab); });
        executeScriptAndWaitForResult(TERMINATE_CONNECTION_SCRIPT);
    }

    /** Tests failure of reconnecting to a presentation (joining a route) from a new tab.
     */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @LargeTest
    public void testFailureToJoinRoute() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/"Unknown route");

        startPresentationAndSelectRoute();
        String connectionId = verifyPresentationStarted();

        Tab secondTab = runOnUiThreadBlocking(() -> {
            Browser browser = mActivity.getTab().getBrowser();
            Tab tab = browser.createTab();
            browser.setActiveTab(tab);
            return tab;
        });

        mActivityTestRule.navigateAndWait(
                secondTab, mActivityTestRule.getTestDataURL(TEST_PAGE), true);
        executeScriptAndWaitForResult(
                String.format("reconnectConnectionAndExpectFailure(\'%s\');", connectionId));
    }

    /** Tests the user cancelling the media route selection process. */
    @Test
    @Restriction({UiRestriction.RESTRICTION_TYPE_PHONE, RESTRICTION_TYPE_NON_LOW_END_DEVICE})
    @Feature({"MediaRouter"})
    @DisabledTest(message = "https://crbug.com/1144233")
    @LargeTest
    public void testFailStartCancelled() throws Exception {
        getTestWebLayer().initializeMockMediaRouteProvider(/*closeRouteWithErrorOnSend=*/false,
                /*disableIsSupportsSource=*/false, /*createRouteErrorMessage=*/null,
                /*joinRouteErrorMessage=*/null);

        // Request a presentation.
        mActivityTestRule.navigateAndWait(mActivityTestRule.getTestDataURL(TEST_PAGE));
        executeScriptAndWaitForResult(WAIT_DEVICE_SCRIPT);
        executeScriptAndWaitForResult(START_PRESENTATION_SCRIPT);

        // Verify the route selection dialog is showing but then dismiss it.
        View testRouteButton = getTestWebLayer().getMediaRouteButton(TEST_SINK_NAME);
        Assert.assertNotNull(testRouteButton);

        // Click outside the dialog to dismiss it.
        View topContents = mActivity.getTopContentsContainer();
        TestTouchUtils.singleClick(
                InstrumentationRegistry.getInstrumentation(), 1, topContents.getHeight() + 10);

        checkStartFailed("NotAllowedError", "Dialog closed.");
    }
}
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera;

import android.app.Activity;
import android.graphics.drawable.Drawable;
import android.util.Log;

import com.android.camera.CameraPreference.OnPreferenceChangedListener;
import com.android.camera.drawable.TextDrawable;
import com.android.camera.ui.PieItem;
import com.android.camera.ui.PieItem.OnClickListener;
import com.android.camera.ui.PieRenderer;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Builds and maintains the camera pie-menu items backed by
 * {@link IconListPreference} entries: each preference maps to a {@link PieItem}
 * whose icon/label track the preference value, including scene-mode overrides.
 */
public class PieController {

    // Log tag; made final — it is a constant and is never reassigned.
    private static final String TAG = "CAM_piecontrol";

    protected static final int MODE_PHOTO = 0;
    protected static final int MODE_VIDEO = 1;

    // Angular placement defaults for dial items (radians). CENTER is left
    // non-final because it is protected and may be adjusted by subclasses.
    protected static float CENTER = (float) Math.PI / 2;
    protected static final float SWEEP = 0.06f;

    protected Activity mActivity;
    protected PreferenceGroup mPreferenceGroup;
    protected OnPreferenceChangedListener mListener;
    protected PieRenderer mRenderer;
    private List<IconListPreference> mPreferences;
    // Preference -> pie item, used to refresh icons when values change.
    private Map<IconListPreference, PieItem> mPreferenceMap;
    // Preference -> override value installed by scene mode (see overrideSettings).
    private Map<IconListPreference, String> mOverrides;

    public void setListener(OnPreferenceChangedListener listener) {
        mListener = listener;
    }

    public PieController(Activity activity, PieRenderer pie) {
        mActivity = activity;
        mRenderer = pie;
        mPreferences = new ArrayList<IconListPreference>();
        mPreferenceMap = new HashMap<IconListPreference, PieItem>();
        mOverrides = new HashMap<IconListPreference, String>();
    }

    /** Resets the renderer and item mapping and installs a new preference group. */
    public void initialize(PreferenceGroup group) {
        mRenderer.clearItems();
        mPreferenceMap.clear();
        setPreferenceGroup(group);
    }

    /**
     * Notifies the registered listener that a preference changed.
     *
     * @param pref the changed preference (currently unused; kept for the API).
     */
    public void onSettingChanged(ListPreference pref) {
        if (mListener != null) {
            mListener.onSharedPreferenceChanged();
        }
    }

    protected void setCameraId(int cameraId) {
        ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CAMERA_ID);
        pref.setValue("" + cameraId);
    }

    protected PieItem makeItem(int resId) {
        // We need a mutable version as we change the alpha
        Drawable d = mActivity.getResources().getDrawable(resId).mutate();
        return new PieItem(d, 0);
    }

    protected PieItem makeItem(CharSequence value) {
        TextDrawable drawable = new TextDrawable(mActivity.getResources(), value);
        return new PieItem(drawable, 0);
    }

    /**
     * Builds a pie item for the given preference key with one sub-item per entry.
     *
     * @param prefKey key of an {@link IconListPreference} in the current group.
     * @return the item, or null if the key is unknown in the group.
     */
    public PieItem makeItem(String prefKey) {
        final IconListPreference pref =
                (IconListPreference) mPreferenceGroup.findPreference(prefKey);
        if (pref == null) return null;
        int[] iconIds = pref.getLargeIconIds();
        int resid = -1;
        if (!pref.getUseSingleIcon() && iconIds != null) {
            // Each entry has a corresponding icon.
            // NOTE(review): findIndexOfValue may return -1 if the stored value is
            // not among the entries (e.g. buggy camera driver) — TODO confirm and
            // guard as reloadPreference does for override values.
            int index = pref.findIndexOfValue(pref.getValue());
            resid = iconIds[index];
        } else {
            // The preference only has a single icon to represent it.
            resid = pref.getSingleIcon();
        }
        PieItem item = makeItem(resid);
        item.setLabel(pref.getTitle().toUpperCase());
        mPreferences.add(pref);
        mPreferenceMap.put(pref, item);
        int nOfEntries = pref.getEntries().length;
        if (nOfEntries > 1) {
            for (int i = 0; i < nOfEntries; i++) {
                PieItem inner = null;
                if (iconIds != null) {
                    inner = makeItem(iconIds[i]);
                } else {
                    // No icons available: render the entry text instead.
                    inner = makeItem(pref.getEntries()[i]);
                }
                inner.setLabel(pref.getLabels()[i]);
                item.addItem(inner);
                final int index = i;
                inner.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(PieItem item) {
                        pref.setValueIndex(index);
                        reloadPreference(pref);
                        onSettingChanged(pref);
                    }
                });
            }
        }
        return item;
    }

    /**
     * Builds a pie item that cycles through the preference's values on click
     * (a "switch"-style control, e.g. front/back camera).
     *
     * @param prefKey key of an {@link IconListPreference} in the current group.
     * @param addListener whether clicking the item should advance the value.
     * @return the item, or null if the key is unknown in the group.
     */
    public PieItem makeSwitchItem(final String prefKey, boolean addListener) {
        final IconListPreference pref =
                (IconListPreference) mPreferenceGroup.findPreference(prefKey);
        if (pref == null) return null;
        int[] iconIds = pref.getLargeIconIds();
        int resid = -1;
        int index = pref.findIndexOfValue(pref.getValue());
        if (!pref.getUseSingleIcon() && iconIds != null) {
            // Each entry has a corresponding icon.
            resid = iconIds[index];
        } else {
            // The preference only has a single icon to represent it.
            resid = pref.getSingleIcon();
        }
        PieItem item = makeItem(resid);
        item.setLabel(pref.getLabels()[index]);
        item.setImageResource(mActivity, resid);
        mPreferences.add(pref);
        mPreferenceMap.put(pref, item);
        if (addListener) {
            final PieItem fitem = item;
            item.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(PieItem item) {
                    if (!item.isEnabled()) {
                        return;
                    }
                    // Re-fetch the preference: the group may have been replaced
                    // via setPreferenceGroup since this item was created.
                    IconListPreference pref = (IconListPreference) mPreferenceGroup
                            .findPreference(prefKey);
                    int index = pref.findIndexOfValue(pref.getValue());
                    CharSequence[] values = pref.getEntryValues();
                    // Advance to the next value, wrapping around.
                    index = (index + 1) % values.length;
                    pref.setValueIndex(index);
                    fitem.setLabel(pref.getLabels()[index]);
                    // Redundant cast removed: pref is already an IconListPreference.
                    fitem.setImageResource(mActivity, pref.getLargeIconIds()[index]);
                    reloadPreference(pref);
                    onSettingChanged(pref);
                }
            });
        }
        return item;
    }

    /**
     * Creates a dial item for the given icon. The pref/center/sweep parameters
     * are currently unused; they are kept so subclasses and callers share one
     * signature for dial placement.
     */
    public PieItem makeDialItem(ListPreference pref, int iconId, float center, float sweep) {
        PieItem item = makeItem(iconId);
        return item;
    }

    /** Builds an item for the key and adds it to the renderer. */
    public void addItem(String prefKey) {
        PieItem item = makeItem(prefKey);
        mRenderer.addItem(item);
    }

    /** Syncs the item's label and icon with the preference's current value. */
    public void updateItem(PieItem item, String prefKey) {
        IconListPreference pref = (IconListPreference) mPreferenceGroup
                .findPreference(prefKey);
        if (pref != null) {
            int index = pref.findIndexOfValue(pref.getValue());
            item.setLabel(pref.getLabels()[index]);
            // Redundant cast removed: pref is already an IconListPreference.
            item.setImageResource(mActivity, pref.getLargeIconIds()[index]);
        }
    }

    public void setPreferenceGroup(PreferenceGroup group) {
        mPreferenceGroup = group;
    }

    /** Reloads all values from the group and refreshes every mapped item. */
    public void reloadPreferences() {
        mPreferenceGroup.reloadValue();
        for (IconListPreference pref : mPreferenceMap.keySet()) {
            reloadPreference(pref);
        }
    }

    /** Refreshes a single item's icon, honoring any scene-mode override. */
    private void reloadPreference(IconListPreference pref) {
        if (pref.getUseSingleIcon()) return;
        PieItem item = mPreferenceMap.get(pref);
        String overrideValue = mOverrides.get(pref);
        int[] iconIds = pref.getLargeIconIds();
        if (iconIds != null) {
            // Each entry has a corresponding icon.
            int index;
            if (overrideValue == null) {
                index = pref.findIndexOfValue(pref.getValue());
            } else {
                index = pref.findIndexOfValue(overrideValue);
                if (index == -1) {
                    // Avoid the crash if camera driver has bugs.
                    Log.e(TAG, "Fail to find override value=" + overrideValue);
                    pref.print();
                    return;
                }
            }
            item.setImageResource(mActivity, iconIds[index]);
        } else {
            // The preference only has a single icon to represent it.
            item.setImageResource(mActivity, pref.getSingleIcon());
        }
    }

    // Scene mode may override other camera settings (ex: flash mode).
    public void overrideSettings(final String ... keyvalues) {
        if (keyvalues.length % 2 != 0) {
            throw new IllegalArgumentException();
        }
        for (IconListPreference pref : mPreferenceMap.keySet()) {
            override(pref, keyvalues);
        }
    }

    /**
     * Installs (or clears) the override for one preference from the key/value
     * pairs, disabling the item while a non-null override is active.
     */
    private void override(IconListPreference pref, final String ... keyvalues) {
        mOverrides.remove(pref);
        for (int i = 0; i < keyvalues.length; i += 2) {
            String key = keyvalues[i];
            String value = keyvalues[i + 1];
            if (key.equals(pref.getKey())) {
                mOverrides.put(pref, value);
                PieItem item = mPreferenceMap.get(pref);
                item.setEnabled(value == null);
                break;
            }
        }
        reloadPreference(pref);
    }
}
package ch.alv.sysinfos.web.rest.system;

import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.inject.Inject;

import ch.alv.sysinfos.Application;
import ch.alv.sysinfos.config.Route;
import ch.alv.sysinfos.domain.system.System;
import ch.alv.sysinfos.domain.system.SystemMessage;
import ch.alv.sysinfos.domain.system.SystemMessageType;
import ch.alv.sysinfos.domain.system.SystemState;
import ch.alv.sysinfos.repository.SystemMessageListRepository;
import ch.alv.sysinfos.repository.SystemMessageRepository;
import ch.alv.sysinfos.repository.SystemRepository;
import ch.alv.sysinfos.web.rest.TestUtil;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.format.support.FormattingConversionService;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;

/**
 * Test class for the SystemMessageResource REST controller.
 *
 * @see SystemMessageResource
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = Application.class)
@WebAppConfiguration
@Transactional
public class SystemMessageResourceTest {

    private static final String ISO_DATE_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZZ";
    private static final DateTimeZone DEFAULT_TIME_ZONE = DateTimeZone.forID("CET");
    private static final DateTimeFormatter dateTimeFormatter =
            DateTimeFormat.forPattern(ISO_DATE_TIME).withZone(DEFAULT_TIME_ZONE);

    private static final String DEFAULT_LOCALE = Locale.ENGLISH.getLanguage();
    private static final String UPDATED_LOCALE = Locale.FRENCH.getLanguage();

    private static final String DEFAULT_SUBJECT = "SAMPLE_TEXT";
    private static final String UPDATED_SUBJECT = "UPDATED_TEXT";

    private static final String DEFAULT_BODY = "SAMPLE_TEXT";
    private static final String UPDATED_BODY = "UPDATED_TEXT";

    private static final DateTime DEFAULT_VALID_FROM =
            new DateTime().withZone(DEFAULT_TIME_ZONE);
    private static final DateTime UPDATED_VALID_FROM =
            new DateTime().withZone(DEFAULT_TIME_ZONE).plusHours(1).withMillisOfSecond(0);
    private static final String DEFAULT_VALID_FROM_STR =
            dateTimeFormatter.print(DEFAULT_VALID_FROM);

    private static final DateTime DEFAULT_VALID_TO =
            new DateTime().plusHours(2).withZone(DEFAULT_TIME_ZONE);
    private static final DateTime UPDATED_VALID_TO =
            new DateTime().plusHours(4).withZone(DEFAULT_TIME_ZONE).withMillisOfSecond(0);
    private static final String DEFAULT_VALID_TO_STR =
            dateTimeFormatter.print(DEFAULT_VALID_TO);

    private static final SystemState DEFAULT_SEVERITY = SystemState.OPERATIONAL;
    private static final SystemState UPDATED_SEVERITY = SystemState.DEGRADED_PERFORMANCE;

    private static final SystemMessageType DEFAULT_TYPE = SystemMessageType.INCIDENT;
    private static final SystemMessageType UPDATED_TYPE = SystemMessageType.MAINTENANCE_WINDOW;

    // Raw "Map noticationMap" replaced with a properly typed, correctly spelled field.
    private final Map<String, String> notificationMap = new HashMap<>();

    @Inject
    private FormattingConversionService formattingConversionService;

    @Inject
    private MappingJackson2HttpMessageConverter mappingJackson2HttpMessageConverter;

    @Inject
    private SystemMessageRepository systemMessageRepository;

    @Inject
    private SystemMessageListRepository systemMessageListRepository;

    @Inject
    private SystemRepository systemRepository;

    private MockMvc restSystemMessageMockMvc;

    private SystemMessage systemMessage;

    private System system;

    private Map<String, String> subject;

    private Map<String, String> body;

    /**
     * Single {@code @Before} method. The original class had two @Before methods
     * ({@code setup} and {@code initTest}); JUnit 4 does not guarantee the execution
     * order of multiple @Before methods declared in the same class, yet initTest
     * depended on the {@code system} instance created in setup. Merging them makes
     * the ordering explicit and deterministic.
     */
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        SystemMessageResource systemMessageResource =
                new SystemMessageResource(systemMessageRepository, systemMessageListRepository);
        this.restSystemMessageMockMvc = MockMvcBuilders.standaloneSetup(systemMessageResource)
                .setConversionService(formattingConversionService)
                .setMessageConverters(mappingJackson2HttpMessageConverter)
                .build();

        // Build the owning System first: the SystemMessage below references it.
        system = new System();
        system.setActive(true);
        Map<String, String> systemName = new HashMap<>();
        // Random suffix avoids unique-name collisions across tests.
        systemName.put(Locale.ENGLISH.getLanguage(), "testName" + Math.random());
        system.setName(systemName);
        system.setContactEmail("test@testmail.com");
        system.setContactName("Contact Testperson");

        notificationMap.put("de", "testDe");
        notificationMap.put("fr", "testFr");
        notificationMap.put("it", "testIt");

        // Build the SystemMessage under test, attached to the System above.
        systemMessage = new SystemMessage();
        subject = new HashMap<>();
        subject.put(DEFAULT_LOCALE, DEFAULT_SUBJECT);
        systemMessage.setSubject(subject);
        body = new HashMap<>();
        body.put(DEFAULT_LOCALE, DEFAULT_BODY);
        systemMessage.setBody(body);
        systemMessage.setValidFrom(DEFAULT_VALID_FROM);
        systemMessage.setValidTo(DEFAULT_VALID_TO);
        systemMessage.setSeverity(DEFAULT_SEVERITY);
        systemMessage.setType(DEFAULT_TYPE);
        List<System> systems = new ArrayList<>();
        systems.add(system);
        systemMessage.setSystems(systems);
    }

    @Test
    public void createSystemMessage() throws Exception {
        // Validate the database is empty
        assertThat(systemMessageRepository.findAll()).hasSize(0);
        system = systemRepository.save(system);

        // Create the SystemMessage
        restSystemMessageMockMvc.perform(post(Route.REST_APP_SYSTEM_MESSAGES)
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(systemMessage)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.validFrom").value(DEFAULT_VALID_FROM_STR))
                .andExpect(jsonPath("$.validTo").value(DEFAULT_VALID_TO_STR))
                .andExpect(jsonPath("$.severity").value(DEFAULT_SEVERITY.toString()))
                .andExpect(jsonPath("$.type").value(DEFAULT_TYPE.toString()));

        // Validate the SystemMessage in the database
        assertThat(systemMessageRepository.count()).isEqualTo(1L);
        SystemMessage testSystemMessage = systemMessageRepository.findAll().get(0);
        assertThat(testSystemMessage.getSubject()).isEqualTo(subject);
        assertThat(testSystemMessage.getBody()).isEqualTo(body);
        assertThat(testSystemMessage.getSeverity()).isEqualTo(DEFAULT_SEVERITY);
        assertThat(testSystemMessage.getType()).isEqualTo(DEFAULT_TYPE);
    }

    @Test
    public void getAllSystemMessages() throws Exception {
        // Initialize the database
        system = systemRepository.save(system);
        systemMessage = systemMessageRepository.saveAndFlush(systemMessage);

        // Get all the systemMessages (duplicate contentType expectation removed)
        restSystemMessageMockMvc.perform(get(Route.REST_APP_SYSTEM_MESSAGES))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.[0].id").value(systemMessage.getId().intValue()))
                .andExpect(jsonPath("$.[0].subject.en").value(DEFAULT_SUBJECT))
                .andExpect(jsonPath("$.[0].validFrom").value(DEFAULT_VALID_FROM_STR))
                .andExpect(jsonPath("$.[0].validTo").value(DEFAULT_VALID_TO_STR))
                .andExpect(jsonPath("$.[0].severity").value(DEFAULT_SEVERITY.toString()))
                .andExpect(jsonPath("$.[0].type").value(DEFAULT_TYPE.toString()));
    }

    @Test
    public void getSystemMessage() throws Exception {
        // Initialize the database
        system = systemRepository.save(system);
        systemMessageRepository.saveAndFlush(systemMessage);

        // Get the systemMessage
        restSystemMessageMockMvc
                .perform(get(Route.REST_APP_SYSTEM_MESSAGES + "/{id}", systemMessage.getId()))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.id").value(systemMessage.getId().intValue()))
                .andExpect(jsonPath("$.subject.en").value(DEFAULT_SUBJECT))
                .andExpect(jsonPath("$.body.en").value(DEFAULT_BODY))
                .andExpect(jsonPath("$.validFrom").value(DEFAULT_VALID_FROM_STR))
                .andExpect(jsonPath("$.validTo").value(DEFAULT_VALID_TO_STR))
                .andExpect(jsonPath("$.severity").value(DEFAULT_SEVERITY.toString()))
                .andExpect(jsonPath("$.type").value(DEFAULT_TYPE.toString()));
    }

    @Test
    public void getNonExistingSystemMessage() throws Exception {
        // Get the systemMessage
        restSystemMessageMockMvc.perform(get(Route.REST_APP_SYSTEM_MESSAGES + "/{id}", 999L))
                .andExpect(status().isNotFound());
    }

    @Test
    public void updateSystemMessage() throws Exception {
        // Initialize the database
        system = systemRepository.save(system);
        systemMessageRepository.saveAndFlush(systemMessage);

        // Update the systemMessage
        systemMessage.getSubject().put(UPDATED_LOCALE, UPDATED_SUBJECT);
        systemMessage.getBody().put(UPDATED_LOCALE, UPDATED_BODY);
        systemMessage.setValidFrom(UPDATED_VALID_FROM);
        systemMessage.setValidTo(UPDATED_VALID_TO);
        systemMessage.setSeverity(UPDATED_SEVERITY);
        systemMessage.setType(UPDATED_TYPE);
        restSystemMessageMockMvc.perform(post(Route.REST_APP_SYSTEM_MESSAGES)
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(systemMessage)))
                .andExpect(status().isOk());

        // Validate the SystemMessage in the database
        List<SystemMessage> systemMessages = systemMessageRepository.findAll();
        assertThat(systemMessages).hasSize(1);
        SystemMessage testSystemMessage = systemMessages.iterator().next();
        assertThat(testSystemMessage.getSubject()).isEqualTo(subject);
        assertThat(testSystemMessage.getBody()).isEqualTo(body);
        assertThat(testSystemMessage.getValidFrom().withZone(DEFAULT_TIME_ZONE))
                .isEqualTo(UPDATED_VALID_FROM);
        assertThat(testSystemMessage.getValidTo().withZone(DEFAULT_TIME_ZONE))
                .isEqualTo(UPDATED_VALID_TO);
        assertThat(testSystemMessage.getSeverity()).isEqualTo(UPDATED_SEVERITY);
        assertThat(testSystemMessage.getType()).isEqualTo(UPDATED_TYPE);
    }

    @Test
    public void deleteSystemMessage() throws Exception {
        // Initialize the database
        system = systemRepository.save(system);
        systemMessageRepository.saveAndFlush(systemMessage);
        int size = systemMessageRepository.findAll().size();

        // Delete the systemMessage
        restSystemMessageMockMvc
                .perform(delete(Route.REST_APP_SYSTEM_MESSAGES + "/{id}", systemMessage.getId())
                        .accept(TestUtil.APPLICATION_JSON_UTF8))
                .andExpect(status().isOk());

        // Validate the database is empty
        List<SystemMessage> systemMessages = systemMessageRepository.findAll();
        assertThat(systemMessages).hasSize(size - 1);
    }
}
/*
 * SqlOptionsPanel.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at.
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.gui.dialogs.export;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;

import javax.swing.ButtonGroup;
import javax.swing.ComboBoxModel;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;

import workbench.log.LogMgr;
import workbench.resource.ResourceMgr;
import workbench.resource.Settings;

import workbench.db.ColumnIdentifier;
import workbench.db.TableIdentifier;
import workbench.db.exporter.BlobMode;
import workbench.db.exporter.ExportType;

import workbench.gui.WbSwingUtilities;
import workbench.gui.components.ColumnSelectorPanel;
import workbench.gui.components.FoldingPanel;
import workbench.gui.components.KeyColumnSelectorPanel;

import workbench.storage.MergeGenerator;
import workbench.storage.ResultInfo;

import workbench.util.StringUtil;

/**
 * Panel holding the SQL-specific export options: statement type (INSERT, UPDATE,
 * DELETE/INSERT, MERGE, DELETE), date/blob literal formats, commit interval,
 * alternate target table name and key column selection.
 *
 * @author Thomas Kellerer
 */
public class SqlOptionsPanel
  extends JPanel
  implements SqlOptions, ActionListener
{
  // Key columns chosen by the user via the column selector dialog (null until selected).
  private List<String> keyColumns;
  // Lazily created dialog panel for picking key columns.
  private ColumnSelectorPanel columnSelectorPanel;
  // Column metadata of the result being exported; may be null.
  private ResultInfo tableColumns;

  /**
   * Creates the panel, populates the literal/merge/blob type dropdowns and
   * wraps the extended options in a collapsible FoldingPanel.
   *
   * @param info column/PK metadata of the result to export; may be null
   */
  public SqlOptionsPanel(ResultInfo info)
  {
    super();
    initComponents();
    setResultInfo(info);
    List<String> types = Settings.getInstance().getLiteralTypeList();
    ComboBoxModel model = new DefaultComboBoxModel(types.toArray());
    literalTypes.setModel(model);

    List<String> mTypes = MergeGenerator.Factory.getSupportedTypes();
    ComboBoxModel mergeModel = new DefaultComboBoxModel(mTypes.toArray());
    mergeTypes.setModel(mergeModel);

    List<String> bTypes = BlobMode.getTypes();
    // Base64 is not offered for SQL exports
    bTypes.remove(BlobMode.Base64.getTypeString());
    ComboBoxModel blobModel = new DefaultComboBoxModel(bTypes.toArray());
    blobTypes.setModel(blobModel);
    blobTypes.setSelectedItem(BlobMode.SaveToFile.toString());
    WbSwingUtilities.setMinimumSizeFromCols(commitCount);

    // Replace the plain extended options panel with a collapsible one,
    // re-using the constraints the form editor assigned to it.
    GridBagLayout layout = (GridBagLayout)getLayout();
    GridBagConstraints c = layout.getConstraints(extOptionsPanel);
    remove(extOptionsPanel);
    FoldingPanel p = new FoldingPanel(extOptionsPanel);
    add(p, c);
    invalidate();
  }

  /**
   * Stores the result metadata and enables/disables the key-dependent
   * statement types (UPDATE, DELETE/INSERT) based on whether PK columns exist.
   * Also pre-fills the alternate table name from the update table if known.
   */
  public final void setResultInfo(ResultInfo info)
  {
    this.tableColumns = info;
    boolean hasColumns = tableColumns != null;
    boolean keysPresent = (info == null ? false : info.hasPkColumns());
    this.selectKeys.setEnabled(hasColumns);
    this.setIncludeDeleteInsert(keysPresent);
    this.setIncludeUpdate(keysPresent);
    if (info != null)
    {
      TableIdentifier table = info.getUpdateTable();
      if (table != null)
      {
        this.alternateTable.setText(table.getTableName());
      }
      else
      {
        // placeholder the user is expected to replace
        this.alternateTable.setText("target_table");
      }
    }
  }

  /**
   * Persists the current dialog values to the global Settings.
   */
  public void saveSettings()
  {
    Settings s = Settings.getInstance();
    s.setProperty("workbench.export.sql.commitevery", this.getCommitEvery());
    s.setProperty("workbench.export.sql.createtable", this.getCreateTable());
    s.setProperty("workbench.export.sql.saveas.dateliterals", this.getDateLiteralType());
    s.setProperty("workbench.export.sql.saveas.blobliterals", this.getBlobMode().getTypeString());
  }

  /**
   * Restores the dialog values from the global Settings, falling back to
   * "dbms" date literals and file-based blobs.
   */
  public void restoreSettings()
  {
    Settings s = Settings.getInstance();
    this.setCommitEvery(s.getIntProperty("workbench.export.sql.commitevery", 0));
    this.setCreateTable(s.getBoolProperty("workbench.export.sql.createtable"));
    String def = s.getProperty("workbench.export.sql.default.dateliterals", "dbms");
    String type = s.getProperty("workbench.export.sql.saveas.dateliterals", def);
    this.literalTypes.setSelectedItem(type);
    type = s.getProperty("workbench.export.sql.saveas.blobliterals", BlobMode.SaveToFile.getTypeString());
    this.blobTypes.setSelectedItem(type);
  }

  /** @return the merge dialect currently selected in the dropdown */
  @Override
  public String getMergeType()
  {
    return (String)mergeTypes.getSelectedItem();
  }

  /** @return the BlobMode corresponding to the selected blob literal type */
  @Override
  public BlobMode getBlobMode()
  {
    String type = (String)blobTypes.getSelectedItem();
    BlobMode mode = BlobMode.getMode(type);
    return mode;
  }

  /** @return the selected date literal type */
  @Override
  public String getDateLiteralType()
  {
    return (String)literalTypes.getSelectedItem();
  }

  /**
   * @return the trimmed alternate table name, or null if the field is blank
   */
  @Override
  public String getAlternateUpdateTable()
  {
    String s = alternateTable.getText();
    if (StringUtil.isNonBlank(s)) return s.trim();
    return null;
  }

  /**
   * Sets the alternate table name field (blank for null).
   */
  @Override
  public void setAlternateUpdateTable(String table)
  {
    this.alternateTable.setText((table == null ? "" : table.trim()));
  }

  /**
   * @return the commit interval entered by the user; 0 for an empty field,
   *         -1 if the text could not be parsed as a number
   */
  @Override
  public int getCommitEvery()
  {
    int result = -1;
    try
    {
      String value = this.commitCount.getText();
      if (value != null && value.length() > 0)
      {
        result = Integer.parseInt(value);
      }
      else
      {
        result = 0;
      }
    }
    catch (Exception e)
    {
      LogMgr.logDebug("SqlOptionsPanel.getCommitEvery", "Could not retrieve commit frequency", e);
    }
    return result;
  }

  /**
   * Pre-selects the merge dialect matching the given DBMS id.
   */
  public void setDbId(String dbid)
  {
    String currentType = MergeGenerator.Factory.getTypeForDBID(dbid);
    mergeTypes.setSelectedItem(currentType);
  }

  // Removes a statement type from the syntax dropdown (no-op if absent).
  private void removeSyntaxType(String type)
  {
    syntaxType.removeItem(type);
  }

  // Returns the dropdown index of the given statement type, or -1 if not present.
  private int getSyntaxTypeIndex(String type)
  {
    int count = syntaxType.getItemCount();
    for (int i=0; i < count; i++)
    {
      String item = (String)syntaxType.getItemAt(i);
      if (item.equals(type)) return i;
    }
    return -1;
  }

  // Inserts a statement type at its conventional position:
  // UPDATE after INSERT, DELETE/INSERT after UPDATE, MERGE at the end.
  private void addSyntaxType(String type)
  {
    DefaultComboBoxModel model = (DefaultComboBoxModel)syntaxType.getModel();
    int index = getSyntaxTypeIndex(type);
    if (type.equals("MERGE") && index == -1)
    {
      // merge always goes to the end
      model.addElement(type);
    }
    if (type.equals("UPDATE") && index == -1)
    {
      int insertIndex = getSyntaxTypeIndex("INSERT");
      model.insertElementAt(type, insertIndex + 1);
    }
    if (type.equals("DELETE/INSERT") && index == -1)
    {
      int updateIndex = getSyntaxTypeIndex("UPDATE");
      model.insertElementAt(type, updateIndex + 1);
    }
  }

  /**
   * Shows or hides the MERGE option together with the merge dialect dropdown.
   */
  public void setIncludeMerge(boolean flag)
  {
    if (flag)
    {
      addSyntaxType("MERGE");
      mergeTypes.setEnabled(true);
      mergeTypesLabel.setEnabled(true);
    }
    else
    {
      removeSyntaxType("MERGE");
      mergeTypes.setEnabled(false);
      mergeTypesLabel.setEnabled(false);
    }
  }

  /**
   * Shows or hides the UPDATE statement type (requires key columns).
   */
  public void setIncludeUpdate(boolean flag)
  {
    if (flag)
    {
      addSyntaxType("UPDATE");
    }
    else
    {
      removeSyntaxType("UPDATE");
    }
  }

  /**
   * Shows or hides DELETE/INSERT and DELETE (both require key columns).
   */
  public void setIncludeDeleteInsert(boolean flag)
  {
    if (flag)
    {
      addSyntaxType("DELETE/INSERT");
      addSyntaxType("DELETE");
    }
    else
    {
      removeSyntaxType("DELETE/INSERT");
      removeSyntaxType("DELETE");
    }
  }

  // Returns the statement type currently selected in the dropdown.
  private String getSelectedSyntaxType()
  {
    return (String)syntaxType.getSelectedItem();
  }
  /**
   * Maps the selected dropdown entry to the corresponding ExportType,
   * defaulting to SQL_INSERT for unknown/INSERT entries.
   */
  @Override
  public ExportType getExportType()
  {
    String type = getSelectedSyntaxType();
    if (type.equals("UPDATE"))
    {
      return ExportType.SQL_UPDATE;
    }
    if (type.equals("DELETE"))
    {
      return ExportType.SQL_DELETE;
    }
    if (type.equals("DELETE/INSERT"))
    {
      return ExportType.SQL_DELETE_INSERT;
    }
    if (type.equals("MERGE"))
    {
      return ExportType.SQL_MERGE;
    }
    return ExportType.SQL_INSERT;
  }

  /** @return true if a CREATE TABLE statement should be generated */
  @Override
  public boolean getCreateTable()
  {
    return createTable.isSelected();
  }

  /**
   * Sets the commit interval field; values &lt;= 0 clear the field.
   */
  @Override
  public void setCommitEvery(int value)
  {
    if (value > 0)
    {
      this.commitCount.setText(Integer.toString(value));
    }
    else
    {
      this.commitCount.setText("");
    }
  }

  /**
   * Selects the dropdown entry matching the given ExportType
   * (inverse of {@link #getExportType()}).
   */
  @Override
  public void setExportType(ExportType type)
  {
    switch (type)
    {
      case SQL_DELETE:
        syntaxType.setSelectedItem("DELETE");
        break;
      case SQL_DELETE_INSERT:
        syntaxType.setSelectedItem("DELETE/INSERT");
        break;
      case SQL_UPDATE:
        syntaxType.setSelectedItem("UPDATE");
        break;
      case SQL_MERGE:
        syntaxType.setSelectedItem("MERGE");
        break;
      default:
        syntaxType.setSelectedItem("INSERT");
    }
  }

  /** Sets the "include CREATE TABLE" checkbox. */
  @Override
  public void setCreateTable(boolean flag)
  {
    this.createTable.setSelected(flag);
  }

  /** @return the key columns chosen by the user, or null if none were selected */
  @Override
  public List<String> getKeyColumns()
  {
    return keyColumns;
  }

  /**
   * Opens the key column selector dialog and, on OK, stores the chosen
   * columns and re-enables the key-dependent statement types accordingly.
   */
  private void selectColumns()
  {
    if (this.tableColumns == null) return;

    if (this.columnSelectorPanel == null)
    {
      this.columnSelectorPanel = new KeyColumnSelectorPanel(tableColumns);
    }
    else
    {
      // re-apply the previous selection when the dialog is reused
      this.columnSelectorPanel.selectColumns(this.keyColumns);
    }

    int choice = JOptionPane.showConfirmDialog(SwingUtilities.getWindowAncestor(this), this.columnSelectorPanel, ResourceMgr.getString("MsgSelectKeyColumnsWindowTitle"), JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);

    if (choice == JOptionPane.OK_OPTION)
    {
      this.keyColumns = null;

      List selected = this.columnSelectorPanel.getSelectedColumns();
      int size = selected.size();
      this.keyColumns = new ArrayList<String>(size);
      for (int i=0; i < size; i++)
      {
        ColumnIdentifier col = (ColumnIdentifier)selected.get(i);
        this.keyColumns.add(col.getColumnName());
      }
      boolean keysPresent = (size > 0);
      this.setIncludeUpdate(keysPresent);
      this.setIncludeDeleteInsert(keysPresent);
      this.setIncludeMerge(keysPresent);
    }
  }

  /** This method is called from within the constructor to
   * initialize the form.
   * WARNING: Do NOT modify this code. The content of this method is
   * always regenerated by the Form Editor.
   */
  // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
  private void initComponents()
  {
    GridBagConstraints gridBagConstraints;

    typeGroup = new ButtonGroup();
    selectKeys = new JButton();
    jPanel2 = new JPanel();
    jPanel4 = new JPanel();
    literalTypesLabel = new JLabel();
    literalTypes = new JComboBox();
    mergeTypesLabel = new JLabel();
    mergeTypes = new JComboBox();
    jLabel2 = new JLabel();
    syntaxType = new JComboBox();
    extOptionsPanel = new JPanel();
    jLabel1 = new JLabel();
    alternateTable = new JTextField();
    createTable = new JCheckBox();
    blobTypesLabel = new JLabel();
    blobTypes = new JComboBox();
    commitLabel = new JLabel();
    commitCount = new JTextField();

    setLayout(new GridBagLayout());

    selectKeys.setText(ResourceMgr.getString("LblSelectKeyColumns")); // NOI18N
    selectKeys.setToolTipText(ResourceMgr.getString("d_LblSelectKeyColumns")); // NOI18N
    selectKeys.addActionListener(this);
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.anchor = GridBagConstraints.NORTHWEST;
    gridBagConstraints.insets = new Insets(0, 4, 6, 0);
    add(selectKeys, gridBagConstraints);

    jPanel2.setLayout(new BorderLayout(10, 0));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = GridBagConstraints.NORTHWEST;
    gridBagConstraints.insets = new Insets(3, 4, 0, 21);
    add(jPanel2, gridBagConstraints);

    jPanel4.setLayout(new GridBagLayout());

    literalTypesLabel.setText(ResourceMgr.getString("LblLiteralType")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    jPanel4.add(literalTypesLabel, gridBagConstraints);

    literalTypes.setToolTipText(ResourceMgr.getDescription("LblLiteralType"));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(0, 4, 0, 0);
    jPanel4.add(literalTypes, gridBagConstraints);

    mergeTypesLabel.setText(ResourceMgr.getString("LblMergeType")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    jPanel4.add(mergeTypesLabel, gridBagConstraints);

    mergeTypes.setToolTipText(ResourceMgr.getDescription("LblLiteralType"));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(0, 4, 0, 0);
    jPanel4.add(mergeTypes, gridBagConstraints);

    jLabel2.setText(ResourceMgr.getString("LblSqlExpType")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    jPanel4.add(jLabel2, gridBagConstraints);

    syntaxType.setModel(new DefaultComboBoxModel(new String[] { "INSERT", "UPDATE", "DELETE/INSERT", "MERGE", "DELETE" }));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(0, 4, 0, 0);
    jPanel4.add(syntaxType, gridBagConstraints);

    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.FIRST_LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(3, 4, 8, 6);
    add(jPanel4, gridBagConstraints);

    extOptionsPanel.setLayout(new GridBagLayout());

    jLabel1.setText(ResourceMgr.getString("LblUseExportTableName")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = GridBagConstraints.WEST;
    gridBagConstraints.insets = new Insets(5, 0, 0, 0);
    extOptionsPanel.add(jLabel1, gridBagConstraints);

    alternateTable.setMinimumSize(new Dimension(40, 20));
    alternateTable.setPreferredSize(new Dimension(40, 20));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new Insets(2, 0, 0, 4);
    extOptionsPanel.add(alternateTable, gridBagConstraints);

    createTable.setText(ResourceMgr.getString("LblExportIncludeCreateTable")); // NOI18N
    createTable.setToolTipText(ResourceMgr.getString("d_LblExportIncludeCreateTable")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(5, 0, 0, 0);
    extOptionsPanel.add(createTable, gridBagConstraints);

    blobTypesLabel.setText(ResourceMgr.getString("LblBlobType")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    extOptionsPanel.add(blobTypesLabel, gridBagConstraints);

    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new Insets(0, 4, 0, 0);
    extOptionsPanel.add(blobTypes, gridBagConstraints);

    commitLabel.setText(ResourceMgr.getString("LblExportCommitEvery")); // NOI18N
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    extOptionsPanel.add(commitLabel, gridBagConstraints);

    commitCount.setColumns(4);
    commitCount.setMinimumSize(new Dimension(32, 20));
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.LINE_START;
    gridBagConstraints.insets = new Insets(0, 4, 0, 0);
    extOptionsPanel.add(commitCount, gridBagConstraints);

    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.FIRST_LINE_START;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new Insets(0, 4, 0, 2);
    add(extOptionsPanel, gridBagConstraints);
  }

  // Code for dispatching events from components to event handlers.
  public void actionPerformed(ActionEvent evt)
  {
    if (evt.getSource() == selectKeys)
    {
      SqlOptionsPanel.this.selectKeysActionPerformed(evt);
    }
  }// </editor-fold>//GEN-END:initComponents

  private void selectKeysActionPerformed(java.awt.event.ActionEvent evt)
  {//GEN-FIRST:event_selectKeysActionPerformed
    selectColumns();
  }//GEN-LAST:event_selectKeysActionPerformed

  // Variables declaration - do not modify//GEN-BEGIN:variables
  public JTextField alternateTable;
  public JComboBox blobTypes;
  public JLabel blobTypesLabel;
  public JTextField commitCount;
  public JLabel commitLabel;
  public JCheckBox createTable;
  public JPanel extOptionsPanel;
  public JLabel jLabel1;
  public JLabel jLabel2;
  public JPanel jPanel2;
  public JPanel jPanel4;
  public JComboBox literalTypes;
  public JLabel literalTypesLabel;
  public JComboBox mergeTypes;
  public JLabel mergeTypesLabel;
  public JButton selectKeys;
  public JComboBox syntaxType;
  public ButtonGroup typeGroup;
  // End of variables declaration//GEN-END:variables
}
/*
 * Copyright (c) 2006, Stephen Kelvin Friedrich, All rights reserved.
 *
 * This a BSD license. If you use or enhance the code, I'd be pleased if you sent a mail to s.friedrich@eekboom.com
 *
 * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
 * following conditions are met:
 *
 *     Redistributions of source code must retain the above copyright notice, this list of conditions and the
 *     following disclaimer.
 *
 *     Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
 *     following disclaimer in the documentation and/or other materials provided with the distribution.
 *
 *     Neither the name of the "Stephen Kelvin Friedrich" nor the names of its contributors may be used to endorse
 *     or promote products derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.eekboom.utils;

import java.text.Collator;
import java.util.Comparator;

/**
 * Utility class for common String operations
 */
public final class Strings {
    /**
     * <p>A string comparator that does case sensitive comparisons and handles embedded numbers correctly.</p>
     * <p><b>Do not use</b> if your app might ever run on any locale that uses more than 7-bit ascii characters.</p>
     */
    private static final Comparator<String> NATURAL_COMPARATOR_ASCII = new Comparator<String>() {
        public int compare(String o1, String o2) {
            return compareNaturalAscii(o1, o2);
        }
    };

    /**
     * <p>A string comparator that does case insensitive comparisons and handles embedded numbers correctly.</p>
     * <p><b>Do not use</b> if your app might ever run on any locale that uses more than 7-bit ascii characters.</p>
     */
    private static final Comparator<String> IGNORE_CASE_NATURAL_COMPARATOR_ASCII = new Comparator<String>() {
        public int compare(String o1, String o2) {
            return compareNaturalIgnoreCaseAscii(o1, o2);
        }
    };

    /**
     * This is a utility class (static methods only), don't instantiate.
     */
    private Strings() {
    }

    /**
     * Returns a comparator that compares contained numbers based on their numeric values and compares other parts
     * using the current locale's order rules.
     * <p>For example in German locale this will be a comparator that handles umlauts correctly and ignores
     * upper/lower case differences.</p>
     *
     * @return <p>A string comparator that uses the current locale's order rules and handles embedded numbers
     *         correctly.</p>
     * @see #getNaturalComparator(java.text.Collator)
     */
    public static Comparator<String> getNaturalComparator() {
        Collator collator = Collator.getInstance();
        return getNaturalComparator(collator);
    }

    /**
     * Returns a comparator that compares contained numbers based on their numeric values and compares other parts
     * using the given collator.
     *
     * @param collator used for locale specific comparison of text (non-number) subwords - must not be null
     * @return <p>A string comparator that uses the given Collator to compare subwords and handles embedded numbers
     *         correctly.</p>
     * @see #getNaturalComparator()
     */
    public static Comparator<String> getNaturalComparator(final Collator collator) {
        if(collator == null) {
            // it's important to explicitly handle this here - else the bug will manifest anytime later in possibly
            // unrelated code that tries to use the comparator
            throw new NullPointerException("collator must not be null");
        }
        return new Comparator<String>() {
            public int compare(String o1, String o2) {
                return compareNatural(collator, o1, o2);
            }
        };
    }

    /**
     * Returns a comparator that compares contained numbers based on their numeric values and compares other parts
     * based on each character's Unicode value.
     *
     * @return <p>a string comparator that does case sensitive comparisons on pure ascii strings and handles embedded
     *         numbers correctly.</p>
     *         <b>Do not use</b> if your app might ever run on any locale that uses more than 7-bit ascii characters.
     * @see #getNaturalComparator()
     * @see #getNaturalComparator(java.text.Collator)
     */
    public static Comparator<String> getNaturalComparatorAscii() {
        return NATURAL_COMPARATOR_ASCII;
    }

    /**
     * Returns a comparator that compares contained numbers based on their numeric values and compares other parts
     * based on each character's Unicode value while ignore upper/lower case differences.
     * <b>Do not use</b> if your app might ever run on any locale that uses more than 7-bit ascii characters.
     *
     * @return <p>a string comparator that does case insensitive comparisons on pure ascii strings and handles embedded
     *         numbers correctly.</p>
     * @see #getNaturalComparator()
     * @see #getNaturalComparator(java.text.Collator)
     */
    public static Comparator<String> getNaturalComparatorIgnoreCaseAscii() {
        return IGNORE_CASE_NATURAL_COMPARATOR_ASCII;
    }

    /**
     * <p>Compares two strings using the current locale's rules and comparing contained numbers based on their numeric
     * values.</p>
     * <p>This is probably the best default comparison to use.</p>
     * <p>If you know that the texts to be compared are in a certain language that differs from the default locale's
     * language, then get a collator for the desired locale ({@link java.text.Collator#getInstance(java.util.Locale)})
     * and pass it to {@link #compareNatural(java.text.Collator, String, String)}</p>
     *
     * @param s first string
     * @param t second string
     * @return zero iff <code>s</code> and <code>t</code> are equal,
     *         a value less than zero iff <code>s</code> lexicographically precedes <code>t</code>
     *         and a value larger than zero iff <code>s</code> lexicographically follows <code>t</code>
     */
    public static int compareNatural(String s, String t) {
        // the caseSensitive argument is ignored when a collator is supplied
        return compareNatural(s, t, false, Collator.getInstance());
    }

    /**
     * <p>Compares two strings using the given collator and comparing contained numbers based on their numeric
     * values.</p>
     *
     * @param collator used for locale specific comparison of text (non-number) subwords - must not be null
     * @param s first string
     * @param t second string
     * @return zero iff <code>s</code> and <code>t</code> are equal,
     *         a value less than zero iff <code>s</code> lexicographically precedes <code>t</code>
     *         and a value larger than zero iff <code>s</code> lexicographically follows <code>t</code>
     */
    public static int compareNatural(Collator collator, String s, String t) {
        return compareNatural(s, t, true, collator);
    }

    /**
     * <p>Compares two strings using each character's Unicode value for non-digit characters and the numeric values of
     * any contained numbers.</p>
     * <p>(This will probably make sense only for strings containing 7-bit ascii characters only.)</p>
     *
     * @return zero iff <code>s</code> and <code>t</code> are equal,
     *         a value less than zero iff <code>s</code> lexicographically precedes <code>t</code>
     *         and a value larger than zero iff <code>s</code> lexicographically follows <code>t</code>
     */
    public static int compareNaturalAscii(String s, String t) {
        return compareNatural(s, t, true, null);
    }

    /**
     * <p>Compares two strings using each character's Unicode value - ignoring upper/lower case - for non-digit
     * characters and the numeric values of any contained numbers.</p>
     * <p>(This will probably make sense only for strings containing 7-bit ascii characters only.)</p>
     *
     * @return zero iff <code>s</code> and <code>t</code> are equal,
     *         a value less than zero iff <code>s</code> lexicographically precedes <code>t</code>
     *         and a value larger than zero iff <code>s</code> lexicographically follows <code>t</code>
     */
    public static int compareNaturalIgnoreCaseAscii(String s, String t) {
        return compareNatural(s, t, false, null);
    }

    /**
     * The core natural-order comparison: alternates between comparing digit runs by
     * numeric value and comparing non-digit subwords (via the collator, or
     * character-by-character when collator is null).
     *
     * @param s             first string
     * @param t             second string
     * @param caseSensitive if true, case differences are significant; if false, characters differing only
     *                      in case compare as equal - ignored if a collator is given
     *                      (note: the original javadoc described this parameter inverted)
     * @param collator      used to compare subwords that aren't numbers - if null, characters will be compared
     *                      individually based on their Unicode value
     * @return zero iff <code>s</code> and <code>t</code> are equal,
     *         a value less than zero iff <code>s</code> lexicographically precedes <code>t</code>
     *         and a value larger than zero iff <code>s</code> lexicographically follows <code>t</code>
     */
    private static int compareNatural(String s, String t, boolean caseSensitive, Collator collator) {
        int sIndex = 0;
        int tIndex = 0;

        int sLength = s.length();
        int tLength = t.length();

        while(true) {
            // both character indices are after a subword (or at zero)

            // Check if one string is at end
            if(sIndex == sLength && tIndex == tLength) {
                return 0;
            }
            if(sIndex == sLength) {
                return -1;
            }
            if(tIndex == tLength) {
                return 1;
            }

            // Compare sub word
            char sChar = s.charAt(sIndex);
            char tChar = t.charAt(tIndex);

            boolean sCharIsDigit = Character.isDigit(sChar);
            boolean tCharIsDigit = Character.isDigit(tChar);

            if(sCharIsDigit && tCharIsDigit) {
                // Compare numbers

                // skip leading 0s (their count is remembered as a last-resort tiebreaker)
                int sLeadingZeroCount = 0;
                while(sChar == '0') {
                    ++sLeadingZeroCount;
                    ++sIndex;
                    if(sIndex == sLength) {
                        break;
                    }
                    sChar = s.charAt(sIndex);
                }
                int tLeadingZeroCount = 0;
                while(tChar == '0') {
                    ++tLeadingZeroCount;
                    ++tIndex;
                    if(tIndex == tLength) {
                        break;
                    }
                    tChar = t.charAt(tIndex);
                }

                // a digit run consisting only of zeros counts as the value 0
                boolean sAllZero = sIndex == sLength || !Character.isDigit(sChar);
                boolean tAllZero = tIndex == tLength || !Character.isDigit(tChar);

                if(sAllZero && tAllZero) {
                    continue;
                }
                if(sAllZero && !tAllZero) {
                    return -1;
                }
                if(tAllZero) {
                    return 1;
                }

                // Walk both digit runs in lockstep: the first differing digit decides,
                // unless one run turns out to be longer (the longer number is larger).
                int diff = 0;
                do {
                    if(diff == 0) {
                        diff = sChar - tChar;
                    }
                    ++sIndex;
                    ++tIndex;
                    if(sIndex == sLength && tIndex == tLength) {
                        return diff != 0 ? diff : sLeadingZeroCount - tLeadingZeroCount;
                    }
                    if(sIndex == sLength) {
                        if(diff == 0) {
                            return -1;
                        }
                        return Character.isDigit(t.charAt(tIndex)) ? -1 : diff;
                    }
                    if(tIndex == tLength) {
                        if(diff == 0) {
                            return 1;
                        }
                        return Character.isDigit(s.charAt(sIndex)) ? 1 : diff;
                    }
                    sChar = s.charAt(sIndex);
                    tChar = t.charAt(tIndex);
                    sCharIsDigit = Character.isDigit(sChar);
                    tCharIsDigit = Character.isDigit(tChar);
                    if(!sCharIsDigit && !tCharIsDigit) {
                        // both number sub words have the same length
                        if(diff != 0) {
                            return diff;
                        }
                        break;
                    }
                    if(!sCharIsDigit) {
                        return -1;
                    }
                    if(!tCharIsDigit) {
                        return 1;
                    }
                } while(true);
            }
            else {
                // Compare words
                if(collator != null) {
                    // To use the collator the whole subwords have to be compared - character-by-character comparision
                    // is not possible. So find the two subwords first
                    int aw = sIndex;
                    int bw = tIndex;
                    do {
                        ++sIndex;
                    } while(sIndex < sLength && !Character.isDigit(s.charAt(sIndex)));
                    do {
                        ++tIndex;
                    } while(tIndex < tLength && !Character.isDigit(t.charAt(tIndex)));

                    String as = s.substring(aw, sIndex);
                    String bs = t.substring(bw, tIndex);
                    int subwordResult = collator.compare(as, bs);
                    if(subwordResult != 0) {
                        return subwordResult;
                    }
                }
                else {
                    // No collator specified. All characters should be ascii only. Compare character-by-character.
                    do {
                        if(sChar != tChar) {
                            if(caseSensitive) {
                                return sChar - tChar;
                            }
                            // case-insensitive: try upper case, then lower case,
                            // mirroring String.CASE_INSENSITIVE_ORDER
                            sChar = Character.toUpperCase(sChar);
                            tChar = Character.toUpperCase(tChar);
                            if(sChar != tChar) {
                                sChar = Character.toLowerCase(sChar);
                                tChar = Character.toLowerCase(tChar);
                                if(sChar != tChar) {
                                    return sChar - tChar;
                                }
                            }
                        }
                        ++sIndex;
                        ++tIndex;
                        if(sIndex == sLength && tIndex == tLength) {
                            return 0;
                        }
                        if(sIndex == sLength) {
                            return -1;
                        }
                        if(tIndex == tLength) {
                            return 1;
                        }
                        sChar = s.charAt(sIndex);
                        tChar = t.charAt(tIndex);
                        sCharIsDigit = Character.isDigit(sChar);
                        tCharIsDigit = Character.isDigit(tChar);
                    } while(!sCharIsDigit && !tCharIsDigit);
                }
            }
        }
    }
}
/* This file was generated by SableCC (http://www.sablecc.org/). */
// NOTE(review): machine-generated visitor. Do not hand-edit logic here —
// regenerate from the grammar instead. Comments below were added for readers only.
package analysis;

import java.util.*;
import node.*;

/**
 * Depth-first traversal adapter for the SableCC-generated AST.
 *
 * <p>For every alternative X of the grammar there is a triple of hooks:
 * {@code inX} (called before the children), {@code outX} (called after), and
 * {@code caseX} (drives the traversal: in-hook, then each child in grammar
 * order, then out-hook). Subclasses override the in/out hooks (or whole
 * cases) to implement analyses; unoverridden hooks fall through to
 * {@link #defaultIn(Node)} / {@link #defaultOut(Node)}, which do nothing.
 *
 * <p>List-valued children are copied into a fresh {@code ArrayList} before
 * iteration so a visitor may structurally modify the node's child list
 * while it is being traversed.
 */
public class DepthFirstAdapter extends AnalysisAdapter
{
    // ---- traversal entry point: the synthetic Start production ----

    public void inStart(Start node) { defaultIn(node); }
    public void outStart(Start node) { defaultOut(node); }

    /** Fallback in-hook; intentionally a no-op. */
    public void defaultIn(@SuppressWarnings("unused") Node node)
    {
        // Do nothing
    }

    /** Fallback out-hook; intentionally a no-op. */
    public void defaultOut(@SuppressWarnings("unused") Node node)
    {
        // Do nothing
    }

    @Override
    public void caseStart(Start node)
    {
        inStart(node);
        node.getPFile().apply(this);
        node.getEOF().apply(this);
        outStart(node);
    }

    // ---- file and class structure ----

    public void inAFile(AFile node) { defaultIn(node); }
    public void outAFile(AFile node) { defaultOut(node); }

    @Override
    public void caseAFile(AFile node)
    {
        inAFile(node);
        {
            // Copy guards against concurrent structural modification by the visitor.
            List<PClassDef> copy = new ArrayList<PClassDef>(node.getClassDefs());
            for(PClassDef e : copy) { e.apply(this); }
        }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        outAFile(node);
    }

    public void inAClassDef(AClassDef node) { defaultIn(node); }
    public void outAClassDef(AClassDef node) { defaultOut(node); }

    @Override
    public void caseAClassDef(AClassDef node)
    {
        inAClassDef(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols1());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getClassKeyword() != null) { node.getClassKeyword().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        if(node.getSuperDecl() != null) { node.getSuperDecl().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        {
            List<PMember> copy = new ArrayList<PMember>(node.getMembers());
            for(PMember e : copy) { e.apply(this); }
        }
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols2());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getEnd() != null) { node.getEnd().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAClassDef(node);
    }

    public void inASuperDecl(ASuperDecl node) { defaultIn(node); }
    public void outASuperDecl(ASuperDecl node) { defaultOut(node); }

    @Override
    public void caseASuperDecl(ASuperDecl node)
    {
        inASuperDecl(node);
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getSuper() != null) { node.getSuper().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outASuperDecl(node);
    }

    // ---- class members ----

    public void inAFieldMember(AFieldMember node) { defaultIn(node); }
    public void outAFieldMember(AFieldMember node) { defaultOut(node); }

    @Override
    public void caseAFieldMember(AFieldMember node)
    {
        inAFieldMember(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getVar() != null) { node.getVar().apply(this); }
        if(node.getFieldName() != null) { node.getFieldName().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outAFieldMember(node);
    }

    public void inAMethodMember(AMethodMember node) { defaultIn(node); }
    public void outAMethodMember(AMethodMember node) { defaultOut(node); }

    @Override
    public void caseAMethodMember(AMethodMember node)
    {
        inAMethodMember(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getFun() != null) { node.getFun().apply(this); }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getParams() != null) { node.getParams().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        if(node.getReturnDecl() != null) { node.getReturnDecl().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getDo() != null) { node.getDo().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        if(node.getEnd() != null) { node.getEnd().apply(this); }
        if(node.getEol3() != null) { node.getEol3().apply(this); }
        outAMethodMember(node);
    }

    public void inAOperatorMember(AOperatorMember node) { defaultIn(node); }
    public void outAOperatorMember(AOperatorMember node) { defaultOut(node); }

    @Override
    public void caseAOperatorMember(AOperatorMember node)
    {
        inAOperatorMember(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getFun() != null) { node.getFun().apply(this); }
        if(node.getOperator() != null) { node.getOperator().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getParams() != null) { node.getParams().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        if(node.getReturnDecl() != null) { node.getReturnDecl().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getDo() != null) { node.getDo().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        if(node.getEnd() != null) { node.getEnd().apply(this); }
        if(node.getEol3() != null) { node.getEol3().apply(this); }
        outAOperatorMember(node);
    }

    public void inAInternMethodMember(AInternMethodMember node) { defaultIn(node); }
    public void outAInternMethodMember(AInternMethodMember node) { defaultOut(node); }

    @Override
    public void caseAInternMethodMember(AInternMethodMember node)
    {
        inAInternMethodMember(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getIntern() != null) { node.getIntern().apply(this); }
        if(node.getFun() != null) { node.getFun().apply(this); }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getParams() != null) { node.getParams().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        if(node.getReturnDecl() != null) { node.getReturnDecl().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outAInternMethodMember(node);
    }

    public void inAInternOperatorMember(AInternOperatorMember node) { defaultIn(node); }
    public void outAInternOperatorMember(AInternOperatorMember node) { defaultOut(node); }

    @Override
    public void caseAInternOperatorMember(AInternOperatorMember node)
    {
        inAInternOperatorMember(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getIntern() != null) { node.getIntern().apply(this); }
        if(node.getFun() != null) { node.getFun().apply(this); }
        if(node.getOperator() != null) { node.getOperator().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getParams() != null) { node.getParams().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        if(node.getReturnDecl() != null) { node.getReturnDecl().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outAInternOperatorMember(node);
    }

    // ---- parameter lists ----

    public void inAParams(AParams node) { defaultIn(node); }
    public void outAParams(AParams node) { defaultOut(node); }

    @Override
    public void caseAParams(AParams node)
    {
        inAParams(node);
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getParam() != null) { node.getParam().apply(this); }
        {
            List<PAdditionalParam> copy = new ArrayList<PAdditionalParam>(node.getAdditionalParams());
            for(PAdditionalParam e : copy) { e.apply(this); }
        }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAParams(node);
    }

    public void inAAdditionalParam(AAdditionalParam node) { defaultIn(node); }
    public void outAAdditionalParam(AAdditionalParam node) { defaultOut(node); }

    @Override
    public void caseAAdditionalParam(AAdditionalParam node)
    {
        inAAdditionalParam(node);
        if(node.getComma() != null) { node.getComma().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getParam() != null) { node.getParam().apply(this); }
        outAAdditionalParam(node);
    }

    public void inAParam(AParam node) { defaultIn(node); }
    public void outAParam(AParam node) { defaultOut(node); }

    @Override
    public void caseAParam(AParam node)
    {
        inAParam(node);
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getColon() != null) { node.getColon().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outAParam(node);
    }

    public void inAReturnDecl(AReturnDecl node) { defaultIn(node); }
    public void outAReturnDecl(AReturnDecl node) { defaultOut(node); }

    @Override
    public void caseAReturnDecl(AReturnDecl node)
    {
        inAReturnDecl(node);
        if(node.getColon() != null) { node.getColon().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outAReturnDecl(node);
    }

    // ---- overloadable operator alternatives (one token child each) ----

    public void inAEqOperator(AEqOperator node) { defaultIn(node); }
    public void outAEqOperator(AEqOperator node) { defaultOut(node); }

    @Override
    public void caseAEqOperator(AEqOperator node)
    {
        inAEqOperator(node);
        if(node.getEq() != null) { node.getEq().apply(this); }
        outAEqOperator(node);
    }

    public void inANeqOperator(ANeqOperator node) { defaultIn(node); }
    public void outANeqOperator(ANeqOperator node) { defaultOut(node); }

    @Override
    public void caseANeqOperator(ANeqOperator node)
    {
        inANeqOperator(node);
        if(node.getNeq() != null) { node.getNeq().apply(this); }
        outANeqOperator(node);
    }

    public void inALtOperator(ALtOperator node) { defaultIn(node); }
    public void outALtOperator(ALtOperator node) { defaultOut(node); }

    @Override
    public void caseALtOperator(ALtOperator node)
    {
        inALtOperator(node);
        if(node.getLt() != null) { node.getLt().apply(this); }
        outALtOperator(node);
    }

    public void inAGtOperator(AGtOperator node) { defaultIn(node); }
    public void outAGtOperator(AGtOperator node) { defaultOut(node); }

    @Override
    public void caseAGtOperator(AGtOperator node)
    {
        inAGtOperator(node);
        if(node.getGt() != null) { node.getGt().apply(this); }
        outAGtOperator(node);
    }

    public void inALteqOperator(ALteqOperator node) { defaultIn(node); }
    public void outALteqOperator(ALteqOperator node) { defaultOut(node); }

    @Override
    public void caseALteqOperator(ALteqOperator node)
    {
        inALteqOperator(node);
        if(node.getLteq() != null) { node.getLteq().apply(this); }
        outALteqOperator(node);
    }

    public void inAGteqOperator(AGteqOperator node) { defaultIn(node); }
    public void outAGteqOperator(AGteqOperator node) { defaultOut(node); }

    @Override
    public void caseAGteqOperator(AGteqOperator node)
    {
        inAGteqOperator(node);
        if(node.getGteq() != null) { node.getGteq().apply(this); }
        outAGteqOperator(node);
    }

    public void inAPlusOperator(APlusOperator node) { defaultIn(node); }
    public void outAPlusOperator(APlusOperator node) { defaultOut(node); }

    @Override
    public void caseAPlusOperator(APlusOperator node)
    {
        inAPlusOperator(node);
        if(node.getPlus() != null) { node.getPlus().apply(this); }
        outAPlusOperator(node);
    }

    public void inAMinusOperator(AMinusOperator node) { defaultIn(node); }
    public void outAMinusOperator(AMinusOperator node) { defaultOut(node); }

    @Override
    public void caseAMinusOperator(AMinusOperator node)
    {
        inAMinusOperator(node);
        if(node.getMinus() != null) { node.getMinus().apply(this); }
        outAMinusOperator(node);
    }

    public void inAStarOperator(AStarOperator node) { defaultIn(node); }
    public void outAStarOperator(AStarOperator node) { defaultOut(node); }

    @Override
    public void caseAStarOperator(AStarOperator node)
    {
        inAStarOperator(node);
        if(node.getStar() != null) { node.getStar().apply(this); }
        outAStarOperator(node);
    }

    public void inASlashOperator(ASlashOperator node) { defaultIn(node); }
    public void outASlashOperator(ASlashOperator node) { defaultOut(node); }

    @Override
    public void caseASlashOperator(ASlashOperator node)
    {
        inASlashOperator(node);
        if(node.getSlash() != null) { node.getSlash().apply(this); }
        outASlashOperator(node);
    }

    public void inAPercentOperator(APercentOperator node) { defaultIn(node); }
    public void outAPercentOperator(APercentOperator node) { defaultOut(node); }

    @Override
    public void caseAPercentOperator(APercentOperator node)
    {
        inAPercentOperator(node);
        if(node.getPercent() != null) { node.getPercent().apply(this); }
        outAPercentOperator(node);
    }

    // ---- statements ----

    public void inAStmts(AStmts node) { defaultIn(node); }
    public void outAStmts(AStmts node) { defaultOut(node); }

    @Override
    public void caseAStmts(AStmts node)
    {
        inAStmts(node);
        {
            List<PStmt> copy = new ArrayList<PStmt>(node.getStmts());
            for(PStmt e : copy) { e.apply(this); }
        }
        outAStmts(node);
    }

    public void inAVarDefStmt(AVarDefStmt node) { defaultIn(node); }
    public void outAVarDefStmt(AVarDefStmt node) { defaultOut(node); }

    @Override
    public void caseAVarDefStmt(AVarDefStmt node)
    {
        inAVarDefStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getVar() != null) { node.getVar().apply(this); }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getColon() != null) { node.getColon().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outAVarDefStmt(node);
    }

    public void inAVarInitStmt(AVarInitStmt node) { defaultIn(node); }
    public void outAVarInitStmt(AVarInitStmt node) { defaultOut(node); }

    @Override
    public void caseAVarInitStmt(AVarInitStmt node)
    {
        inAVarInitStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getVar() != null) { node.getVar().apply(this); }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getEq() != null) { node.getEq().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAVarInitStmt(node);
    }

    public void inAVarAssignStmt(AVarAssignStmt node) { defaultIn(node); }
    public void outAVarAssignStmt(AVarAssignStmt node) { defaultOut(node); }

    @Override
    public void caseAVarAssignStmt(AVarAssignStmt node)
    {
        inAVarAssignStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getEq() != null) { node.getEq().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAVarAssignStmt(node);
    }

    public void inAFieldAssignStmt(AFieldAssignStmt node) { defaultIn(node); }
    public void outAFieldAssignStmt(AFieldAssignStmt node) { defaultOut(node); }

    @Override
    public void caseAFieldAssignStmt(AFieldAssignStmt node)
    {
        inAFieldAssignStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getFieldName() != null) { node.getFieldName().apply(this); }
        if(node.getEq() != null) { node.getEq().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAFieldAssignStmt(node);
    }

    public void inACallStmt(ACallStmt node) { defaultIn(node); }
    public void outACallStmt(ACallStmt node) { defaultOut(node); }

    @Override
    public void caseACallStmt(ACallStmt node)
    {
        inACallStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getCall() != null) { node.getCall().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outACallStmt(node);
    }

    public void inASelfCallStmt(ASelfCallStmt node) { defaultIn(node); }
    public void outASelfCallStmt(ASelfCallStmt node) { defaultOut(node); }

    @Override
    public void caseASelfCallStmt(ASelfCallStmt node)
    {
        inASelfCallStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getSelfCall() != null) { node.getSelfCall().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outASelfCallStmt(node);
    }

    public void inAWhileStmt(AWhileStmt node) { defaultIn(node); }
    public void outAWhileStmt(AWhileStmt node) { defaultOut(node); }

    @Override
    public void caseAWhileStmt(AWhileStmt node)
    {
        inAWhileStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getWhile() != null) { node.getWhile().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        if(node.getDo() != null) { node.getDo().apply(this); }
        if(node.getEol3() != null) { node.getEol3().apply(this); }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        if(node.getEnd() != null) { node.getEnd().apply(this); }
        if(node.getEol4() != null) { node.getEol4().apply(this); }
        outAWhileStmt(node);
    }

    public void inAIfStmt(AIfStmt node) { defaultIn(node); }
    public void outAIfStmt(AIfStmt node) { defaultOut(node); }

    @Override
    public void caseAIfStmt(AIfStmt node)
    {
        inAIfStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getIf() != null) { node.getIf().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        if(node.getThen() != null) { node.getThen().apply(this); }
        if(node.getEol3() != null) { node.getEol3().apply(this); }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        if(node.getElsePart() != null) { node.getElsePart().apply(this); }
        if(node.getEnd() != null) { node.getEnd().apply(this); }
        if(node.getEol4() != null) { node.getEol4().apply(this); }
        outAIfStmt(node);
    }

    public void inAReturnStmt(AReturnStmt node) { defaultIn(node); }
    public void outAReturnStmt(AReturnStmt node) { defaultOut(node); }

    @Override
    public void caseAReturnStmt(AReturnStmt node)
    {
        inAReturnStmt(node);
        {
            List<TEol> copy = new ArrayList<TEol>(node.getEols());
            for(TEol e : copy) { e.apply(this); }
        }
        if(node.getReturn() != null) { node.getReturn().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        outAReturnStmt(node);
    }

    public void inAElsePart(AElsePart node) { defaultIn(node); }
    public void outAElsePart(AElsePart node) { defaultOut(node); }

    @Override
    public void caseAElsePart(AElsePart node)
    {
        inAElsePart(node);
        if(node.getElse() != null) { node.getElse().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getStmts() != null) { node.getStmts().apply(this); }
        outAElsePart(node);
    }

    // ---- expressions (precedence climbing: exp > conjunction > comparison
    //      > arith_exp > factor > left_unary_exp > right_unary_exp > term) ----

    public void inAOrExp(AOrExp node) { defaultIn(node); }
    public void outAOrExp(AOrExp node) { defaultOut(node); }

    @Override
    public void caseAOrExp(AOrExp node)
    {
        inAOrExp(node);
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getOr() != null) { node.getOr().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getConjunction() != null) { node.getConjunction().apply(this); }
        outAOrExp(node);
    }

    public void inASimpleExp(ASimpleExp node) { defaultIn(node); }
    public void outASimpleExp(ASimpleExp node) { defaultOut(node); }

    @Override
    public void caseASimpleExp(ASimpleExp node)
    {
        inASimpleExp(node);
        if(node.getConjunction() != null) { node.getConjunction().apply(this); }
        outASimpleExp(node);
    }

    public void inAAndConjunction(AAndConjunction node) { defaultIn(node); }
    public void outAAndConjunction(AAndConjunction node) { defaultOut(node); }

    @Override
    public void caseAAndConjunction(AAndConjunction node)
    {
        inAAndConjunction(node);
        if(node.getConjunction() != null) { node.getConjunction().apply(this); }
        if(node.getAnd() != null) { node.getAnd().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        outAAndConjunction(node);
    }

    public void inASimpleConjunction(ASimpleConjunction node) { defaultIn(node); }
    public void outASimpleConjunction(ASimpleConjunction node) { defaultOut(node); }

    @Override
    public void caseASimpleConjunction(ASimpleConjunction node)
    {
        inASimpleConjunction(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        outASimpleConjunction(node);
    }

    public void inAEqComparison(AEqComparison node) { defaultIn(node); }
    public void outAEqComparison(AEqComparison node) { defaultOut(node); }

    @Override
    public void caseAEqComparison(AEqComparison node)
    {
        inAEqComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getEq() != null) { node.getEq().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outAEqComparison(node);
    }

    public void inANeqComparison(ANeqComparison node) { defaultIn(node); }
    public void outANeqComparison(ANeqComparison node) { defaultOut(node); }

    @Override
    public void caseANeqComparison(ANeqComparison node)
    {
        inANeqComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getNeq() != null) { node.getNeq().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outANeqComparison(node);
    }

    public void inALtComparison(ALtComparison node) { defaultIn(node); }
    public void outALtComparison(ALtComparison node) { defaultOut(node); }

    @Override
    public void caseALtComparison(ALtComparison node)
    {
        inALtComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getLt() != null) { node.getLt().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outALtComparison(node);
    }

    public void inAGtComparison(AGtComparison node) { defaultIn(node); }
    public void outAGtComparison(AGtComparison node) { defaultOut(node); }

    @Override
    public void caseAGtComparison(AGtComparison node)
    {
        inAGtComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getGt() != null) { node.getGt().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outAGtComparison(node);
    }

    public void inALteqComparison(ALteqComparison node) { defaultIn(node); }
    public void outALteqComparison(ALteqComparison node) { defaultOut(node); }

    @Override
    public void caseALteqComparison(ALteqComparison node)
    {
        inALteqComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getLteq() != null) { node.getLteq().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outALteqComparison(node);
    }

    public void inAGteqComparison(AGteqComparison node) { defaultIn(node); }
    public void outAGteqComparison(AGteqComparison node) { defaultOut(node); }

    @Override
    public void caseAGteqComparison(AGteqComparison node)
    {
        inAGteqComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getGteq() != null) { node.getGteq().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outAGteqComparison(node);
    }

    public void inAIsComparison(AIsComparison node) { defaultIn(node); }
    public void outAIsComparison(AIsComparison node) { defaultOut(node); }

    @Override
    public void caseAIsComparison(AIsComparison node)
    {
        inAIsComparison(node);
        if(node.getComparison() != null) { node.getComparison().apply(this); }
        if(node.getIs() != null) { node.getIs().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outAIsComparison(node);
    }

    public void inASimpleComparison(ASimpleComparison node) { defaultIn(node); }
    public void outASimpleComparison(ASimpleComparison node) { defaultOut(node); }

    @Override
    public void caseASimpleComparison(ASimpleComparison node)
    {
        inASimpleComparison(node);
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        outASimpleComparison(node);
    }

    public void inAAddArithExp(AAddArithExp node) { defaultIn(node); }
    public void outAAddArithExp(AAddArithExp node) { defaultOut(node); }

    @Override
    public void caseAAddArithExp(AAddArithExp node)
    {
        inAAddArithExp(node);
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        if(node.getPlus() != null) { node.getPlus().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        outAAddArithExp(node);
    }

    public void inASubArithExp(ASubArithExp node) { defaultIn(node); }
    public void outASubArithExp(ASubArithExp node) { defaultOut(node); }

    @Override
    public void caseASubArithExp(ASubArithExp node)
    {
        inASubArithExp(node);
        if(node.getArithExp() != null) { node.getArithExp().apply(this); }
        if(node.getMinus() != null) { node.getMinus().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        outASubArithExp(node);
    }

    public void inASimpleArithExp(ASimpleArithExp node) { defaultIn(node); }
    public void outASimpleArithExp(ASimpleArithExp node) { defaultOut(node); }

    @Override
    public void caseASimpleArithExp(ASimpleArithExp node)
    {
        inASimpleArithExp(node);
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        outASimpleArithExp(node);
    }

    public void inAMulFactor(AMulFactor node) { defaultIn(node); }
    public void outAMulFactor(AMulFactor node) { defaultOut(node); }

    @Override
    public void caseAMulFactor(AMulFactor node)
    {
        inAMulFactor(node);
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        if(node.getStar() != null) { node.getStar().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outAMulFactor(node);
    }

    public void inADivFactor(ADivFactor node) { defaultIn(node); }
    public void outADivFactor(ADivFactor node) { defaultOut(node); }

    @Override
    public void caseADivFactor(ADivFactor node)
    {
        inADivFactor(node);
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        if(node.getSlash() != null) { node.getSlash().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outADivFactor(node);
    }

    public void inAModFactor(AModFactor node) { defaultIn(node); }
    public void outAModFactor(AModFactor node) { defaultOut(node); }

    @Override
    public void caseAModFactor(AModFactor node)
    {
        inAModFactor(node);
        if(node.getFactor() != null) { node.getFactor().apply(this); }
        if(node.getPercent() != null) { node.getPercent().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outAModFactor(node);
    }

    public void inASimpleFactor(ASimpleFactor node) { defaultIn(node); }
    public void outASimpleFactor(ASimpleFactor node) { defaultOut(node); }

    @Override
    public void caseASimpleFactor(ASimpleFactor node)
    {
        inASimpleFactor(node);
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outASimpleFactor(node);
    }

    public void inANotLeftUnaryExp(ANotLeftUnaryExp node) { defaultIn(node); }
    public void outANotLeftUnaryExp(ANotLeftUnaryExp node) { defaultOut(node); }

    @Override
    public void caseANotLeftUnaryExp(ANotLeftUnaryExp node)
    {
        inANotLeftUnaryExp(node);
        if(node.getNot() != null) { node.getNot().apply(this); }
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outANotLeftUnaryExp(node);
    }

    public void inANegLeftUnaryExp(ANegLeftUnaryExp node) { defaultIn(node); }
    public void outANegLeftUnaryExp(ANegLeftUnaryExp node) { defaultOut(node); }

    @Override
    public void caseANegLeftUnaryExp(ANegLeftUnaryExp node)
    {
        inANegLeftUnaryExp(node);
        if(node.getMinus() != null) { node.getMinus().apply(this); }
        if(node.getLeftUnaryExp() != null) { node.getLeftUnaryExp().apply(this); }
        outANegLeftUnaryExp(node);
    }

    public void inASimpleLeftUnaryExp(ASimpleLeftUnaryExp node) { defaultIn(node); }
    public void outASimpleLeftUnaryExp(ASimpleLeftUnaryExp node) { defaultOut(node); }

    @Override
    public void caseASimpleLeftUnaryExp(ASimpleLeftUnaryExp node)
    {
        inASimpleLeftUnaryExp(node);
        if(node.getRightUnaryExp() != null) { node.getRightUnaryExp().apply(this); }
        outASimpleLeftUnaryExp(node);
    }

    public void inACallRightUnaryExp(ACallRightUnaryExp node) { defaultIn(node); }
    public void outACallRightUnaryExp(ACallRightUnaryExp node) { defaultOut(node); }

    @Override
    public void caseACallRightUnaryExp(ACallRightUnaryExp node)
    {
        inACallRightUnaryExp(node);
        if(node.getCall() != null) { node.getCall().apply(this); }
        outACallRightUnaryExp(node);
    }

    public void inAIsaRightUnaryExp(AIsaRightUnaryExp node) { defaultIn(node); }
    public void outAIsaRightUnaryExp(AIsaRightUnaryExp node) { defaultOut(node); }

    @Override
    public void caseAIsaRightUnaryExp(AIsaRightUnaryExp node)
    {
        inAIsaRightUnaryExp(node);
        if(node.getRightUnaryExp() != null) { node.getRightUnaryExp().apply(this); }
        if(node.getIsa() != null) { node.getIsa().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outAIsaRightUnaryExp(node);
    }

    public void inAAsRightUnaryExp(AAsRightUnaryExp node) { defaultIn(node); }
    public void outAAsRightUnaryExp(AAsRightUnaryExp node) { defaultOut(node); }

    @Override
    public void caseAAsRightUnaryExp(AAsRightUnaryExp node)
    {
        inAAsRightUnaryExp(node);
        if(node.getRightUnaryExp() != null) { node.getRightUnaryExp().apply(this); }
        if(node.getAs() != null) { node.getAs().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outAAsRightUnaryExp(node);
    }

    public void inASimpleRightUnaryExp(ASimpleRightUnaryExp node) { defaultIn(node); }
    public void outASimpleRightUnaryExp(ASimpleRightUnaryExp node) { defaultOut(node); }

    @Override
    public void caseASimpleRightUnaryExp(ASimpleRightUnaryExp node)
    {
        inASimpleRightUnaryExp(node);
        if(node.getTerm() != null) { node.getTerm().apply(this); }
        outASimpleRightUnaryExp(node);
    }

    // ---- terms and calls ----

    public void inASelfCallTerm(ASelfCallTerm node) { defaultIn(node); }
    public void outASelfCallTerm(ASelfCallTerm node) { defaultOut(node); }

    @Override
    public void caseASelfCallTerm(ASelfCallTerm node)
    {
        inASelfCallTerm(node);
        if(node.getSelfCall() != null) { node.getSelfCall().apply(this); }
        outASelfCallTerm(node);
    }

    public void inAParTerm(AParTerm node) { defaultIn(node); }
    public void outAParTerm(AParTerm node) { defaultOut(node); }

    @Override
    public void caseAParTerm(AParTerm node)
    {
        inAParTerm(node);
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getExp() != null) { node.getExp().apply(this); }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        outAParTerm(node);
    }

    public void inANewTerm(ANewTerm node) { defaultIn(node); }
    public void outANewTerm(ANewTerm node) { defaultOut(node); }

    @Override
    public void caseANewTerm(ANewTerm node)
    {
        inANewTerm(node);
        if(node.getNew() != null) { node.getNew().apply(this); }
        if(node.getClassName() != null) { node.getClassName().apply(this); }
        outANewTerm(node);
    }

    public void inAFieldTerm(AFieldTerm node) { defaultIn(node); }
    public void outAFieldTerm(AFieldTerm node) { defaultOut(node); }

    @Override
    public void caseAFieldTerm(AFieldTerm node)
    {
        inAFieldTerm(node);
        if(node.getFieldName() != null) { node.getFieldName().apply(this); }
        outAFieldTerm(node);
    }

    public void inAVarTerm(AVarTerm node) { defaultIn(node); }
    public void outAVarTerm(AVarTerm node) { defaultOut(node); }

    @Override
    public void caseAVarTerm(AVarTerm node)
    {
        inAVarTerm(node);
        if(node.getId() != null) { node.getId().apply(this); }
        outAVarTerm(node);
    }

    public void inANumTerm(ANumTerm node) { defaultIn(node); }
    public void outANumTerm(ANumTerm node) { defaultOut(node); }

    @Override
    public void caseANumTerm(ANumTerm node)
    {
        inANumTerm(node);
        if(node.getNumber() != null) { node.getNumber().apply(this); }
        outANumTerm(node);
    }

    public void inANullTerm(ANullTerm node) { defaultIn(node); }
    public void outANullTerm(ANullTerm node) { defaultOut(node); }

    @Override
    public void caseANullTerm(ANullTerm node)
    {
        inANullTerm(node);
        if(node.getNull() != null) { node.getNull().apply(this); }
        outANullTerm(node);
    }

    public void inASelfTerm(ASelfTerm node) { defaultIn(node); }
    public void outASelfTerm(ASelfTerm node) { defaultOut(node); }

    @Override
    public void caseASelfTerm(ASelfTerm node)
    {
        inASelfTerm(node);
        if(node.getSelf() != null) { node.getSelf().apply(this); }
        outASelfTerm(node);
    }

    public void inATrueTerm(ATrueTerm node) { defaultIn(node); }
    public void outATrueTerm(ATrueTerm node) { defaultOut(node); }

    @Override
    public void caseATrueTerm(ATrueTerm node)
    {
        inATrueTerm(node);
        if(node.getTrue() != null) { node.getTrue().apply(this); }
        outATrueTerm(node);
    }

    public void inAFalseTerm(AFalseTerm node) { defaultIn(node); }
    public void outAFalseTerm(AFalseTerm node) { defaultOut(node); }

    @Override
    public void caseAFalseTerm(AFalseTerm node)
    {
        inAFalseTerm(node);
        if(node.getFalse() != null) { node.getFalse().apply(this); }
        outAFalseTerm(node);
    }

    public void inAStringTerm(AStringTerm node) { defaultIn(node); }
    public void outAStringTerm(AStringTerm node) { defaultOut(node); }

    @Override
    public void caseAStringTerm(AStringTerm node)
    {
        inAStringTerm(node);
        if(node.getString() != null) { node.getString().apply(this); }
        outAStringTerm(node);
    }

    public void inACall(ACall node) { defaultIn(node); }
    public void outACall(ACall node) { defaultOut(node); }

    @Override
    public void caseACall(ACall node)
    {
        inACall(node);
        if(node.getRightUnaryExp() != null) { node.getRightUnaryExp().apply(this); }
        if(node.getDot() != null) { node.getDot().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getArgs() != null) { node.getArgs().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        outACall(node);
    }

    public void inASelfCall(ASelfCall node) { defaultIn(node); }
    public void outASelfCall(ASelfCall node) { defaultOut(node); }

    @Override
    public void caseASelfCall(ASelfCall node)
    {
        inASelfCall(node);
        if(node.getId() != null) { node.getId().apply(this); }
        if(node.getLPar() != null) { node.getLPar().apply(this); }
        if(node.getArgs() != null) { node.getArgs().apply(this); }
        if(node.getRPar() != null) { node.getRPar().apply(this); }
        outASelfCall(node);
    }

    public void inAArgs(AArgs node) { defaultIn(node); }
    public void outAArgs(AArgs node) { defaultOut(node); }

    @Override
    public void caseAArgs(AArgs node)
    {
        inAArgs(node);
        if(node.getEol1() != null) { node.getEol1().apply(this); }
        if(node.getArg() != null) { node.getArg().apply(this); }
        {
            List<PAdditionalArg> copy = new ArrayList<PAdditionalArg>(node.getAdditionalArgs());
            for(PAdditionalArg e : copy) { e.apply(this); }
        }
        if(node.getEol2() != null) { node.getEol2().apply(this); }
        outAArgs(node);
    }

    public void inAAdditionalArg(AAdditionalArg node) { defaultIn(node); }
    public void outAAdditionalArg(AAdditionalArg node) { defaultOut(node); }

    @Override
    public void caseAAdditionalArg(AAdditionalArg node)
    {
        inAAdditionalArg(node);
        if(node.getComma() != null) { node.getComma().apply(this); }
        if(node.getEol() != null) { node.getEol().apply(this); }
        if(node.getArg() != null) { node.getArg().apply(this); }
        outAAdditionalArg(node);
    }

    public void inAArg(AArg node) { defaultIn(node); }
    public void outAArg(AArg node) { defaultOut(node); }

    @Override
    public void caseAArg(AArg node)
    {
        inAArg(node);
        if(node.getExp() != null) { node.getExp().apply(this); }
        outAArg(node);
    }
}
/* * Copyright (c) 2015 LingoChamp Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.liulishuo.filedownloader.download; import android.os.SystemClock; import com.liulishuo.filedownloader.connection.FileDownloadConnection; import com.liulishuo.filedownloader.exception.FileDownloadGiveUpRetryException; import com.liulishuo.filedownloader.exception.FileDownloadNetworkPolicyException; import com.liulishuo.filedownloader.services.FileDownloadDatabase; import com.liulishuo.filedownloader.stream.FileDownloadOutputStream; import com.liulishuo.filedownloader.util.FileDownloadLog; import com.liulishuo.filedownloader.util.FileDownloadUtils; import java.io.IOException; import java.io.InputStream; import static com.liulishuo.filedownloader.model.FileDownloadModel.TOTAL_VALUE_IN_CHUNKED_RESOURCE; /** * Fetch data from the provided connection. 
*/ public class FetchDataTask { static final int BUFFER_SIZE = 1024 * 4; private final ProcessCallback callback; private final int downloadId; private final int connectionIndex; private final DownloadRunnable hostRunnable; private final FileDownloadConnection connection; private final boolean isWifiRequired; private final long startOffset; private final long endOffset; private final long contentLength; private final String path; long currentOffset; private FileDownloadOutputStream outputStream; private volatile boolean paused; public void pause() { paused = true; } private FetchDataTask(FileDownloadConnection connection, ConnectionProfile connectionProfile, DownloadRunnable host, int id, int connectionIndex, boolean isWifiRequired, ProcessCallback callback, String path) { this.callback = callback; this.path = path; this.connection = connection; this.isWifiRequired = isWifiRequired; this.hostRunnable = host; this.connectionIndex = connectionIndex; this.downloadId = id; this.database = CustomComponentHolder.getImpl().getDatabaseInstance(); startOffset = connectionProfile.startOffset; endOffset = connectionProfile.endOffset; currentOffset = connectionProfile.currentOffset; contentLength = connectionProfile.contentLength; } public void run() throws IOException, IllegalAccessException, IllegalArgumentException, FileDownloadGiveUpRetryException { if (paused) return; final long contentLength = FileDownloadUtils.findContentLength(connectionIndex, connection); if (contentLength == 0) { throw new FileDownloadGiveUpRetryException(FileDownloadUtils. 
formatString("there isn't any content need to download on %d-%d with the content-length is 0", downloadId, connectionIndex)); } if (this.contentLength > 0 && contentLength != this.contentLength) { final String range; if (endOffset == 0) { range = FileDownloadUtils.formatString("range[%d-)", currentOffset); } else { range = FileDownloadUtils.formatString("range[%d-%d)", currentOffset, endOffset); } throw new FileDownloadGiveUpRetryException(FileDownloadUtils. formatString("require %s with contentLength(%d), but the " + "backend response contentLength is %d on downloadId[%d]-connectionIndex[%d]," + " please ask your backend dev to fix such problem.", range, this.contentLength, contentLength, downloadId, connectionIndex)); } final long fetchBeginOffset = currentOffset; // start fetch InputStream inputStream = null; FileDownloadOutputStream outputStream = null; try { final boolean isSupportSeek = CustomComponentHolder.getImpl().isSupportSeek(); if (hostRunnable != null && !isSupportSeek) { throw new IllegalAccessException("can't using multi-download when the output stream can't support seek"); } this.outputStream = outputStream = FileDownloadUtils.createOutputStream(path); if (isSupportSeek) { outputStream.seek(currentOffset); } if (FileDownloadLog.NEED_LOG) { FileDownloadLog.d(this, "start fetch(%d): range [%d, %d), seek to[%d]", connectionIndex, startOffset, endOffset, currentOffset); } inputStream = connection.getInputStream(); byte[] buff = new byte[BUFFER_SIZE]; if (paused) return; do { int byteCount = inputStream.read(buff); if (byteCount == -1) { break; } outputStream.write(buff, 0, byteCount); currentOffset += byteCount; // callback progress callback.onProgress(byteCount); checkAndSync(); // check status if (paused) return; if (isWifiRequired && FileDownloadUtils.isNetworkNotOnWifiType()) { throw new FileDownloadNetworkPolicyException(); } } while (true); } finally { if (inputStream != null) try { inputStream.close(); } catch (IOException e) { 
e.printStackTrace(); } try { if (outputStream != null) sync(); } finally { if (outputStream != null) try { outputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } final long fetchedLength = currentOffset - fetchBeginOffset; if (contentLength != TOTAL_VALUE_IN_CHUNKED_RESOURCE && contentLength != fetchedLength) { throw new FileDownloadGiveUpRetryException( FileDownloadUtils.formatString("fetched length[%d] != content length[%d]," + " range[%d, %d) offset[%d] fetch begin offset", fetchedLength, contentLength, startOffset, endOffset, currentOffset, fetchBeginOffset)); } // callback completed callback.onCompleted(hostRunnable, startOffset, endOffset); } private final FileDownloadDatabase database; private volatile long lastSyncBytes = 0; private volatile long lastSyncTimestamp = 0; private void checkAndSync() { final long now = SystemClock.elapsedRealtime(); final long bytesDelta = currentOffset - lastSyncBytes; final long timestampDelta = now - lastSyncTimestamp; if (FileDownloadUtils.isNeedSync(bytesDelta, timestampDelta)) { sync(); lastSyncBytes = currentOffset; lastSyncTimestamp = now; } } private void sync() { final long startTimestamp = SystemClock.uptimeMillis(); boolean bufferPersistToDevice; try { outputStream.flushAndSync(); bufferPersistToDevice = true; } catch (IOException e) { bufferPersistToDevice = false; if (FileDownloadLog.NEED_LOG) { FileDownloadLog.d(this, "Because of the system cannot guarantee that all " + "the buffers have been synchronized with physical media, or write to file " + "failed, we just not flushAndSync process to database too %s", e); } } if (bufferPersistToDevice) { final boolean isBelongMultiConnection = hostRunnable != null; if (isBelongMultiConnection) { // only need update the connection table. database.updateConnectionModel(downloadId, connectionIndex, currentOffset); } else { // only need update the filedownloader table. 
callback.syncProgressFromCache(); } if (FileDownloadLog.NEED_LOG) { FileDownloadLog.d(this, "require flushAndSync id[%d] index[%d] offset[%d], consume[%d]", downloadId, connectionIndex, currentOffset, SystemClock.uptimeMillis() - startTimestamp); } } } public static class Builder { DownloadRunnable downloadRunnable; FileDownloadConnection connection; ConnectionProfile connectionProfile; ProcessCallback callback; String path; Boolean isWifiRequired; Integer connectionIndex; Integer downloadId; public Builder setConnection(FileDownloadConnection connection) { this.connection = connection; return this; } public Builder setConnectionProfile(ConnectionProfile connectionProfile) { this.connectionProfile = connectionProfile; return this; } public Builder setCallback(ProcessCallback callback) { this.callback = callback; return this; } public Builder setPath(String path) { this.path = path; return this; } public Builder setWifiRequired(boolean wifiRequired) { isWifiRequired = wifiRequired; return this; } public Builder setHost(DownloadRunnable downloadRunnable) { this.downloadRunnable = downloadRunnable; return this; } public Builder setConnectionIndex(int connectionIndex) { this.connectionIndex = connectionIndex; return this; } public Builder setDownloadId(int downloadId) { this.downloadId = downloadId; return this; } public FetchDataTask build() throws IllegalArgumentException { if (isWifiRequired == null || connection == null || connectionProfile == null || callback == null || path == null || downloadId == null || connectionIndex == null) throw new IllegalArgumentException(); return new FetchDataTask(connection, connectionProfile, downloadRunnable, downloadId, connectionIndex, isWifiRequired, callback, path); } } }
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.build.bom.bomr; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.stream.Collectors; import org.apache.maven.artifact.versioning.DefaultArtifactVersion; import org.gradle.api.InvalidUserDataException; import org.gradle.api.internal.tasks.userinput.UserInputHandler; import org.springframework.boot.build.bom.Library; import org.springframework.boot.build.bom.Library.DependencyVersions; import org.springframework.boot.build.bom.Library.Group; import org.springframework.boot.build.bom.Library.Module; import org.springframework.boot.build.bom.Library.ProhibitedVersion; import org.springframework.boot.build.bom.Library.VersionAlignment; import org.springframework.boot.build.bom.UpgradePolicy; import org.springframework.boot.build.bom.bomr.version.DependencyVersion; import org.springframework.util.StringUtils; /** * Interactive {@link UpgradeResolver} that uses command line input to choose the upgrades * to apply. 
*
 * @author Andy Wilkinson
 */
public final class InteractiveUpgradeResolver implements UpgradeResolver {

    private final VersionResolver versionResolver;

    private final UpgradePolicy upgradePolicy;

    private final UserInputHandler userInputHandler;

    InteractiveUpgradeResolver(VersionResolver versionResolver, UpgradePolicy upgradePolicy,
            UserInputHandler userInputHandler) {
        this.versionResolver = versionResolver;
        this.upgradePolicy = upgradePolicy;
        this.userInputHandler = userInputHandler;
    }

    /**
     * Resolves an upgrade for every library except "Spring Boot" itself; libraries for
     * which no newer permitted version exists (or for which the user keeps the current
     * version) are omitted from the result.
     */
    @Override
    public List<Upgrade> resolveUpgrades(Collection<Library> libraries) {
        // Index by name so version-aligned libraries can look up the library they align with.
        Map<String, Library> librariesByName = new HashMap<>();
        for (Library library : libraries) {
            librariesByName.put(library.getName(), library);
        }
        return libraries.stream().filter((library) -> !library.getName().equals("Spring Boot"))
                .map((library) -> resolveUpgrade(library, librariesByName)).filter((upgrade) -> upgrade != null)
                .collect(Collectors.toList());
    }

    /**
     * Asks the user to pick among the candidate versions for {@code library}.
     * Returns {@code null} when there are no candidates or the user keeps the current
     * version (detected via equals against the {@code current} option instance).
     */
    private Upgrade resolveUpgrade(Library library, Map<String, Library> libraries) {
        List<VersionOption> versionOptions = getVersionOptions(library, libraries);
        if (versionOptions.isEmpty()) {
            return null;
        }
        VersionOption current = new VersionOption(library.getVersion().getVersion());
        VersionOption selected = this.userInputHandler
                .selectOption(library.getName() + " " + library.getVersion().getVersion(), versionOptions, current);
        return (selected.equals(current)) ? null : new Upgrade(library, selected.version);
    }

    // Version-aligned libraries take their single candidate from the library they align
    // with; all others get the resolved later versions of their own modules.
    private List<VersionOption> getVersionOptions(Library library, Map<String, Library> libraries) {
        if (library.getVersion().getVersionAlignment() != null) {
            return determineAlignedVersionOption(library, libraries);
        }
        return determineResolvedVersionOptions(library);
    }

    /**
     * Collects, per module/bom/plugin, the versions newer than the library's current
     * version, drops prohibited ones, and wraps each distinct candidate together with the
     * list of modules that do not publish it.
     */
    private List<VersionOption> determineResolvedVersionOptions(Library library) {
        Map<String, SortedSet<DependencyVersion>> moduleVersions = new LinkedHashMap<>();
        DependencyVersion libraryVersion = library.getVersion().getVersion();
        for (Group group : library.getGroups()) {
            for (Module module : group.getModules()) {
                moduleVersions.put(group.getId() + ":" + module.getName(),
                        getLaterVersionsForModule(group.getId(), module.getName(), libraryVersion));
            }
            for (String bom : group.getBoms()) {
                moduleVersions.put(group.getId() + ":" + bom,
                        getLaterVersionsForModule(group.getId(), bom, libraryVersion));
            }
            for (String plugin : group.getPlugins()) {
                moduleVersions.put(group.getId() + ":" + plugin,
                        getLaterVersionsForModule(group.getId(), plugin, libraryVersion));
            }
        }
        List<DependencyVersion> allVersions = moduleVersions.values().stream().flatMap(SortedSet::stream).distinct()
                .filter((dependencyVersion) -> isPermitted(dependencyVersion, library.getProhibitedVersions()))
                .collect(Collectors.toList());
        if (allVersions.isEmpty()) {
            return Collections.emptyList();
        }
        return allVersions.stream()
                .map((version) -> new ResolvedVersionOption(version, getMissingModules(moduleVersions, version)))
                .collect(Collectors.toList());
    }

    /**
     * Produces at most one option: the version the alignment library uses.
     *
     * @throws InvalidUserDataException if the aligned version is prohibited
     */
    private List<VersionOption> determineAlignedVersionOption(Library library, Map<String, Library> libraries) {
        VersionOption alignedVersionOption = alignedVersionOption(library, libraries);
        if (alignedVersionOption == null) {
            return Collections.emptyList();
        }
        if (!isPermitted(alignedVersionOption.version, library.getProhibitedVersions())) {
            throw new InvalidUserDataException("Version alignment failed. Version " + alignedVersionOption.version
                    + " from " + library.getName() + " is prohibited");
        }
        return Collections.singletonList(alignedVersionOption);
    }

    /**
     * Determines the version the alignment library uses for this library's modules.
     * Returns {@code null} when the alignment library's dependency versions are not yet
     * available or when the aligned version equals the current version.
     *
     * @throws InvalidUserDataException if the alignment library defines no dependency
     * versions, covers none of this library's modules, or uses multiple versions of them
     */
    private VersionOption alignedVersionOption(Library library, Map<String, Library> libraries) {
        VersionAlignment versionAlignment = library.getVersion().getVersionAlignment();
        Library alignmentLibrary = libraries.get(versionAlignment.getLibraryName());
        DependencyVersions dependencyVersions = alignmentLibrary.getDependencyVersions();
        if (dependencyVersions == null) {
            throw new InvalidUserDataException("Cannot align with library '" + versionAlignment.getLibraryName()
                    + "' as it does not define any dependency versions");
        }
        if (!dependencyVersions.available()) {
            return null;
        }
        Set<String> versions = new HashSet<>();
        for (Group group : library.getGroups()) {
            for (Module module : group.getModules()) {
                String version = dependencyVersions.getVersion(group.getId(), module.getName());
                if (version != null) {
                    versions.add(version);
                }
            }
        }
        if (versions.isEmpty()) {
            throw new InvalidUserDataException("Cannot align with library '" + versionAlignment.getLibraryName()
                    + "' as its dependency versions do not include any of this library's modules");
        }
        if (versions.size() > 1) {
            throw new InvalidUserDataException("Cannot align with library '" + versionAlignment.getLibraryName()
                    + "' as it uses multiple different versions of this library's modules");
        }
        DependencyVersion version = DependencyVersion.parse(versions.iterator().next());
        return library.getVersion().getVersion().equals(version) ? null
                : new AlignedVersionOption(version, alignmentLibrary);
    }

    // A version is permitted unless it falls inside one of the prohibited ranges.
    private boolean isPermitted(DependencyVersion dependencyVersion, List<ProhibitedVersion> prohibitedVersions) {
        if (prohibitedVersions.isEmpty()) {
            return true;
        }
        for (ProhibitedVersion prohibitedVersion : prohibitedVersions) {
            if (prohibitedVersion.getRange()
                    .containsVersion(new DefaultArtifactVersion(dependencyVersion.toString()))) {
                return false;
            }
        }
        return true;
    }

    // Names the modules that do not publish the given candidate version.
    private List<String> getMissingModules(Map<String, SortedSet<DependencyVersion>> moduleVersions,
            DependencyVersion version) {
        List<String> missingModules = new ArrayList<>();
        moduleVersions.forEach((name, versions) -> {
            if (!versions.contains(version)) {
                missingModules.add(name);
            }
        });
        return missingModules;
    }

    // Resolves all published versions of the module and keeps only those the upgrade
    // policy accepts relative to the current version.
    private SortedSet<DependencyVersion> getLaterVersionsForModule(String groupId, String artifactId,
            DependencyVersion currentVersion) {
        SortedSet<DependencyVersion> versions = this.versionResolver.resolveVersions(groupId, artifactId);
        versions.removeIf((candidate) -> !this.upgradePolicy.test(candidate, currentVersion));
        return versions;
    }

    /**
     * A selectable version. NOTE(review): equals/hashCode are not overridden, so
     * {@code resolveUpgrade} relies on identity comparison against the {@code current}
     * instance it passed to the input handler.
     */
    private static class VersionOption {

        private final DependencyVersion version;

        protected VersionOption(DependencyVersion version) {
            this.version = version;
        }

        @Override
        public String toString() {
            return this.version.toString();
        }

    }

    /** Option sourced from the library this library's version is aligned with. */
    private static final class AlignedVersionOption extends VersionOption {

        private final Library alignedWith;

        private AlignedVersionOption(DependencyVersion version, Library alignedWith) {
            super(version);
            this.alignedWith = alignedWith;
        }

        @Override
        public String toString() {
            return super.toString() + " (aligned with " + this.alignedWith.getName() + " "
                    + this.alignedWith.getVersion().getVersion() + ")";
        }

    }

    /** Option resolved from repositories, annotated with any modules missing that version. */
    private static final class ResolvedVersionOption extends VersionOption {

        private final List<String> missingModules;

        private ResolvedVersionOption(DependencyVersion version, List<String> missingModules) {
            super(version);
            this.missingModules = missingModules;
        }

        @Override
        public String toString() {
            if (this.missingModules.isEmpty()) {
                return super.toString();
            }
            return super.toString() + " (some modules are missing: "
                    + StringUtils.collectionToDelimitedString(this.missingModules, ", ") + ")";
        }

    }

}
package com.planet_ink.coffee_mud.Races; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2012-2016 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class Slime extends StdRace { @Override public String ID() { return "Slime"; } private final static String localizedStaticName = CMLib.lang().L("Slime"); @Override public String name() { return localizedStaticName; } @Override public int shortestMale() { return 24; } @Override public int shortestFemale() { return 24; } @Override public int heightVariance() { return 12; } @Override public int lightestWeight() { return 80; } @Override public int weightVariance() { return 80; } @Override public long forbiddenWornBits() { return 0; } private final static String localizedStaticRacialCat = CMLib.lang().L("Slime"); @Override public String racialCategory() { return localizedStaticRacialCat; } @Override public boolean fertile() { return false; } // an ey ea he ne ar ha to le fo no gi mo wa ta wi private static final int[] parts={-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1}; @Override public int[] bodyMask() { return parts; } private final int[] agingChart = { 0, 0, 0, 0, 0, YEARS_AGE_LIVES_FOREVER, YEARS_AGE_LIVES_FOREVER, YEARS_AGE_LIVES_FOREVER, YEARS_AGE_LIVES_FOREVER }; @Override public int[] getAgingChart() { return agingChart; } protected static Vector<RawMaterial> resources = new Vector<RawMaterial>(); @Override public int availabilityCode() { return Area.THEME_FANTASY | Area.THEME_SKILLONLYMASK; } @Override public void affectPhyStats(Physical affected, PhyStats affectableStats) { super.affectPhyStats(affected,affectableStats); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_SEE_DARK); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_TASTE); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_TRACK); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_HEAR); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_SMELL); affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_SPEAK); 
affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_TASTE); } @Override public void affectCharStats(MOB affectedMOB, CharStats affectableStats) { affectableStats.setStat(CharStats.STAT_GENDER,'N'); affectableStats.setRacialStat(CharStats.STAT_INTELLIGENCE,1); affectableStats.setRacialStat(CharStats.STAT_WISDOM,1); affectableStats.setRacialStat(CharStats.STAT_CHARISMA,1); affectableStats.setStat(CharStats.STAT_SAVE_POISON,affectableStats.getStat(CharStats.STAT_SAVE_POISON)+100); affectableStats.setStat(CharStats.STAT_SAVE_COLD,affectableStats.getStat(CharStats.STAT_SAVE_COLD)-100); affectableStats.setStat(CharStats.STAT_SAVE_MIND,affectableStats.getStat(CharStats.STAT_SAVE_MIND)+100); affectableStats.setStat(CharStats.STAT_SAVE_GAS,affectableStats.getStat(CharStats.STAT_SAVE_GAS)+100); affectableStats.setStat(CharStats.STAT_SAVE_PARALYSIS,affectableStats.getStat(CharStats.STAT_SAVE_PARALYSIS)+100); affectableStats.setStat(CharStats.STAT_SAVE_UNDEAD,affectableStats.getStat(CharStats.STAT_SAVE_UNDEAD)+100); affectableStats.setStat(CharStats.STAT_SAVE_DISEASE,affectableStats.getStat(CharStats.STAT_SAVE_DISEASE)+100); } @Override public String arriveStr() { return "slides in"; } @Override public String leaveStr() { return "slides"; } @Override public Weapon myNaturalWeapon() { if(naturalWeapon==null) { naturalWeapon=CMClass.getWeapon("StdWeapon"); naturalWeapon.setName(L("a slimy protrusion")); naturalWeapon.setRanges(0,5); naturalWeapon.setMaterial(RawMaterial.RESOURCE_SLIME); naturalWeapon.setUsesRemaining(1000); naturalWeapon.setWeaponDamageType(Weapon.TYPE_MELTING); } return naturalWeapon; } @Override public String healthText(MOB viewer, MOB mob) { final double pct=(CMath.div(mob.curState().getHitPoints(),mob.maxState().getHitPoints())); if(pct<.10) return L("^r@x1^r is unstable and almost disintegrated!^N",mob.name(viewer)); else if(pct<.20) return L("^r@x1^r is nearing disintegration.^N",mob.name(viewer)); else if(pct<.30) return L("^r@x1^r 
is noticeably disintegrating.^N",mob.name(viewer)); else if(pct<.40) return L("^y@x1^y is very damaged and slightly disintegrated.^N",mob.name(viewer)); else if(pct<.50) return L("^y@x1^y is very damaged.^N",mob.name(viewer)); else if(pct<.60) return L("^p@x1^p is starting to show major damage.^N",mob.name(viewer)); else if(pct<.70) return L("^p@x1^p is definitely damaged.^N",mob.name(viewer)); else if(pct<.80) return L("^g@x1^g is disheveled and mildly damaged.^N",mob.name(viewer)); else if(pct<.90) return L("^g@x1^g is noticeably disheveled.^N",mob.name(viewer)); else if(pct<.99) return L("^g@x1^g is slightly disheveled.^N",mob.name(viewer)); else return L("^c@x1^c is in perfect condition.^N",mob.name(viewer)); } @Override public void executeMsg(final Environmental myHost, final CMMsg msg) { super.executeMsg(myHost,msg); if(myHost instanceof MOB) { if((msg.amITarget(myHost)) &&(msg.targetMinor()==CMMsg.TYP_DAMAGE) &&(msg.tool() instanceof Weapon) &&(msg.source()!=myHost) &&(msg.source().rangeToTarget()==0) &&(!((MOB)myHost).amDead())) { if(((((Weapon)msg.tool()).material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL) &&(msg.source().getVictim()==myHost) &&(((Weapon)msg.tool()).subjectToWearAndTear()) &&(CMLib.dice().rollPercentage()<20)) CMLib.combat().postItemDamage(msg.source(), (Item)msg.tool(), null, 10, CMMsg.TYP_ACID,"<T-NAME> sizzle(s)!"); if(((((Weapon)msg.tool()).weaponDamageType()==Weapon.TYPE_PIERCING)||(((Weapon)msg.tool()).weaponDamageType()==Weapon.TYPE_SHOOT)) &&(msg.value()>0)) msg.setValue((int)Math.round((msg.value())*.85)); } } } @Override public DeadBody getCorpseContainer(MOB mob, Room room) { final DeadBody body = super.getCorpseContainer(mob, room); if(body != null) { body.setMaterial(RawMaterial.RESOURCE_SLIME); } return body; } @Override public List<RawMaterial> myResources() { synchronized(resources) { if(resources.size()==0) { resources.addElement(makeResource (L("a @x1 bit",name().toLowerCase()),RawMaterial.RESOURCE_SLIME)); 
} } return resources; } }
/**************************************************************************** * Copyright (C) 2012 ecsec GmbH. * All rights reserved. * Contact: ecsec GmbH (info@ecsec.de) * * This file is part of the Open eCard App. * * GNU General Public License Usage * This file may be used under the terms of the GNU General Public * License version 3.0 as published by the Free Software Foundation * and appearing in the file LICENSE.GPL included in the packaging of * this file. Please review the following information to ensure the * GNU General Public License version 3.0 requirements will be met: * http://www.gnu.org/copyleft/gpl.html. * * Other Usage * Alternatively, this file may be used in accordance with the terms * and conditions contained in a signed written agreement between * you and ecsec GmbH. * ***************************************************************************/ package org.openecard.crypto.common.asn1.cvc; import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Implements a chain of Card Verifiable Certificates. * See BSI-TR-03110, version 2.10, part 3, section 2. * See BSI-TR-03110, version 2.10, part 3, section C. * * @author Moritz Horsch <horsch@cdc.informatik.tu-darmstadt.de> */ public class CardVerifiableCertificateChain { private static final Logger _logger = LoggerFactory.getLogger(CertificateDescription.class); private ArrayList<CardVerifiableCertificate> certs = new ArrayList<CardVerifiableCertificate>(); private ArrayList<CardVerifiableCertificate> cvcaCerts = new ArrayList<CardVerifiableCertificate>(); private ArrayList<CardVerifiableCertificate> dvCerts = new ArrayList<CardVerifiableCertificate>(); private ArrayList<CardVerifiableCertificate> terminalCerts = new ArrayList<CardVerifiableCertificate>(); /** * Creates a new certificate chain. 
* * @param certificates Certificates * @throws CertificateException */ public CardVerifiableCertificateChain(List<CardVerifiableCertificate> certificates) throws CertificateException { parseChain(certificates); // FIXME not working yet with all servers. // verify(); _logger.warn("Verification of the certificate chain is disabled."); } /** * Parses the certificate chain. * * @param certificates Certificates */ private void parseChain(List<CardVerifiableCertificate> certificates) throws CertificateException { for (CardVerifiableCertificate cvc : certificates) { if (containsChertificate(cvc)) { break; } CHAT.Role role = cvc.getCHAT().getRole(); if (role.equals(CHAT.Role.CVCA)) { cvcaCerts.add(cvc); certs.add(cvc); } else if (role.equals(CHAT.Role.DV_OFFICIAL) || role.equals(CHAT.Role.DV_NON_OFFICIAL)) { dvCerts.add(cvc); certs.add(cvc); } else if (role.equals(CHAT.Role.AUTHENTICATION_TERMINAL) || role.equals(CHAT.Role.INSPECTION_TERMINAL) || role.equals(CHAT.Role.SIGNATURE_TERMINAL)) { terminalCerts.add(cvc); certs.add(cvc); } else { throw new CertificateException("Malformed certificate."); } } } /** * Verifies the certificate chain. * [1] The CAR and the CHR of the CVCA certificates should be equal. * [2] The CAR of a DV certificate should refer to the CHR of a CVCA certificate. * [3] The CAR of a terminal certificate should refer to the CHR of a DV certificate. 
* * @throws CertificateException */ private void verify() throws CertificateException { verify(terminalCerts, dvCerts); verify(dvCerts, cvcaCerts); verify(cvcaCerts, cvcaCerts); } private void verify(List<CardVerifiableCertificate> authorities, List<CardVerifiableCertificate> holders) throws CertificateException { for (Iterator<CardVerifiableCertificate> ai = authorities.iterator(); ai.hasNext();) { CardVerifiableCertificate authority = ai.next(); for (Iterator<CardVerifiableCertificate> hi = holders.iterator(); hi.hasNext();) { CardVerifiableCertificate holder = hi.next(); if (authority.getCAR().equals(holder.getCHR())) { break; } if (!ai.hasNext()) { throw new CertificateException( "Malformed certificate chain: Cannot find a CHR for the CAR (" + authority.getCAR().toString() + ")."); } } } } /** * Checks if the certificate chain contains the given certificate. * * @param cvc Certificate * @return True if the chain contains the certificate, false otherwise */ public boolean containsChertificate(CardVerifiableCertificate cvc) { for (CardVerifiableCertificate c : certs) { if (c.compare(cvc)) { return true; } } return false; } /** * Adds a new certificate to the chain. * * @param certificate Certificate * @throws CertificateException */ public void addCertificate(final CardVerifiableCertificate certificate) throws CertificateException { parseChain(new LinkedList<CardVerifiableCertificate>() { { add(certificate); } }); } /** * Adds new certificates to the chain. * * @param certificates Certificate * @throws CertificateException */ public void addCertificates(ArrayList<CardVerifiableCertificate> certificates) throws CertificateException { parseChain(certificates); } /** * Returns the certificates of the Country Verifying CAs (CVCA). * * @return CVCA certificates */ public List<CardVerifiableCertificate> getCVCACertificates() { return cvcaCerts; } /** * Returns the certificates of the Document Verifiers (DV). 
* * @return DV certificates */ public List<CardVerifiableCertificate> getDVCertificates() { return dvCerts; } /** * Returns the certificates of the terminal. * * @return Terminal certificates */ public List<CardVerifiableCertificate> getTerminalCertificates() { return terminalCerts; } /** * Returns the certificate chain. * * @return Certificate chain */ public List<CardVerifiableCertificate> getCertificates() { return certs; } /** * Returns the certificate chain from the CAR. * * @param car Certification Authority Reference (CAR) * @return Certificate chain * @throws CertificateException */ public CardVerifiableCertificateChain getCertificateChainFromCAR(byte[] car) throws CertificateException { return getCertificateChainFromCAR(new PublicKeyReference(car)); } /** * Returns the certificate chain from the CAR. * * @param car Certification Authority Reference (CAR) * @return Certificate chain * @throws CertificateException */ public CardVerifiableCertificateChain getCertificateChainFromCAR(PublicKeyReference car) throws CertificateException { List<CardVerifiableCertificate> certChain = buildChain(certs, car); return new CardVerifiableCertificateChain(certChain); } private ArrayList<CardVerifiableCertificate> buildChain(ArrayList<CardVerifiableCertificate> certs, PublicKeyReference car) { ArrayList<CardVerifiableCertificate> certChain = new ArrayList<CardVerifiableCertificate>(); for (CardVerifiableCertificate c : certs) { if (c.getCAR().compare(car)) { certChain.add(c); certChain.addAll(buildChain(certs, c.getCHR())); } } return certChain; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.text;

import java.text.Normalizer;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.util.Matrix;

/**
 * This represents a string and a position on the screen of those characters.
 *
 * @author Ben Litchfield
 */
public final class TextPosition
{
    private static final Log LOG = LogFactory.getLog(TextPosition.class);

    // lookup table mapping non-combining diacritic code points to their combining equivalents
    private static final Map<Integer, String> DIACRITICS = createDiacritics();

    // Adds non-decomposing diacritics to the hash with their related combining character.
    // These are values that the unicode spec claims are equivalent but are not mapped in the form
    // NFKC normalization method. Determined by going through the Combining Diacritical Marks
    // section of the Unicode spec and identifying which characters are not mapped to by the
    // normalization.
    private static Map<Integer, String> createDiacritics()
    {
        Map<Integer, String> map = new HashMap<Integer, String>(31);
        map.put(0x0060, "\u0300");
        map.put(0x02CB, "\u0300");
        map.put(0x0027, "\u0301");
        map.put(0x02B9, "\u0301");
        map.put(0x02CA, "\u0301");
        map.put(0x005e, "\u0302");
        map.put(0x02C6, "\u0302");
        map.put(0x007E, "\u0303");
        map.put(0x02C9, "\u0304");
        map.put(0x00B0, "\u030A");
        map.put(0x02BA, "\u030B");
        map.put(0x02C7, "\u030C");
        map.put(0x02C8, "\u030D");
        map.put(0x0022, "\u030E");
        map.put(0x02BB, "\u0312");
        map.put(0x02BC, "\u0313");
        map.put(0x0486, "\u0313");
        map.put(0x055A, "\u0313");
        map.put(0x02BD, "\u0314");
        map.put(0x0485, "\u0314");
        map.put(0x0559, "\u0314");
        map.put(0x02D4, "\u031D");
        map.put(0x02D5, "\u031E");
        map.put(0x02D6, "\u031F");
        map.put(0x02D7, "\u0320");
        map.put(0x02B2, "\u0321");
        map.put(0x02CC, "\u0329");
        map.put(0x02B7, "\u032B");
        map.put(0x02CD, "\u0331");
        map.put(0x005F, "\u0332");
        map.put(0x204E, "\u0359");
        return map;
    }

    // text matrix for the start of the text object, coordinates are in display units
    // and have not been adjusted
    private final Matrix textMatrix;

    // ending X and Y coordinates in display units
    private final float endX;
    private final float endY;

    private final float maxHeight; // maximum height of text, in display units
    private final int rotation; // 0, 90, 180, 270 degrees of page rotation
    private final float x;
    private final float y;
    private final float pageHeight;
    private final float pageWidth;

    private final float widthOfSpace; // width of a space, in display units
    private final int[] charCodes; // internal PDF character codes
    private final PDFont font;
    private final float fontSize;
    private final int fontSizePt; // font size in pt; stored as int, returned as float

    // mutable
    private float[] widths; // per-character widths; grows when a diacritic is merged in
    private String unicode; // displayed text; diacritics may be appended by mergeDiacritic()
    private float direction = -1; // cached result of getDir(); -1 means "not yet computed"

    /**
     * Constructor.
     *
     * @param pageRotation rotation of the page that the text is located in
     * @param pageWidth width of the page that the text is located in
     * @param pageHeight height of the page that the text is located in
     * @param textMatrix TextMatrix for start of text (in display units)
     * @param endX x coordinate of the end position
     * @param endY y coordinate of the end position
     * @param maxHeight Maximum height of text (in display units)
     * @param individualWidth The width of the given character/string. (in text units)
     * @param spaceWidth The width of the space character. (in display units)
     * @param unicode The string of Unicode characters to be displayed.
     * @param charCodes An array of the internal PDF character codes for the glyphs in this text.
     * @param font The current font for this text position.
     * @param fontSize The new font size.
     * @param fontSizeInPt The font size in pt units.
     */
    public TextPosition(int pageRotation, float pageWidth, float pageHeight, Matrix textMatrix,
                        float endX, float endY, float maxHeight, float individualWidth,
                        float spaceWidth, String unicode, int[] charCodes, PDFont font,
                        float fontSize, int fontSizeInPt)
    {
        this.textMatrix = textMatrix;

        this.endX = endX;
        this.endY = endY;

        int rotationAngle = pageRotation;
        this.rotation = rotationAngle;

        this.maxHeight = maxHeight;
        this.pageHeight = pageHeight;
        this.pageWidth = pageWidth;

        this.widths = new float[] { individualWidth };
        this.widthOfSpace = spaceWidth;
        this.unicode = unicode;
        this.charCodes = charCodes;
        this.font = font;
        this.fontSize = fontSize;
        this.fontSizePt = fontSizeInPt;

        // precompute the page-rotation-adjusted x/y with 0,0 at the upper left;
        // for 90/270 rotation the page's width and height swap roles
        x = getXRot(rotationAngle);
        if (rotationAngle == 0 || rotationAngle == 180)
        {
            y = this.pageHeight - getYLowerLeftRot(rotationAngle);
        }
        else
        {
            y = this.pageWidth - getYLowerLeftRot(rotationAngle);
        }
    }

    /**
     * Return the string of characters stored in this object.
     *
     * @return The string on the screen.
     */
    public String getUnicode()
    {
        return unicode;
    }

    /**
     * Return the internal PDF character codes of the glyphs in this text.
     *
     * @return an array of internal PDF character codes
     */
    public int[] getCharacterCodes()
    {
        return charCodes;
    }

    /**
     * Return the text matrix stored in this object.
     *
     * @return The Matrix containing the starting text position
     */
    public Matrix getTextMatrix()
    {
        return textMatrix;
    }

    /**
     * Return the direction/orientation of the string in this object based on its text matrix.
     * The result is computed once from the text matrix and cached in {@link #direction}.
     *
     * @return The direction of the text (0, 90, 180, or 270)
     */
    public float getDir()
    {
        if (direction < 0)
        {
            float a = textMatrix.getScaleY();
            float b = textMatrix.getShearY();
            float c = textMatrix.getShearX();
            float d = textMatrix.getScaleX();

            // the comments show the approximate matrix shape each branch matches
            // 12 0   left to right
            // 0 12
            if (a > 0 && Math.abs(b) < d && Math.abs(c) < a && d > 0)
            {
                direction = 0;
            }
            // -12 0   right to left (upside down)
            // 0 -12
            else if (a < 0 && Math.abs(b) < Math.abs(d) && Math.abs(c) < Math.abs(a) && d < 0)
            {
                direction = 180;
            }
            // 0 12    up
            // -12 0
            else if (Math.abs(a) < Math.abs(c) && b > 0 && c < 0 && Math.abs(d) < b)
            {
                direction = 90;
            }
            // 0 -12   down
            // 12 0
            else if (Math.abs(a) < c && b < 0 && c > 0 && Math.abs(d) < Math.abs(b))
            {
                direction = 270;
            }
            else
            {
                // anything else (e.g. skewed text) is treated as left-to-right
                direction = 0;
            }
        }
        return direction;
    }

    /**
     * Return the X starting coordinate of the text, adjusted by the given rotation amount.
     * The rotation adjusts where the 0,0 location is relative to the text.
     *
     * @param rotation Rotation to apply (0, 90, 180, or 270). 0 will perform no adjustments.
     * @return X coordinate
     */
    private float getXRot(float rotation)
    {
        if (rotation == 0)
        {
            return textMatrix.getTranslateX();
        }
        else if (rotation == 90)
        {
            return textMatrix.getTranslateY();
        }
        else if (rotation == 180)
        {
            return pageWidth - textMatrix.getTranslateX();
        }
        else if (rotation == 270)
        {
            return pageHeight - textMatrix.getTranslateY();
        }
        return 0;
    }

    /**
     * This will get the page rotation adjusted x position of the character.
     * This is adjusted based on page rotation so that the upper left is 0,0.
     *
     * @return The x coordinate of the character.
     */
    public float getX()
    {
        return x;
    }

    /**
     * This will get the text direction adjusted x position of the character.
     * This is adjusted based on text direction so that the first character
     * in that direction is in the upper left at 0,0.
     *
     * @return The x coordinate of the text.
     */
    public float getXDirAdj()
    {
        return getXRot(getDir());
    }

    /**
     * This will get the y position of the character with 0,0 in lower left.
     * This will be adjusted by the given rotation.
     *
     * @param rotation Rotation to apply to text to adjust the 0,0 location (0,90,180,270)
     * @return The y coordinate of the text
     */
    private float getYLowerLeftRot(float rotation)
    {
        if (rotation == 0)
        {
            return textMatrix.getTranslateY();
        }
        else if (rotation == 90)
        {
            return pageWidth - textMatrix.getTranslateX();
        }
        else if (rotation == 180)
        {
            return pageHeight - textMatrix.getTranslateY();
        }
        else if (rotation == 270)
        {
            return textMatrix.getTranslateX();
        }
        return 0;
    }

    /**
     * This will get the y position of the text, adjusted so that 0,0 is upper left and it is
     * adjusted based on the page rotation.
     *
     * @return The adjusted y coordinate of the character.
     */
    public float getY()
    {
        return y;
    }

    /**
     * This will get the y position of the text, adjusted so that 0,0 is upper left and it is
     * adjusted based on the text direction.
     *
     * @return The adjusted y coordinate of the character.
     */
    public float getYDirAdj()
    {
        float dir = getDir();
        // some PDFBox code assumes that the 0,0 point is in upper left, not lower left
        if (dir == 0 || dir == 180)
        {
            return pageHeight - getYLowerLeftRot(dir);
        }
        else
        {
            return pageWidth - getYLowerLeftRot(dir);
        }
    }

    /**
     * Get the length or width of the text, based on a given rotation.
     *
     * @param rotation Rotation that was used to determine coordinates (0,90,180,270)
     * @return Width of text in display units
     */
    private float getWidthRot(float rotation)
    {
        // for vertical text the extent along Y is the "width"; otherwise along X
        if (rotation == 90 || rotation == 270)
        {
            return Math.abs(endY - textMatrix.getTranslateY());
        }
        else
        {
            return Math.abs(endX - textMatrix.getTranslateX());
        }
    }

    /**
     * This will get the width of the string when page rotation adjusted coordinates are used.
     *
     * @return The width of the text in display units.
     */
    public float getWidth()
    {
        return getWidthRot(rotation);
    }

    /**
     * This will get the width of the string when text direction adjusted coordinates are used.
     *
     * @return The width of the text in display units.
     */
    public float getWidthDirAdj()
    {
        return getWidthRot(getDir());
    }

    /**
     * This will get the maximum height of all characters in this string.
     *
     * @return The maximum height of all characters in this string.
     */
    public float getHeight()
    {
        return maxHeight;
    }

    /**
     * This will get the maximum height of all characters in this string.
     *
     * @return The maximum height of all characters in this string.
     */
    public float getHeightDir()
    {
        // this is not really a rotation-dependent calculation, but this is defined for symmetry
        return maxHeight;
    }

    /**
     * This will get the font size that this object is suppose to be drawn at.
     *
     * @return The font size.
     */
    public float getFontSize()
    {
        return fontSize;
    }

    /**
     * This will get the font size in pt. To get this size we have to multiply the pdf-fontsize
     * and the scaling from the textmatrix
     *
     * @return The font size in pt.
     */
    public float getFontSizeInPt()
    {
        return fontSizePt;
    }

    /**
     * This will get the font for the text being drawn.
     *
     * @return The font size.
     */
    public PDFont getFont()
    {
        return font;
    }

    /**
     * This will get the width of a space character. This is useful for some algorithms such as the
     * text stripper, that need to know the width of a space character.
     *
     * @return The width of a space character.
     */
    public float getWidthOfSpace()
    {
        return widthOfSpace;
    }

    /**
     * @return Returns the xScale.
     */
    public float getXScale()
    {
        return textMatrix.getScalingFactorX();
    }

    /**
     * @return Returns the yScale.
     */
    public float getYScale()
    {
        return textMatrix.getScalingFactorY();
    }

    /**
     * Get the widths of each individual character.
     *
     * @return An array that is the same length as the length of the string.
     */
    public float[] getIndividualWidths()
    {
        return widths;
    }

    /**
     * Determine if this TextPosition logically contains another (i.e. they overlap and should be
     * rendered on top of each other).
     *
     * @param tp2 The other TestPosition to compare against
     * @return True if tp2 is contained in the bounding box of this text.
     */
    public boolean contains(TextPosition tp2)
    {
        double thisXstart = getXDirAdj();
        double thisWidth = getWidthDirAdj();
        double thisXend = thisXstart + thisWidth;

        double tp2Xstart = tp2.getXDirAdj();
        double tp2Xend = tp2Xstart + tp2.getWidthDirAdj();

        // no X overlap at all so return as soon as possible
        if (tp2Xend <= thisXstart || tp2Xstart >= thisXend)
        {
            return false;
        }

        // no Y overlap at all so return as soon as possible. Note: 0.0 is in the upper left and
        // y-coordinate is top of TextPosition
        double thisYstart = getYDirAdj();
        double tp2Ystart = tp2.getYDirAdj();
        if (tp2Ystart + tp2.getHeightDir() < thisYstart ||
            tp2Ystart > thisYstart + getHeightDir())
        {
            return false;
        }
        // we're going to calculate the percentage of overlap, if its less than a 15% x-coordinate
        // overlap then we'll return false because its negligible, .15 was determined by trial and
        // error in the regression test files
        else if (tp2Xstart > thisXstart && tp2Xend > thisXend)
        {
            double overlap = thisXend - tp2Xstart;
            double overlapPercent = overlap/thisWidth;
            return overlapPercent > .15;
        }
        else if (tp2Xstart < thisXstart && tp2Xend < thisXend)
        {
            double overlap = tp2Xend - thisXstart;
            double overlapPercent = overlap/thisWidth;
            return overlapPercent > .15;
        }
        return true;
    }

    /**
     * Merge a single character TextPosition into the current object. This is to be used only for
     * cases where we have a diacritic that overlaps an existing TextPosition. In a graphical
     * display, we could overlay them, but for text extraction we need to merge them. Use the
     * contains() method to test if two objects overlap.
     *
     * @param diacritic TextPosition to merge into the current TextPosition.
     */
    public void mergeDiacritic(TextPosition diacritic)
    {
        // only single-character diacritics can be merged
        if (diacritic.getUnicode().length() > 1)
        {
            return;
        }

        float diacXStart = diacritic.getXDirAdj();
        float diacXEnd = diacXStart + diacritic.widths[0];

        float currCharXStart = getXDirAdj();

        int strLen = unicode.length();
        boolean wasAdded = false;

        // walk the characters of this TextPosition looking for the one the diacritic overlaps
        for (int i = 0; i < strLen && !wasAdded; i++)
        {
            // widths has one entry per base character; a ligature (one glyph, several chars)
            // breaks that assumption, so bail out rather than index past the array
            if (i >= widths.length)
            {
                LOG.info("diacritic " + diacritic.getUnicode() + " on ligature " + unicode +
                         " is not supported yet and is ignored (PDFBOX-2831)");
                break;
            }
            float currCharXEnd = currCharXStart + widths[i];

            // this is the case where there is an overlap of the diacritic character with the
            // current character and the previous character. If no previous character, just append
            // the diacritic after the current one
            if (diacXStart < currCharXStart && diacXEnd <= currCharXEnd)
            {
                if (i == 0)
                {
                    insertDiacritic(i, diacritic);
                }
                else
                {
                    // attach to whichever of the two characters the diacritic overlaps more
                    float distanceOverlapping1 = diacXEnd - currCharXStart;
                    float percentage1 = distanceOverlapping1/widths[i];

                    float distanceOverlapping2 = currCharXStart - diacXStart;
                    float percentage2 = distanceOverlapping2/widths[i - 1];

                    if (percentage1 >= percentage2)
                    {
                        insertDiacritic(i, diacritic);
                    }
                    else
                    {
                        insertDiacritic(i - 1, diacritic);
                    }
                }
                wasAdded = true;
            }
            // diacritic completely covers this character and therefore we assume that this is the
            // character the diacritic belongs to
            else if (diacXStart < currCharXStart && diacXEnd > currCharXEnd)
            {
                insertDiacritic(i, diacritic);
                wasAdded = true;
            }
            // otherwise, The diacritic modifies this character because its completely
            // contained by the character width
            else if (diacXStart >= currCharXStart && diacXEnd <= currCharXEnd)
            {
                insertDiacritic(i, diacritic);
                wasAdded = true;
            }
            // last character in the TextPosition so we add diacritic to the end
            else if (diacXStart >= currCharXStart && diacXEnd > currCharXEnd && i == strLen - 1)
            {
                insertDiacritic(i, diacritic);
                wasAdded = true;
            }

            // couldn't find anything useful so we go to the next character in the TextPosition
            currCharXStart += widths[i];
        }
    }

    /**
     * Inserts the diacritic TextPosition to the str of this TextPosition and updates the widths
     * array to include the extra character width.
     *
     * @param i current character
     * @param diacritic The diacritic TextPosition
     */
    private void insertDiacritic(int i, TextPosition diacritic)
    {
        StringBuilder sb = new StringBuilder();
        sb.append(unicode.substring(0, i));

        float[] widths2 = new float[widths.length + 1];
        System.arraycopy(widths, 0, widths2, 0, i);

        // Unicode combining diacritics always go after the base character, regardless of whether
        // the string is in presentation order or logical order
        sb.append(unicode.charAt(i));
        widths2[i] = widths[i];
        sb.append(combineDiacritic(diacritic.getUnicode()));
        widths2[i + 1] = 0; // the combining mark itself takes no horizontal space

        // get the rest of the string
        sb.append(unicode.substring(i + 1, unicode.length()));
        System.arraycopy(widths, i + 1, widths2, i + 2, widths.length - i - 1);

        unicode = sb.toString();
        widths = widths2;
    }

    /**
     * Combine the diacritic, for example, convert non-combining diacritic characters to their
     * combining counterparts.
     *
     * @param str String to normalize
     * @return Normalized string
     */
    private String combineDiacritic(String str)
    {
        // Unicode contains special combining forms of the diacritic characters which we want to use
        int codePoint = str.codePointAt(0);

        // convert the characters not defined in the Unicode spec
        if (DIACRITICS.containsKey(codePoint))
        {
            return DIACRITICS.get(codePoint);
        }
        else
        {
            return Normalizer.normalize(str, Normalizer.Form.NFKC).trim();
        }
    }

    /**
     * @return True if the current character is a diacritic char.
     */
    public boolean isDiacritic()
    {
        String text = this.getUnicode();
        // multi-character strings (or empty ones) are never a single diacritic
        if (text.length() != 1)
        {
            return false;
        }
        int type = Character.getType(text.charAt(0));
        return type == Character.NON_SPACING_MARK ||
               type == Character.MODIFIER_SYMBOL ||
               type == Character.MODIFIER_LETTER;
    }

    /**
     * Show the string data for this text position.
     *
     * @return A human readable form of this object.
     */
    @Override
    public String toString()
    {
        return getUnicode();
    }
}
package edu.emory.cci.aiw.i2b2etl.ksb;

/*
 * #%L
 * Protempa BioPortal Knowledge Source Backend
 * %%
 * Copyright (C) 2012 - 2014 Emory University
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.junit.Test;
import org.protempa.PropositionDefinition;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.ArrayUtils;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.protempa.PropertyDefinitionBuilder;
import org.protempa.valueset.ValueSet;

/**
 * Tests for the i2b2 knowledge source backend, exercising the read/collect
 * helpers inherited from {@link AbstractKSBTest}. Several tests compare
 * results against truth files under {@code /truth} on the test classpath.
 */
public class I2b2KnowledgeSourceBackendTest extends AbstractKSBTest {

    // proposition id used by most single-definition tests (ICD-9 code 250, diabetes)
    private static final String ICD9_250_ID = "ICD9:250";

    // --- single proposition definition reads ---

    @Test
    public void testReadPropositionDefinitionNotNull() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertNotNull(propDef);
    }

    @Test
    public void testReadPropositionsDefinitionNotNull() throws Exception {
        // batch read of a single id should yield exactly one definition
        List<PropositionDefinition> propDefs = readPropositionDefinitions(new String[]{ICD9_250_ID});
        assertEquals(1, propDefs.size());
    }

    @Test
    public void testReadPropositionDefinitionId() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertEquals("ICD9:250", propDef.getId());
    }

    @Test
    public void testReadPropositionDefinitionDisplayName() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertEquals("Diabetes mellitus due to insulin receptor antibodies", propDef.getDisplayName());
    }

    @Test
    public void testReadPropositionDefinitionAbbrevDisplayName() throws Exception {
        // no abbreviated name is defined for this concept; expect empty string, not null
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertEquals("", propDef.getAbbreviatedDisplayName());
    }

    @Test
    public void testReadPropositionDefinitionInDataSource() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertTrue(propDef.getInDataSource());
    }

    @Test
    public void testReadPropositionDefinitionInverseIsA() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        assertEqualsStrings("/truth/testReadPropositionDefinitionInverseIsA", propDef.getChildren());
    }

    @Test
    public void testReadPropositionDefinitionPropertyDefs() throws Exception {
        PropositionDefinition propDef = readPropositionDefinition(ICD9_250_ID);
        Set<PropertyDefinitionBuilder> actual = collectPropertyDefinitionBuilders(propDef);
        assertEqualsSetOfObjects("/truth/testReadPropositionDefinitionPropertyDefs.xml", actual);
    }

    // --- hierarchy traversal ---

    @Test
    public void testReadIsA() throws Exception {
        // a child code (250.1) should report its parent (250) via is-a
        String[] isa = readIsA(ICD9_250_ID + ".1");
        assertArrayEquals(new String[]{ICD9_250_ID}, isa);
    }

    @Test
    public void testGetKnowledgeSourceSearchResults() throws Exception {
        assertEqualsStrings("/truth/testGetKnowledgeSourceSearchResults", getKnowledgeSourceSearchResults("diabetes"));
    }

    @Test
    public void testCollectPropIdDescendantsUsingAllNarrower() throws Exception {
        assertEqualsStrings(
                "/truth/testCollectPropIdDescendantsUsingAllNarrower",
                collectPropIdDescendantsUsingAllNarrower(false, new String[]{"ICD9:Procedures"}));
    }

    @Test
    public void testCollectPropPropDefDescendantsUsingAllNarrower() throws Exception {
        Set<PropositionDefinition> actualPropDef = new HashSet<>(collectPropDefDescendantsUsingAllNarrower(false, new String[]{"ICD9:Procedures"}));
        assertEqualsStrings(
                "/truth/testCollectPropDefDescendantsUsingAllNarrower",
                toPropId(actualPropDef));
    }

    @Test
    public void testCollectPropDefDescendantsUsingAllNarrowerInDataSource() throws Exception {
        // same traversal as above but restricted to concepts present in the data source
        Set<PropositionDefinition> actualPropDef = new HashSet<>(collectPropDefDescendantsUsingAllNarrower(true, new String[]{"ICD9:Procedures"}));
        assertEqualsStrings(
                "/truth/testCollectPropDefDescendantsUsingAllNarrowerInDataSource",
                toPropId(actualPropDef));
    }

    @Test
    public void testCollectPropDefDescendants285_22() throws Exception {
        Set<PropositionDefinition> actualPropDef = new HashSet<>(collectPropDefDescendantsUsingAllNarrower(true, new String[]{"ICD9:285.22"}));
        Set<PropertyDefinitionBuilder> actual = collectPropertyDefinitionBuilders(actualPropDef);
        assertEqualsSetOfObjects("/truth/testCollectPropDefDescendants285.xml", actual);
    }

    @Test
    public void testCollectPropIdDescendantsUsingInverseIsA() throws Exception {
        assertEqualsStrings(
                "/truth/testCollectPropIdDescendantsUsingInverseIsA",
                collectPropIdDescendantsUsingInverseIsA(new String[]{"LAB:LabTest"}));
    }

    @Test
    public void testCollectPropDefDescendantsUsingInverseIsA() throws Exception {
        Set<PropositionDefinition> actualPropDef = new HashSet<>(collectPropDefDescendantsUsingInverseIsA(new String[]{"LAB:LabTest"}));
        assertEqualsStrings("/truth/testCollectPropDefDescendantsUsingInverseIsA", toPropId(actualPropDef));
    }

    @Test
    public void testCollectPropDefDescendantsUsingUsingAllNarrowerProperties() throws Exception {
        Collection<PropositionDefinition> collectPropDefDescendantsUsingAllNarrower = collectPropDefDescendantsUsingAllNarrower(false, new String[]{"ICD9:Diagnoses"});
        Set<PropertyDefinitionBuilder> actual = collectPropertyDefinitionBuilders(collectPropDefDescendantsUsingAllNarrower);
        assertEqualsSetOfObjects("/truth/testCollectPropDefDescendantsUsingAllNarrowerProperties.xml", actual);
    }

    @Test
    public void testCollectPropDefDescendantsUsingUsingAllNarrowerInDataSourceProperties() throws Exception {
        Collection<PropositionDefinition> collectPropDefDescendantsUsingAllNarrowerInDataSource = collectPropDefDescendantsUsingAllNarrower(true, new String[]{"ICD9:Diagnoses"});
        Set<PropertyDefinitionBuilder> actual = collectPropertyDefinitionBuilders(collectPropDefDescendantsUsingAllNarrowerInDataSource);
        assertEqualsSetOfObjects("/truth/testCollectPropDefDescendantsUsingAllNarrowerInDataSourceProperties.xml", actual);
    }

    @Test
    public void testCollectPropDefDescendantsUsingInverseIsAProperties() throws Exception {
        Collection<PropositionDefinition> collectPropDefDescendantsUsingInverseIsA = collectPropDefDescendantsUsingInverseIsA(new String[]{"ICD9:Diagnoses"});
        Set<PropertyDefinitionBuilder> actual = collectPropertyDefinitionBuilders(collectPropDefDescendantsUsingInverseIsA);
        assertEqualsSetOfObjects("/truth/testCollectPropDefDescendantsUsingInverseIsAProperties.xml", actual);
    }

    // --- value sets (with and without c_metadataxml in the ontology table) ---

    @Test
    public void testReadValueSetExistsCMetadataXML() throws Exception {
        ValueSet valueSet = readValueSet("ICD9:Diagnoses^DXPRIORITY");
        assertNotNull(valueSet);
    }

    @Test
    public void testReadValueSetExistsNoCMetadataXML() throws Exception {
        ValueSet valueSet = readValueSet("ICD9:Diagnoses^DXSOURCE");
        assertNotNull(valueSet);
    }

    @Test
    public void testReadValueSetContentCMetadataXML() throws Exception {
        ValueSet actual = readValueSet("ICD9:Diagnoses^DXPRIORITY");
        assertEqualsObjects("/truth/testReadValueSetContentCMetadataXML.xml", actual.asBuilder());
    }

    @Test
    public void testReadValueSetContentNoCMetadataXML() throws Exception {
        ValueSet actual = readValueSet("ICD9:Diagnoses^DXSOURCE");
        assertEqualsObjects("/truth/testReadValueSetContentNoCMetadataXML.xml", actual.asBuilder());
    }

    // --- definition kinds this backend does not provide for plain concepts ---

    @Test
    public void testReadAbstractionDefinition() throws Exception {
        assertNull(readAbstractionDefinition(ICD9_250_ID));
    }

    @Test
    public void testReadContextDefinition() throws Exception {
        assertNull(readContextDefinition(ICD9_250_ID));
    }

    @Test
    public void testReadTemporalPropositionDefinition() throws Exception {
        assertNotNull(readTemporalPropositionDefinition(ICD9_250_ID));
    }

    @Test
    public void testReadAbstractedInto() throws Exception {
        assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, readAbstractedInto(ICD9_250_ID));
    }

    @Test
    public void testReadInduces() throws Exception {
        assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, readInduces(ICD9_250_ID));
    }

    @Test
    public void testReadSubContextsOf() throws Exception {
        assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, readSubContextOfs(ICD9_250_ID));
    }
}
/* * Copyright (c) 2015, the Dart project authors. * * Licensed under the Eclipse Public License v1.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.eclipse.org/legal/epl-v10.html * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.dartlang.vm.service; import com.google.common.collect.Maps; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import de.roderick.weberknecht.WebSocket; import de.roderick.weberknecht.WebSocketEventHandler; import de.roderick.weberknecht.WebSocketException; import de.roderick.weberknecht.WebSocketMessage; import org.dartlang.vm.service.consumer.*; import org.dartlang.vm.service.element.*; import org.dartlang.vm.service.internal.RequestSink; import org.dartlang.vm.service.internal.VmServiceConst; import org.dartlang.vm.service.internal.WebSocketRequestSink; import org.dartlang.vm.service.logging.Logging; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; /** * Internal {@link VmService} base class containing non-generated code. */ @SuppressWarnings({"unused", "WeakerAccess"}) abstract class VmServiceBase implements VmServiceConst { /** * Connect to the VM observatory service via the specified URI * * @return an API object for interacting with the VM service (not {@code null}). 
*/ public static VmService connect(final String url) throws IOException { // Validate URL URI uri; try { uri = new URI(url); } catch (URISyntaxException e) { throw new IOException("Invalid URL: " + url, e); } String wsScheme = uri.getScheme(); if (!"ws".equals(wsScheme) && !"wss".equals(wsScheme)) { throw new IOException("Unsupported URL scheme: " + wsScheme); } // Create web socket and observatory WebSocket webSocket; try { webSocket = new WebSocket(uri); } catch (WebSocketException e) { throw new IOException("Failed to create websocket: " + url, e); } final VmService vmService = new VmService(); // Setup event handler for forwarding responses webSocket.setEventHandler(new WebSocketEventHandler() { @Override public void onClose() { Logging.getLogger().logInformation("VM connection closed: " + url); vmService.connectionClosed(); } @Override public void onMessage(WebSocketMessage message) { Logging.getLogger().logInformation("VM message: " + message.getText()); try { vmService.processMessage(message.getText()); } catch (Exception e) { Logging.getLogger().logError(e.getMessage(), e); } } @Override public void onOpen() { vmService.connectionOpened(); Logging.getLogger().logInformation("VM connection open: " + url); } @Override public void onPing() { } @Override public void onPong() { } }); // Establish WebSocket Connection //noinspection TryWithIdenticalCatches try { webSocket.connect(); } catch (WebSocketException e) { throw new IOException("Failed to connect: " + url, e); } catch (ArrayIndexOutOfBoundsException e) { // The weberknecht can occasionally throw an array index exception if a connect terminates on initial connect // (de.roderick.weberknecht.WebSocket.connect, WebSocket.java:126). 
throw new IOException("Failed to connect: " + url, e); } vmService.requestSink = new WebSocketRequestSink(webSocket); // Check protocol version final CountDownLatch latch = new CountDownLatch(1); final String[] errMsg = new String[1]; vmService.getVersion(new VersionConsumer() { @Override public void onError(RPCError error) { String msg = "Failed to determine protocol version: " + error.getCode() + "\n message: " + error.getMessage() + "\n details: " + error.getDetails(); Logging.getLogger().logInformation(msg); errMsg[0] = msg; } @Override public void received(Version response) { int major = response.getMajor(); int minor = response.getMinor(); if (major != VmService.versionMajor || minor != VmService.versionMinor) { if (major == 2 || major == 3) { Logging.getLogger().logInformation( "Difference in protocol version: client=" + VmService.versionMajor + "." + VmService.versionMinor + " vm=" + major + "." + minor); } else { String msg = "Incompatible protocol version: client=" + VmService.versionMajor + "." + VmService.versionMinor + " vm=" + major + "." + minor; Logging.getLogger().logError(msg); errMsg[0] = msg; } } latch.countDown(); } }); try { if (!latch.await(5, TimeUnit.SECONDS)) { throw new IOException("Failed to determine protocol version"); } if (errMsg[0] != null) { throw new IOException(errMsg[0]); } } catch (InterruptedException e) { throw new RuntimeException("Interrupted while waiting for response", e); } return vmService; } /** * Connect to the VM observatory service on the given local port. * * @return an API object for interacting with the VM service (not {@code null}). * * @deprecated prefer the Url based constructor {@link VmServiceBase#connect} */ @Deprecated public static VmService localConnect(int port) throws IOException { return connect("ws://localhost:" + port + "/ws"); } /** * A mapping between {@link String} ids' and the associated {@link Consumer} that was passed when * the request was made. 
Synchronize against {@link #consumerMapLock} before accessing this field. */ private final Map<String, Consumer> consumerMap = Maps.newHashMap(); /** * The object used to synchronize access to {@link #consumerMap}. */ private final Object consumerMapLock = new Object(); /** * The unique ID for the next request. */ private final AtomicInteger nextId = new AtomicInteger(); /** * A list of objects to which {@link Event}s from the VM are forwarded. */ private final List<VmServiceListener> vmListeners = new ArrayList<>(); /** * A list of objects to which {@link Event}s from the VM are forwarded. */ private final Map<String, RemoteServiceRunner> remoteServiceRunners = Maps.newHashMap(); /** * The channel through which observatory requests are made. */ RequestSink requestSink; /** * Add a listener to receive {@link Event}s from the VM. */ public void addVmServiceListener(VmServiceListener listener) { vmListeners.add(listener); } /** * Remove the given listener from the VM. */ public void removeVmServiceListener(VmServiceListener listener) { vmListeners.remove(listener); } /** * Add a VM RemoteServiceRunner. */ public void addServiceRunner(String service, RemoteServiceRunner runner) { remoteServiceRunners.put(service, runner); } /** * Remove a VM RemoteServiceRunner. */ public void removeServiceRunner(String service) { remoteServiceRunners.remove(service); } /** * Disconnect from the VM observatory service. */ public void disconnect() { requestSink.close(); } /** * Return the instance with the given identifier. 
*/ public void getInstance(String isolateId, String instanceId, final GetInstanceConsumer consumer) { getObject(isolateId, instanceId, new GetObjectConsumer() { @Override public void onError(RPCError error) { consumer.onError(error); } @Override public void received(Obj response) { if (response instanceof Instance) { consumer.received((Instance) response); } else { onError(RPCError.unexpected("Instance", response)); } } @Override public void received(Sentinel response) { onError(RPCError.unexpected("Instance", response)); } }); } /** * Return the library with the given identifier. */ public void getLibrary(String isolateId, String libraryId, final GetLibraryConsumer consumer) { getObject(isolateId, libraryId, new GetObjectConsumer() { @Override public void onError(RPCError error) { consumer.onError(error); } @Override public void received(Obj response) { if (response instanceof Library) { consumer.received((Library) response); } else { onError(RPCError.unexpected("Library", response)); } } @Override public void received(Sentinel response) { onError(RPCError.unexpected("Library", response)); } }); } public abstract void getObject(String isolateId, String objectId, GetObjectConsumer consumer); /** * Invoke a specific service protocol extension method. * <p> * See https://api.dartlang.org/stable/dart-developer/dart-developer-library.html. */ public void callServiceExtension(String isolateId, String method, ServiceExtensionConsumer consumer) { JsonObject params = new JsonObject(); params.addProperty("isolateId", isolateId); request(method, params, consumer); } /** * Invoke a specific service protocol extension method. * <p> * See https://api.dartlang.org/stable/dart-developer/dart-developer-library.html. 
*/
  public void callServiceExtension(String isolateId, String method, JsonObject params,
      ServiceExtensionConsumer consumer) {
    // The isolate id is always injected into the caller-supplied params.
    params.addProperty("isolateId", isolateId);
    request(method, params, consumer);
  }

  /**
   * Sends the request and associates the request with the passed {@link Consumer}, which will
   * receive the matching response (correlated by the generated request id).
   */
  protected void request(String method, JsonObject params, Consumer consumer) {
    // Assemble the JSON-RPC 2.0 request
    String id = Integer.toString(nextId.incrementAndGet());
    JsonObject request = new JsonObject();
    request.addProperty(JSONRPC, JSONRPC_VERSION);
    request.addProperty(ID, id);
    request.addProperty(METHOD, method);
    request.add(PARAMS, params);

    // Cache the consumer to receive the response
    synchronized (consumerMapLock) {
      consumerMap.put(id, consumer);
    }

    // Send the request
    requestSink.add(request);
  }

  /**
   * Notify all registered listeners that the connection has been opened.
   */
  public void connectionOpened() {
    // Iterate a copy so a listener may add/remove listeners during the callback.
    for (VmServiceListener listener : new ArrayList<>(vmListeners)) {
      try {
        listener.connectionOpened();
      } catch (Exception e) {
        Logging.getLogger().logError("Exception notifying listener", e);
      }
    }
  }

  /**
   * Forward a VM event on the given stream to all registered listeners.
   */
  private void forwardEvent(String streamId, Event event) {
    // Iterate a copy so a listener may add/remove listeners during the callback.
    for (VmServiceListener listener : new ArrayList<>(vmListeners)) {
      try {
        listener.received(streamId, event);
      } catch (Exception e) {
        Logging.getLogger().logError("Exception processing event: " + streamId + ", " + event.getJson(), e);
      }
    }
  }

  /**
   * Notify all registered listeners that the connection has been closed.
   */
  public void connectionClosed() {
    // Iterate a copy so a listener may add/remove listeners during the callback.
    for (VmServiceListener listener : new ArrayList<>(vmListeners)) {
      try {
        listener.connectionClosed();
      } catch (Exception e) {
        Logging.getLogger().logError("Exception notifying listener", e);
      }
    }
  }

  /**
   * Decode the typed response and deliver it to the consumer; implemented by the generated
   * subclass.
   */
  abstract void forwardResponse(Consumer consumer, String type, JsonObject json);

  /**
   * Log a response whose type did not match any of the consumer's expected types.
   */
  void logUnknownResponse(Consumer consumer, JsonObject json) {
    Class<? extends Consumer> consumerClass = consumer.getClass();
    StringBuilder msg = new StringBuilder();
    msg.append("Expected response for ").append(consumerClass).append("\n");
    for (Class<?> interf : consumerClass.getInterfaces()) {
      msg.append(" implementing ").append(interf).append("\n");
    }
    msg.append(" but received ").append(json);
    Logging.getLogger().logError(msg.toString());
  }

  /**
   * Process a raw message from the VM service: dispatch requests/notifications from the VM,
   * or forward a response to the consumer associated with the response id.
   */
  void processMessage(String jsonText) {
    if (jsonText == null || jsonText.isEmpty()) {
      return;
    }

    // Decode the JSON
    JsonObject json;
    try {
      json = (JsonObject) new JsonParser().parse(jsonText);
    } catch (Exception e) {
      Logging.getLogger().logError("Parse message failed: " + jsonText, e);
      return;
    }

    // A "method" member means the VM is calling us; otherwise it is a response to our request.
    if (json.has("method")) {
      if (!json.has(PARAMS)) {
        final String message = "Missing " + PARAMS;
        Logging.getLogger().logError(message);
        final JsonObject response = new JsonObject();
        response.addProperty(JSONRPC, JSONRPC_VERSION);
        sendErrorResponse(response, INVALID_REQUEST, message);
        return;
      }
      // Per JSON-RPC, a call with an id expects a reply; without one it is a notification.
      if (json.has("id")) {
        processRequest(json);
      } else {
        processNotification(json);
      }
    } else if (json.has("result") || json.has("error")) {
      processResponse(json);
    } else {
      Logging.getLogger().logError("Malformed message");
    }
  }

  /**
   * Attach an error member with the given code/message to the response and send it.
   * Extracted helper: this pattern was duplicated five times across processMessage/processRequest.
   */
  private void sendErrorResponse(JsonObject response, int code, String message) {
    final JsonObject error = new JsonObject();
    error.addProperty(CODE, code);
    error.addProperty(MESSAGE, message);
    response.add(ERROR, error);
    requestSink.add(response);
  }

  /**
   * Handle a request made by the VM against a registered {@link RemoteServiceRunner},
   * replying with either a result or a JSON-RPC error.
   */
  void processRequest(JsonObject json) {
    final JsonObject response = new JsonObject();
    response.addProperty(JSONRPC, JSONRPC_VERSION);

    // Get the id identifying this request; echoed back in the response
    String id;
    try {
      id = json.get(ID).getAsString();
    } catch (Exception e) {
      final String message = "Request malformed " + ID;
      Logging.getLogger().logError(message, e);
      sendErrorResponse(response, INVALID_REQUEST, message);
      return;
    }
    response.addProperty(ID, id);

    String method;
    try {
      method = json.get(METHOD).getAsString();
    } catch (Exception e) {
      final String message = "Request malformed " + METHOD;
      Logging.getLogger().logError(message, e);
      sendErrorResponse(response, INVALID_REQUEST, message);
      return;
    }

    JsonObject params;
    try {
      params = json.get(PARAMS).getAsJsonObject();
    } catch (Exception e) {
      // BUG FIX: previously reported METHOD here; the member being parsed is PARAMS.
      final String message = "Request malformed " + PARAMS;
      Logging.getLogger().logError(message, e);
      sendErrorResponse(response, INVALID_REQUEST, message);
      return;
    }

    if (!remoteServiceRunners.containsKey(method)) {
      final String message = "Unknown service " + method;
      Logging.getLogger().logError(message);
      sendErrorResponse(response, METHOD_NOT_FOUND, message);
      return;
    }

    final RemoteServiceRunner runner = remoteServiceRunners.get(method);
    try {
      runner.run(params, new RemoteServiceCompleter() {
        public void result(JsonObject result) {
          response.add(RESULT, result);
          requestSink.add(response);
        }

        public void error(int code, String message, JsonObject data) {
          // Not using sendErrorResponse: this variant carries an optional data member.
          final JsonObject error = new JsonObject();
          error.addProperty(CODE, code);
          error.addProperty(MESSAGE, message);
          if (data != null) {
            error.add(DATA, data);
          }
          response.add(ERROR, error);
          requestSink.add(response);
        }
      });
    } catch (Exception e) {
      final String message = "Internal Server Error";
      Logging.getLogger().logError(message, e);
      sendErrorResponse(response, SERVER_ERROR, message);
    }
  }

  /**
   * Completer used for notifications, whose results/errors are discarded by definition.
   */
  private static final RemoteServiceCompleter ignoreCallback = new RemoteServiceCompleter() {
    public void result(JsonObject result) {
      // ignore
    }

    public void error(int code, String message, JsonObject data) {
      // ignore
    }
  };

  /**
   * Handle a notification from the VM: either a streamNotify event forwarded to listeners,
   * or an id-less call to a registered service runner.
   */
  void processNotification(JsonObject json) {
    String method;
    try {
      method = json.get(METHOD).getAsString();
    } catch (Exception e) {
      Logging.getLogger().logError("Request malformed " + METHOD, e);
      return;
    }
    JsonObject params;
    try {
      params = json.get(PARAMS).getAsJsonObject();
    } catch (Exception e) {
      Logging.getLogger().logError("Event missing " + PARAMS, e);
      return;
    }

    if ("streamNotify".equals(method)) {
      String streamId;
      try {
        streamId = params.get(STREAM_ID).getAsString();
      } catch (Exception e) {
        Logging.getLogger().logError("Event missing " + STREAM_ID, e);
        return;
      }
      Event event;
      try {
        event = new Event(params.get(EVENT).getAsJsonObject());
      } catch (Exception e) {
        Logging.getLogger().logError("Event missing " + EVENT, e);
        return;
      }
      forwardEvent(streamId, event);
    } else {
      if (!remoteServiceRunners.containsKey(method)) {
        Logging.getLogger().logError("Unknown service " + method);
        return;
      }
      final RemoteServiceRunner runner = remoteServiceRunners.get(method);
      try {
        runner.run(params, ignoreCallback);
      } catch (Exception e) {
        Logging.getLogger().logError("Internal Server Error", e);
      }
    }
  }

  /**
   * Forward a response from the VM to the consumer registered for its id, or report the
   * error member to that consumer if the request failed.
   */
  void processResponse(JsonObject json) {
    JsonElement idElem = json.get(ID);
    if (idElem == null) {
      Logging.getLogger().logError("Response missing " + ID);
      return;
    }

    // Get the consumer associated with this response
    String id;
    try {
      id = idElem.getAsString();
    } catch (Exception e) {
      Logging.getLogger().logError("Response missing " + ID, e);
      return;
    }
    // BUG FIX: consumerMap's documented contract requires holding consumerMapLock for every
    // access; the unsynchronized remove raced with the synchronized put in request().
    Consumer consumer;
    synchronized (consumerMapLock) {
      consumer = consumerMap.remove(id);
    }
    if (consumer == null) {
      Logging.getLogger().logError("No consumer associated with " + ID + ": " + id);
      return;
    }

    // Forward the response if the request was successfully executed
    JsonElement resultElem = json.get(RESULT);
    if (resultElem != null) {
      JsonObject result;
      try {
        result = resultElem.getAsJsonObject();
      } catch (Exception e) {
        Logging.getLogger().logError("Response has invalid " + RESULT, e);
        return;
      }
      String responseType;
      try {
        responseType = result.get(TYPE).getAsString();
      } catch (Exception e) {
        Logging.getLogger().logError("Response missing " + TYPE, e);
        return;
      }
      forwardResponse(consumer, responseType, result);
      return;
    }

    // Forward an error if the request failed
    resultElem = json.get(ERROR);
    if (resultElem != null) {
      JsonObject error;
      try {
        error = resultElem.getAsJsonObject();
      } catch (Exception e) {
        // BUG FIX: previously logged RESULT; this branch is parsing the ERROR member.
        Logging.getLogger().logError("Response has invalid " + ERROR, e);
        return;
      }
      consumer.onError(new RPCError(error));
      return;
    }

    Logging.getLogger().logError("Response missing " + RESULT + " and " + ERROR);
  }
}