gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.pen; import java.io.IOException; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.pig.data.DataAtom; import org.apache.pig.data.DataBag; import org.apache.pig.data.Datum; import org.apache.pig.data.Tuple; import org.apache.pig.impl.logicalLayer.LogicalOperator; import org.apache.pig.impl.logicalLayer.LogicalPlan; import org.apache.pig.impl.logicalLayer.OperatorKey; import org.apache.pig.impl.logicalLayer.schema.Schema; import org.apache.pig.impl.logicalLayer.schema.TupleSchema; import org.apache.pig.impl.util.IdentityHashSet; public class DisplayExamples { public static StringBuffer Result = new StringBuffer(); public static final int MAX_DATAATOM_LENGTH = 15; public static String PrintTabular(LogicalPlan lp, Map<LogicalOperator, DataBag> exampleData) { StringBuffer output = new StringBuffer(); LogicalOperator currentOp = lp.getRootOperator(); PrintTabular(currentOp, exampleData, output); return output.toString(); } static void PrintTabular(LogicalOperator op, Map<LogicalOperator, DataBag> exampleData, StringBuffer output) { DataBag bag = exampleData.get(op); List<OperatorKey> 
inputs = op.getInputs(); for(OperatorKey opKey : inputs) { PrintTabular(op.getOpTable().get(opKey), exampleData, output); } if(op.getAlias() != null) { //PrintTable(op, bag, output); DisplayTable(MakeArray(op, bag), op, bag, output); } } static String AddSpaces(int n, boolean printSpace) { StringBuffer str = new StringBuffer(); for(int i = 0; i < n; ++i) { if(printSpace) str.append(" "); else str.append("-"); } return str.toString(); } static void DisplayTable(String[][] table, LogicalOperator op, DataBag bag, StringBuffer output) { int cols = op.outputSchema().getFields().size(); List<Schema> fields = op.outputSchema().fields; int rows = (int)bag.size(); int [] maxColSizes = new int[cols]; for(int i = 0; i < cols; ++i) { maxColSizes[i] = fields.get(i).toString().length(); if(maxColSizes[i] < 5) maxColSizes[i] = 5; } int total = 0; int aliasLength = op.getAlias().length() + 4; for(int j = 0; j < cols; ++j) { for(int i = 0; i < rows; ++i) { int length = table[i][j].length(); if(length > maxColSizes[j]) maxColSizes[j] = length; } total += maxColSizes[j]; } //Display the schema first output.append(AddSpaces(total + 3*(cols +1) + aliasLength + 1, false) + "\n"); output.append("| " + op.getAlias() + AddSpaces(4, true) + " | "); for(int i = 0; i < cols; ++i) { String field = fields.get(i).toString(); output.append(field + AddSpaces(maxColSizes[i] - field.length(), true) + " | "); } output.append("\n" + AddSpaces(total + 3*(cols +1) + aliasLength + 1, false) + "\n"); //now start displaying the data for(int i = 0; i < rows; ++i) { output.append("| " + AddSpaces(aliasLength, true) + " | "); for(int j = 0; j < cols; ++j) { String str = table[i][j]; output.append(str + AddSpaces(maxColSizes[j] - str.length(), true) + " | "); } output.append("\n"); } //now display the finish line output.append(AddSpaces(total + 3*(cols +1) + aliasLength + 1, false) + "\n"); } static String[][] MakeArray(LogicalOperator op, DataBag bag) { int rows = (int)bag.size(); int cols = 
op.outputSchema().getFields().size(); String[][] table = new String [rows][cols]; Iterator<Tuple> it = bag.iterator(); for(int i = 0; i < rows; ++i) { Tuple t = it.next(); for(int j = 0; j < cols; ++j) { table[i][j] = ShortenField(t.getField(j)); } } return table; } static String ShortenField(Datum d) { if(d instanceof Tuple) return ShortenField((Tuple)d); else if(d instanceof DataBag) return ShortenField((DataBag)d); else if(d instanceof DataAtom) return ShortenField((DataAtom)d); else { System.out.println("Wrong data-type received!!!"); return null; } } static String ShortenField(DataAtom da) { int length = da.toString().length(); if(length > MAX_DATAATOM_LENGTH) { return new String(da.toString().substring(0, 4) + " ... " + da.toString().substring(length - 4, length - 1)); } return da.toString(); } static String ShortenField(DataBag bag) { StringBuffer str = new StringBuffer(); long size = bag.size(); str.append("{"); if(size > 3) { Iterator<Tuple> it = bag.iterator(); str.append(ShortenField(it.next())); while(it.hasNext()) { Tuple t = it.next(); if(!it.hasNext()) { str.append(", ..., " + ShortenField(t)); } } } else { for(Iterator<Tuple> it = bag.iterator(); it.hasNext(); ) { Tuple t = it.next(); if(it.hasNext()) { str.append(ShortenField(t) + ", "); } else str.append(ShortenField(t)); } } str.append("}"); return str.toString(); } static String ShortenField(Tuple t) { StringBuffer str = new StringBuffer(); int noFields = t.arity(); str.append("("); if(noFields > 3) { Datum d = t.getField(0); str.append(ShortenField(d) + ", ..., "); d = t.getField(noFields - 1); str.append(ShortenField(d)); } else { for(int i = 0; i < noFields; ++i) { Datum d; d = t.getField(i); if(i != (noFields - 1)) { str.append(ShortenField(d) + ", "); } else { str.append(ShortenField(d)); } } } str.append(")"); return str.toString(); } }
package com.planet_ink.coffee_mud.Abilities.Skills;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
   Copyright 2002-2022 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

	   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
/**
 * Auto-invoked, non-uninvokable combat skill: when the mob wields a primary
 * weapon and holds a second weapon, it gains a speed bonus at the cost of
 * attack and damage penalties, and each mob tick during combat may post an
 * extra attack with the off-hand weapon.
 */
public class Skill_TwoWeaponFighting extends StdSkill
{
	@Override
	public String ID()
	{
		return "Skill_TwoWeaponFighting";
	}

	// Cached so the localization lookup happens once per class load.
	private final static String localizedName = CMLib.lang().L("Two Weapon Fighting");

	@Override
	public String name()
	{
		return localizedName;
	}

	@Override
	public String displayText()
	{
		// No affect-list text: the skill is passive/always-on.
		return "";
	}

	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_BENEFICIAL_SELF;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_SKILL|Ability.DOMAIN_MARTIALLORE;
	}

	@Override
	public boolean isAutoInvoked()
	{
		return true;
	}

	@Override
	public boolean canBeUninvoked()
	{
		return false;
	}

	// Set by executeMsg when this mob lands/attempts a weapon attack; consumed
	// and cleared each mob tick. volatile: message and tick handling may run
	// on different threads.
	protected volatile boolean attackedSinceLastTick=false;

	/**
	 * Returns the wielded item as the primary weapon, or null if the wielded
	 * item is absent or not a Weapon.
	 */
	protected Weapon getFirstWeapon(final MOB mob)
	{
		final Item I=mob.fetchWieldedItem();
		if(I instanceof Weapon)
			return (Weapon)I;
		return null;
	}

	/**
	 * Returns the held item as the off-hand weapon, or null if it is absent,
	 * not a Weapon, or already counted as wielded.
	 */
	protected Weapon getSecondWeapon(final MOB mob)
	{
		final Item I=mob.fetchHeldItem();
		if((I instanceof Weapon)&&(!I.amWearingAt(Wearable.WORN_WIELD)))
			return (Weapon)I;
		return null;
	}

	/**
	 * While dual-wielding in combat: speed bonus scaling with expertise level,
	 * offset by attack (-1/(5+xlvl) share) and damage (-1/(20+xlvl) share)
	 * penalties.
	 */
	@Override
	public void affectPhyStats(final Physical affected, final PhyStats affectableStats)
	{
		if(affected instanceof MOB)
		{
			final MOB mob=(MOB)affected;
			if((getSecondWeapon(mob)!=null)&&(getFirstWeapon(mob)!=null)&&(mob.isInCombat()))
			{
				final int xlvl=super.getXLEVELLevel(invoker());
				affectableStats.setSpeed(affectableStats.speed()+(CMProps.getSpeedAdjustment()*0.1*xlvl));
				affectableStats.setAttackAdjustment(affectableStats.attackAdjustment()-(affectableStats.attackAdjustment()/(5+xlvl)));
				affectableStats.setDamage(affectableStats.damage()-(affectableStats.damage()/(20+xlvl)));
			}
		}
	}

	/**
	 * Watches message traffic to record that this mob attacked a MOB with a
	 * weapon since the last tick; tick() uses that as a gate for the extra
	 * off-hand attack.
	 */
	@Override
	public void executeMsg(final Environmental host, final CMMsg msg)
	{
		if((msg.source()==affected)
		&&(msg.target() instanceof MOB)
		&&(msg.sourceMinor()==CMMsg.TYP_WEAPONATTACK))
			attackedSinceLastTick=true;
		super.executeMsg(host, msg);
	}

	@Override
	public int abilityCode()
	{
		// 0 here; tick() prefers a sibling copy of this skill whose
		// abilityCode()==1, presumably a configured/granted variant — TODO confirm.
		return 0;
	}

	/**
	 * On each mob tick during combat, if both weapons are present, the mob
	 * attacked since the last tick, and the target is in range and both
	 * parties are fit to fight, posts one extra attack with the off-hand
	 * weapon. The worn codes are swapped before postAttack and restored
	 * afterwards so the combat engine treats the held weapon as wielded for
	 * that single attack. The attacked flag is cleared every mob tick.
	 */
	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if((tickID==Tickable.TICKID_MOB)&&(affected instanceof MOB))
		{
			final MOB mob=(MOB)affected;
			// NOTE(review): instanceof above already guarantees mob != null;
			// this null check is redundant but harmless.
			if(mob != null)
			{
				final MOB victiM=mob.getVictim();
				if(victiM != null)
				{
					if(mob.isAttributeSet(MOB.Attrib.AUTODRAW))
						CMLib.commands().postDraw(mob,true,true);
					final Item primaryWeapon=getFirstWeapon(mob);
					final Item weapon=getSecondWeapon(mob);
					if((weapon!=null) // try to wield anything!
					&&(primaryWeapon!=null)
					&&attackedSinceLastTick
					&&(mob.rangeToTarget()>=0)
					&&(mob.rangeToTarget()>=weapon.minRange())
					&&(mob.rangeToTarget()<=weapon.maxRange())
					&&(CMLib.flags().isAliveAwakeMobileUnbound(mob,true))
					&&(!mob.amDead())
					&&(mob.curState().getHitPoints()>0)
					&&(CMLib.flags().isStanding(mob))
					&&(!victiM.amDead()))
					{
						// Prefer a sibling copy of this skill with abilityCode()==1
						// for the proficiency roll, if one is present.
						Ability usedA=this;
						for(final Enumeration<Ability> a=mob.effects();a.hasMoreElements();)
						{
							final Ability A=a.nextElement();
							if((A instanceof Skill_TwoWeaponFighting)
							&&(A.abilityCode()==1))
								usedA=A;
						}
						if(usedA.proficiencyCheck(mob,0,false)
						||((usedA!=this)&&(proficiencyCheck(mob,0,false))))
						{
							// Temporarily swap worn codes so the off-hand weapon
							// counts as wielded for this one attack.
							primaryWeapon.setRawWornCode(Wearable.WORN_HELD);
							weapon.setRawWornCode(Wearable.WORN_WIELD);
							mob.recoverPhyStats();
							CMLib.combat().postAttack(mob,victiM,weapon);
							// Restore the original wield/held arrangement.
							weapon.setRawWornCode(Wearable.WORN_HELD);
							primaryWeapon.setRawWornCode(Wearable.WORN_WIELD);
							mob.recoverPhyStats();
							// 3% chance per off-hand attack to gain proficiency.
							if(CMLib.dice().rollPercentage()<3)
							{
								usedA.helpProficiency(mob, 0);
								helpProficiency(mob, 0);
							}
						}
					}
				}
			}
			// Consume the attack flag regardless of whether an extra attack fired.
			attackedSinceLastTick=false;
		}
		return super.tick(ticking,tickID);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.gridtable; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Locale; import java.util.Random; import java.util.SortedMap; import java.util.TreeMap; import org.apache.kylin.common.util.Bytes; import org.apache.kylin.measure.MeasureAggregator; import org.apache.kylin.measure.basic.BigDecimalSumAggregator; import org.apache.kylin.measure.basic.DoubleSumAggregator; import org.apache.kylin.measure.basic.LongSumAggregator; import org.apache.kylin.measure.bitmap.BitmapAggregator; import org.apache.kylin.measure.bitmap.BitmapCounter; import org.apache.kylin.measure.bitmap.BitmapCounterFactory; import org.apache.kylin.measure.bitmap.RoaringBitmapCounterFactory; import org.apache.kylin.measure.hllc.HLLCAggregator; import org.apache.kylin.measure.hllc.HLLCounter; import org.github.jamm.MemoryMeter; import org.junit.Test; import com.google.common.base.Stopwatch; public class AggregationCacheMemSizeTest { private static final MemoryMeter meter = new MemoryMeter(); private static final BitmapCounterFactory bitmapFactory = RoaringBitmapCounterFactory.INSTANCE; private static final 
BitmapCounter[] bitmaps = new BitmapCounter[5]; private static final Random random = new Random(); // consider bitmaps with variant cardinality static { for (int i = 0; i < bitmaps.length; i++) { bitmaps[i] = bitmapFactory.newBitmap(); } final int totalBits = 1_000_000; // case 0: sparse, low-cardinality bitmap for (int i = 0; i < 100; i++) { bitmaps[0].add(random.nextInt(totalBits)); } // case 1: 20% full bitmap for (int i = 0; i < totalBits; i++) { if (random.nextInt(100) < 20) { bitmaps[1].add(i); } } // case 2: half full bitmap for (int i = 0; i < totalBits; i++) { if (random.nextInt(100) < 50) { bitmaps[2].add(i); } } // case 3: 80% full bitmap for (int i = 0; i < totalBits; i++) { if (random.nextInt(100) < 80) { bitmaps[3].add(i); } } // case 4: dense, high-cardinality bitmap for (int i = 0; i < totalBits; i++) { if (random.nextInt(totalBits) < 100) { continue; } bitmaps[4].add(i); } } enum Settings { WITHOUT_MEM_HUNGRY, // only test basic aggrs WITH_HLLC, // basic aggrs + hllc WITH_LOW_CARD_BITMAP, // basic aggrs + bitmap WITH_HIGH_CARD_BITMAP // basic aggrs + bitmap } private MeasureAggregator<?>[] createNoMemHungryAggrs() { LongSumAggregator longSum = new LongSumAggregator(); longSum.aggregate(new Long(10)); DoubleSumAggregator doubleSum = new DoubleSumAggregator(); doubleSum.aggregate(new Double(10)); BigDecimalSumAggregator decimalSum = new BigDecimalSumAggregator(); decimalSum.aggregate(new BigDecimal("12345678901234567890.123456789")); return new MeasureAggregator[] { longSum, doubleSum, decimalSum }; } private HLLCAggregator createHLLCAggr() { HLLCAggregator hllcAggregator = new HLLCAggregator(14); hllcAggregator.aggregate(new HLLCounter(14)); return hllcAggregator; } private BitmapAggregator createBitmapAggr(boolean lowCardinality) { BitmapCounter counter = bitmapFactory.newBitmap(); counter.orWith(lowCardinality ? 
bitmaps[0] : bitmaps[3]); BitmapAggregator result = new BitmapAggregator(); result.aggregate(counter); return result; } private MeasureAggregator<?>[] createAggrs(Settings settings) { List<MeasureAggregator<?>> aggregators = new ArrayList<>(); aggregators.addAll(Arrays.asList(createNoMemHungryAggrs())); switch (settings) { case WITHOUT_MEM_HUNGRY: break; case WITH_HLLC: aggregators.add(createHLLCAggr()); break; case WITH_LOW_CARD_BITMAP: aggregators.add(createBitmapAggr(true)); break; case WITH_HIGH_CARD_BITMAP: aggregators.add(createBitmapAggr(false)); break; default: break; } return aggregators.toArray(new MeasureAggregator[aggregators.size()]); } @Test public void testEstimateBitmapMemSize() { BitmapAggregator[] bitmapAggrs = new BitmapAggregator[bitmaps.length]; for (int i = 0; i < bitmapAggrs.length; i++) { bitmapAggrs[i] = new BitmapAggregator(); bitmapAggrs[i].aggregate(bitmaps[i]); } System.out.printf(Locale.ROOT, "%-15s %-10s %-10s\n", "cardinality", "estimate", "actual"); for (BitmapAggregator aggr : bitmapAggrs) { System.out.printf(Locale.ROOT, "%-15d %-10d %-10d\n", aggr.getState().getCount(), aggr.getMemBytesEstimate(), meter.measureDeep(aggr)); } } @Test public void testEstimateMemSize() throws InterruptedException { int scale = Integer.parseInt(System.getProperty("scale", "1")); scale = Math.max(1, Math.min(10, scale)); testSetting(Settings.WITHOUT_MEM_HUNGRY, scale * 100000); testSetting(Settings.WITH_HLLC, scale * 5000); testSetting(Settings.WITH_LOW_CARD_BITMAP, scale * 10000); testSetting(Settings.WITH_HIGH_CARD_BITMAP, scale * 1000); } private void testSetting(Settings settings, int inputCount) { SortedMap<byte[], Object> map = new TreeMap<>(new Comparator<byte[]>() { @Override public int compare(byte[] o1, byte[] o2) { return Bytes.compareTo(o1, o2); } }); final int reportInterval = inputCount / 10; final Stopwatch stopwatch = new Stopwatch(); long estimateMillis = 0; long actualMillis = 0; System.out.println("Settings: " + settings); 
System.out.printf(Locale.ROOT, "%15s %15s %15s %15s %15s\n", "Size", "Estimate(bytes)", "Actual(bytes)", "Estimate(ms)", "Actual(ms)"); for (int i = 0; i < inputCount; i++) { byte[] key = new byte[10]; random.nextBytes(key); MeasureAggregator[] values = createAggrs(settings); map.put(key, values); if ((i + 1) % reportInterval == 0) { stopwatch.start(); long estimateBytes = GTAggregateScanner.estimateSizeOfAggrCache(key, values, map.size()); estimateMillis += stopwatch.elapsedMillis(); stopwatch.reset(); stopwatch.start(); long actualBytes = meter.measureDeep(map); actualMillis += stopwatch.elapsedMillis(); stopwatch.reset(); System.out.printf(Locale.ROOT, "%,15d %,15d %,15d %,15d %,15d\n", map.size(), estimateBytes, actualBytes, estimateMillis, actualMillis); } } System.out.println("---------------------------------------\n"); map = null; System.gc(); } }
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.compat.java8.runtime;

import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;

import java.io.Serializable;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.SerializedLambda;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;

/**
 * Tests for LambdaDeserializer: round-trips serializable lambdas through
 * their {@link SerializedLambda} form and checks that the reconstituted
 * function behaves like the original, that caching works, and that tampered
 * serialized forms are rejected.
 */
public final class LambdaDeserializerTest {
    private LambdaHost lambdaHost = new LambdaHost();

    // We skip most tests on Java 17+ because of https://github.com/scala/bug/issues/12419
    // which we only fixed for 2.12+
    @Test
    public void serializationPrivate() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByPrivateImplMethod();
        Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
    }

    @Test
    public void serializationStatic() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
        Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
    }

    @Test
    public void serializationVirtualMethodReference() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByVirtualMethodReference();
        Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
    }

    @Test
    public void serializationInterfaceMethodReference() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F1<I, Object> f1 = lambdaHost.lambdaBackedByInterfaceMethodReference();
        I i = new I() {
        };
        Assert.assertEquals(f1.apply(i), reconstitute(f1).apply(i));
    }

    @Test
    public void serializationStaticMethodReference() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticMethodReference();
        Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
    }

    @Test
    public void serializationNewInvokeSpecial() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
        Assert.assertEquals(f1.apply(), reconstitute(f1).apply());
    }

    // Without a cache, each deserialization spins a fresh anonymous class.
    @Test
    public void uncached() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
        F0<Object> reconstituted1 = reconstitute(f1);
        F0<Object> reconstituted2 = reconstitute(f1);
        Assert.assertNotEquals(reconstituted1.getClass(), reconstituted2.getClass());
    }

    // With a shared cache, repeated deserialization reuses the same class.
    @Test
    public void cached() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        HashMap<String, MethodHandle> cache = new HashMap<>();
        F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
        F0<Object> reconstituted1 = reconstitute(f1, cache);
        F0<Object> reconstituted2 = reconstitute(f1, cache);
        Assert.assertEquals(reconstituted1.getClass(), reconstituted2.getClass());
    }

    @Test
    public void cachedStatic() {
        Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17"));
        HashMap<String, MethodHandle> cache = new HashMap<>();
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
        // Check that deserialization of a static lambda always returns the
        // same instance.
        Assert.assertSame(reconstitute(f1, cache), reconstitute(f1, cache));
        // (as is the case with regular invocation.)
        Assert.assertSame(f1, lambdaHost.lambdaBackedByStaticImplMethod());
    }

    // A tampered impl-method name must be rejected, not silently linked.
    @Test
    public void implMethodNameChanged() {
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
        SerializedLambda sl = writeReplace(f1);
        checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName() + "___", sl.getImplMethodSignature()));
    }

    // A tampered impl-method signature must likewise be rejected.
    @Test
    public void implMethodSignatureChanged() {
        F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
        SerializedLambda sl = writeReplace(f1);
        checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName(), sl.getImplMethodSignature().replace("Boolean", "Integer")));
    }

    /**
     * Asserts that deserializing the given (tampered) lambda fails with the
     * expected "Illegal lambda deserialization" IllegalArgumentException.
     */
    private void checkIllegalAccess(SerializedLambda serialized) {
        try {
            LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, serialized);
            throw new AssertionError();
        } catch (IllegalArgumentException iae) {
            if (!iae.getMessage().contains("Illegal lambda deserialization")) {
                Assert.fail("Unexpected message: " + iae.getMessage());
            }
        }
    }

    /**
     * Clones a SerializedLambda, substituting the impl method name/signature —
     * used to simulate a serialized form pointing at the wrong method.
     */
    private SerializedLambda copySerializedLambda(SerializedLambda sl, String implMethodName, String implMethodSignature) {
        Object[] captures = new Object[sl.getCapturedArgCount()];
        for (int i = 0; i < captures.length; i++) {
            captures[i] = sl.getCapturedArg(i);
        }
        return new SerializedLambda(loadClass(sl.getCapturingClass()), sl.getFunctionalInterfaceClass(), sl.getFunctionalInterfaceMethodName(), sl.getFunctionalInterfaceMethodSignature(), sl.getImplMethodKind(), sl.getImplClass(), implMethodName, implMethodSignature, sl.getInstantiatedMethodType(), captures);
    }

    // Resolves a JVM-internal class name (slash-separated) to a Class.
    private Class<?> loadClass(String className) {
        try {
            return Class.forName(className.replace('/', '.'));
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    private <A, B> A reconstitute(A f1) {
        return reconstitute(f1, null);
    }

    /**
     * Serializes {@code f1} via its writeReplace form and deserializes it with
     * LambdaDeserializer, optionally using {@code cache} for class reuse.
     */
    @SuppressWarnings("unchecked")
    private <A, B> A reconstitute(A f1, java.util.HashMap<String, MethodHandle> cache) {
        try {
            return (A) LambdaDeserializer.deserializeLambda(LambdaHost.lookup(), cache, writeReplace(f1));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Invokes the compiler-generated writeReplace() on a serializable lambda
     * to obtain its SerializedLambda representation.
     */
    private <A> SerializedLambda writeReplace(A f1) {
        try {
            Method writeReplace = f1.getClass().getDeclaredMethod("writeReplace");
            writeReplace.setAccessible(true);
            return (SerializedLambda) writeReplace.invoke(f1);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}

/** Serializable single-argument function used as the lambda target shape. */
interface F1<A, B> extends Serializable {
    B apply(A a);
}

/** Serializable zero-argument function used as the lambda target shape. */
interface F0<A> extends Serializable {
    A apply();
}

/**
 * Factory for lambdas backed by each kind of impl method the metafactory can
 * emit: private capturing impl, static impl, virtual/interface/static method
 * references, and a constructor reference.
 */
class LambdaHost {
    // Captures a local and `this`, forcing a private instance impl method.
    public F1<Boolean, String> lambdaBackedByPrivateImplMethod() {
        int local = 42;
        return (b) -> Arrays.asList(local, b ? "true" : "false", LambdaHost.this).toString();
    }

    @SuppressWarnings("Convert2MethodRef")
    public F1<Boolean, String> lambdaBackedByStaticImplMethod() {
        return (b) -> String.valueOf(b);
    }

    public F1<Boolean, String> lambdaBackedByStaticMethodReference() {
        return String::valueOf;
    }

    public F1<Boolean, String> lambdaBackedByVirtualMethodReference() {
        return Object::toString;
    }

    public F1<I, Object> lambdaBackedByInterfaceMethodReference() {
        return I::i;
    }

    public F0<Object> lambdaBackedByConstructorCall() {
        return String::new;
    }

    // Lookup anchored in this class so deserialization has access to the
    // private/static impl methods generated here.
    public static MethodHandles.Lookup lookup() {
        return MethodHandles.lookup();
    }
}

/** Interface with a default method, for the interface-method-reference case. */
interface I {
    default String i() { return "i"; };
}
package org.apache.ddlutils.platform.postgresql;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Iterator;
import java.util.Map;

import org.apache.commons.beanutils.DynaBean;
import org.apache.ddlutils.DatabaseOperationException;
import org.apache.ddlutils.PlatformInfo;
import org.apache.ddlutils.alteration.AddColumnChange;
import org.apache.ddlutils.alteration.ModelComparator;
import org.apache.ddlutils.alteration.RemoveColumnChange;
import org.apache.ddlutils.alteration.TableChange;
import org.apache.ddlutils.alteration.TableDefinitionChangesPredicate;
import org.apache.ddlutils.dynabean.SqlDynaProperty;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.Table;
import org.apache.ddlutils.platform.CreationParameters;
import org.apache.ddlutils.platform.DefaultTableDefinitionChangesPredicate;
import org.apache.ddlutils.platform.PlatformImplBase;

/**
 * The platform implementation for PostgresSql.
 *
 * @version $Revision: 231306 $
 */
public class PostgreSqlPlatform extends PlatformImplBase
{
    /** Database name of this platform. */
    public static final String DATABASENAME     = "PostgreSql";
    /** The standard PostgreSQL jdbc driver. */
    public static final String JDBC_DRIVER      = "org.postgresql.Driver";
    /** The subprotocol used by the standard PostgreSQL driver. */
    public static final String JDBC_SUBPROTOCOL = "postgresql";

    /**
     * Creates a new platform instance.
     */
    public PostgreSqlPlatform()
    {
        PlatformInfo info = getPlatformInfo();

        info.setPrimaryKeyColumnAutomaticallyRequired(true);
        // this is the default length though it might be changed when building PostgreSQL
        // in file src/include/postgres_ext.h
        info.setMaxIdentifierLength(31);

        // Most JDBC types without a native PostgreSQL equivalent are stored as
        // BYTEA and back-mapped to LONGVARBINARY when reading the model.
        info.addNativeTypeMapping(Types.ARRAY, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.BINARY, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.BIT, "BOOLEAN");
        info.addNativeTypeMapping(Types.BLOB, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.BOOLEAN, "BOOLEAN", Types.BIT);
        info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
        info.addNativeTypeMapping(Types.DATALINK, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.DECIMAL, "NUMERIC", Types.NUMERIC);
        info.addNativeTypeMapping(Types.DISTINCT, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
        info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
        info.addNativeTypeMapping(Types.JAVA_OBJECT, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.LONGVARBINARY, "BYTEA");
        info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT", Types.LONGVARCHAR);
        info.addNativeTypeMapping(Types.NULL, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.OTHER, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.REF, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.STRUCT, "BYTEA", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
        info.addNativeTypeMapping(Types.VARBINARY, "BYTEA", Types.LONGVARBINARY);

        info.setDefaultSize(Types.CHAR, 254);
        info.setDefaultSize(Types.VARCHAR, 254);

        // no support for specifying the size for these types (because they are mapped
        // to BYTEA which back-maps to BLOB)
        info.setHasSize(Types.BINARY, false);
        info.setHasSize(Types.VARBINARY, false);

        setSqlBuilder(new PostgreSqlBuilder(this));
        setModelReader(new PostgreSqlModelReader(this));
    }

    /**
     * {@inheritDoc}
     */
    public String getName()
    {
        return DATABASENAME;
    }

    /**
     * Creates or drops the database referenced by the given connection url.
     * Connects to the "template1" database on the same server (which normally
     * always exists) and issues CREATE/DROP DATABASE from there, since
     * PostgreSQL cannot create or drop the database one is connected to.
     *
     * @param jdbcDriverClassName The jdbc driver class name
     * @param connectionUrl       The url to connect to the database if it were already created
     * @param username            The username for creating the database
     * @param password            The password for creating the database
     * @param parameters          Additional parameters for the operation
     * @param createDb            Whether to create or drop the database
     */
    private void createOrDropDatabase(String jdbcDriverClassName, String connectionUrl, String username, String password, Map parameters, boolean createDb) throws DatabaseOperationException, UnsupportedOperationException
    {
        // Only the standard PostgreSQL driver is supported because the URL
        // parsing below assumes its ".../dbname[?params]" layout.
        if (JDBC_DRIVER.equals(jdbcDriverClassName))
        {
            int slashPos = connectionUrl.lastIndexOf('/');

            if (slashPos < 0)
            {
                throw new DatabaseOperationException("Cannot parse the given connection url "+connectionUrl);
            }

            int    paramPos = connectionUrl.lastIndexOf('?');
            String baseDb   = connectionUrl.substring(0, slashPos + 1) + "template1";
            String dbName   = (paramPos > slashPos ? connectionUrl.substring(slashPos + 1, paramPos) : connectionUrl.substring(slashPos + 1));

            Connection   connection = null;
            Statement    stmt       = null;
            StringBuffer sql        = new StringBuffer();

            // NOTE(review): dbName and the extra parameters are concatenated
            // into the DDL without quoting; callers must supply trusted values.
            sql.append(createDb ? "CREATE" : "DROP");
            sql.append(" DATABASE ");
            sql.append(dbName);
            if ((parameters != null) && !parameters.isEmpty())
            {
                for (Iterator it = parameters.entrySet().iterator(); it.hasNext();)
                {
                    Map.Entry entry = (Map.Entry)it.next();

                    sql.append(" ");
                    sql.append(entry.getKey().toString());
                    if (entry.getValue() != null)
                    {
                        sql.append(" ");
                        sql.append(entry.getValue().toString());
                    }
                }
            }
            if (getLog().isDebugEnabled())
            {
                getLog().debug("About to create database via "+baseDb+" using this SQL: "+sql.toString());
            }
            try
            {
                Class.forName(jdbcDriverClassName);
                connection = DriverManager.getConnection(baseDb, username, password);
                stmt       = connection.createStatement();
                stmt.execute(sql.toString());
                logWarnings(connection);
            }
            catch (Exception ex)
            {
                throw new DatabaseOperationException("Error while trying to " + (createDb ? "create" : "drop") + " a database: "+ex.getLocalizedMessage(), ex);
            }
            finally
            {
                // Best-effort cleanup: close failures are deliberately ignored.
                if (stmt != null)
                {
                    try
                    {
                        stmt.close();
                    }
                    catch (SQLException ex)
                    {}
                }
                if (connection != null)
                {
                    try
                    {
                        connection.close();
                    }
                    catch (SQLException ex)
                    {}
                }
            }
        }
        else
        {
            throw new UnsupportedOperationException("Unable to " + (createDb ? "create" : "drop") + " a PostgreSQL database via the driver "+jdbcDriverClassName);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void createDatabase(String jdbcDriverClassName, String connectionUrl, String username, String password, Map parameters) throws DatabaseOperationException, UnsupportedOperationException
    {
        // With PostgreSQL, you create a database by executing "CREATE DATABASE" in an existing database (usually
        // the template1 database because it usually exists)
        createOrDropDatabase(jdbcDriverClassName, connectionUrl, username, password, parameters, true);
    }

    /**
     * {@inheritDoc}
     */
    public void dropDatabase(String jdbcDriverClassName, String connectionUrl, String username, String password) throws DatabaseOperationException, UnsupportedOperationException
    {
        // With PostgreSQL, you create a database by executing "DROP DATABASE" in an existing database (usually
        // the template1 database because it usually exists)
        createOrDropDatabase(jdbcDriverClassName, connectionUrl, username, password, null, false);
    }

    /**
     * {@inheritDoc}
     *
     * Overridden because PostgreSQL rejects setNull for BYTEA columns; null
     * binary values are bound via setBytes(index, null) instead.
     */
    protected void setObject(PreparedStatement statement, int sqlIndex, DynaBean dynaBean, SqlDynaProperty property) throws SQLException
    {
        int    typeCode = property.getColumn().getTypeCode();
        Object value    = dynaBean.get(property.getName());

        // PostgreSQL doesn't like setNull for BYTEA columns
        if (value == null)
        {
            switch (typeCode)
            {
                case Types.BINARY:
                case Types.VARBINARY:
                case Types.LONGVARBINARY:
                case Types.BLOB:
                    statement.setBytes(sqlIndex, null);
                    break;
                default:
                    statement.setNull(sqlIndex, typeCode);
                    break;
            }
        }
        else
        {
            super.setObject(statement, sqlIndex, dynaBean, property);
        }
    }

    /**
     * {@inheritDoc}
     *
     * PostgreSQL cannot drop primary key columns directly, so the comparator
     * is configured to express that as a table rebuild instead.
     */
    protected ModelComparator getModelComparator()
    {
        ModelComparator comparator = super.getModelComparator();

        comparator.setCanDropPrimaryKeyColumns(false);
        return comparator;
    }

    /**
     * {@inheritDoc}
     */
    protected TableDefinitionChangesPredicate getTableDefinitionChangesPredicate()
    {
        return new DefaultTableDefinitionChangesPredicate()
        {
            protected boolean isSupported(Table intermediateTable, TableChange change)
            {
                if (change instanceof RemoveColumnChange)
                {
                    return true;
                }
                else if (change instanceof AddColumnChange)
                {
                    AddColumnChange addColumnChange = (AddColumnChange)change;

                    // We can only handle this if
                    // * the column is not set to NOT NULL (the constraint would be applied immediately
                    //   which will not work if there is already data in the table)
                    // * the column has no default value (it would be applied after the change which
                    //   means that PostgreSQL would behave differently from other databases where the
                    //   default is applied to every column)
                    // * the column is added at the end of the table (PostgreSQL does not support
                    //   insertion of a column)
                    return !addColumnChange.getNewColumn().isRequired() &&
                           (addColumnChange.getNewColumn().getDefaultValue() == null) &&
                           (addColumnChange.getNextColumn() == null);
                }
                else
                {
                    // TODO: PK changes ?
                    return false;
                }
            }
        };
    }

    /**
     * Processes the removal of a column from a table.
     *
     * @param currentModel The current database schema
     * @param params       The parameters used in the creation of new tables. Note that for existing
     *                     tables, the parameters won't be applied
     * @param change       The change object
     */
    public void processChange(Database currentModel, CreationParameters params, RemoveColumnChange change) throws IOException
    {
        Table  changedTable  = findChangedTable(currentModel, change);
        Column removedColumn = changedTable.findColumn(change.getChangedColumn(), isDelimitedIdentifierModeOn());

        ((PostgreSqlBuilder)getSqlBuilder()).dropColumn(changedTable, removedColumn);
        change.apply(currentModel, isDelimitedIdentifierModeOn());
    }
}
/*
 * Copyright 2015 Torridity.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.tor.tribes.types;

import de.tor.tribes.control.ManageableType;
import de.tor.tribes.io.DataHolder;
import de.tor.tribes.io.TroopAmountFixed;
import de.tor.tribes.io.UnitHolder;
import de.tor.tribes.php.LuckViewInterface;
import de.tor.tribes.php.UnitTableInterface;
import de.tor.tribes.types.ext.Barbarians;
import de.tor.tribes.types.ext.InvalidTribe;
import de.tor.tribes.types.ext.Tribe;
import de.tor.tribes.types.ext.Village;
import de.tor.tribes.util.BBSupport;
import de.tor.tribes.util.BuildingSettings;
import de.tor.tribes.util.TimeManager;
import de.tor.tribes.util.translation.TranslationManager;
import de.tor.tribes.util.translation.Translator;
import de.tor.tribes.util.xml.JDomUtils;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdom2.Document;
import org.jdom2.Element;

/**
 * In-game battle report: attacker/defender troops, losses, spy results,
 * wall/building damage and loyalty (acceptance) changes. Supports XML
 * (de)serialization and BB-code export via {@link BBSupport}.
 *
 * @author Torridity
 */
public class FightReport extends ManageableType implements Comparable<FightReport>, BBSupport {

    // logger is stateless and shared by all reports
    private static final Logger logger = LogManager.getLogger("FightReport");
    private static Translator trans = TranslationManager.getTranslator("types.FightReport");

    // BB-code placeholders; order must match the array returned by getReplacements()
    private final static String[] VARIABLES = new String[]{"%ATTACKER%", "%SOURCE%", "%DEFENDER%", "%TARGET%",
        "%SEND_TIME%", "%RESULT%", "%LUCK%", "%MORALE%", "%ATTACKER_TROOPS%", "%DEFENDER_TROOPS%",
        "%DEFENDERS_OUTSIDE%", "%DEFENDERS_EN_ROUTE%", "%LOYALITY_CHANGE%", "%WALL_CHANGE%", "%BUILDING_CHANGE%"};

    public static String getStandardTemplate() {
        return trans.get("standard_template");
    }

    @Override
    public String[] getBBVariables() {
        return VARIABLES;
    }

    /**
     * Builds the replacement values for {@link #getBBVariables()}, in the same order.
     * {@code pExtended} is part of the BBSupport contract but unused here.
     */
    @Override
    public String[] getReplacements(boolean pExtended) {
        String attackerVal = attacker.toBBCode();
        String targetVal = targetVillage.toBBCode();
        SimpleDateFormat d = TimeManager.getSimpleDateFormat("dd.MM.yy HH:mm:ss");
        String sendDateVal = d.format(new Date(timestamp));
        String resultVal = (won) ? trans.get("attacker_won") : trans.get("defender_won");
        NumberFormat nf = NumberFormat.getInstance();
        // luck with one decimal place
        nf.setMinimumFractionDigits(1);
        nf.setMaximumFractionDigits(1);
        String luckVal = "[img]" + LuckViewInterface.createLuckIndicator(luck) + "[/img] " + nf.format(luck) + "%";
        // morale as integer percentage
        nf.setMinimumFractionDigits(0);
        nf.setMaximumFractionDigits(0);
        String moraleVal = nf.format(moral) + " %";
        String sourceVal = sourceVillage.toBBCode();
        String attackerTroopsVal = (areAttackersHidden())
                ? trans.get("hidden_by_owner")
                : "[img]" + UnitTableInterface.createAttackerUnitTableLink(attackers, diedAttackers) + "[/img]";
        String defenderVal = defender.toBBCode();
        String defenderTroopsVal = (wasLostEverything())
                ? trans.get("nobody_survived")
                : "[img]" + UnitTableInterface.createDefenderUnitTableLink(defenders, diedDefenders) + "[/img]";
        String troopsEnRouteVal = (whereDefendersOnTheWay())
                ? trans.get("troops_otw") + "\n\n" + "[img]" + UnitTableInterface.createAttackerUnitTableLink(defendersOnTheWay) + "[/img]"
                : "";
        String troopsOutsideVal = "";
        if (whereDefendersOutside()) {
            Set<Village> targetKeys = defendersOutside.keySet();
            for (Village target : targetKeys) {
                troopsOutsideVal += target.toBBCode() + "\n\n";
                troopsOutsideVal += "[img]" + UnitTableInterface.createAttackerUnitTableLink(defendersOutside.get(target)) + "[/img]\n\n";
            }
        }
        String loyalityChangeVal = (wasSnobAttack())
                ? String.format(trans.get("acceptance_change"), getAcceptanceBefore(), getAcceptanceAfter())
                : "";
        String wallChangeVal = (wasWallDamaged())
                ? String.format(trans.get("wall_change"), getWallBefore(), getWallAfter())
                : "";
        String cataChangeVal = (wasBuildingDamaged())
                ? String.format(trans.get("building_change"), BuildingSettings.getTranslatedName(aimedBuildingId), getBuildingBefore(), getBuildingAfter())
                : "";
        return new String[]{attackerVal, sourceVal, defenderVal, targetVal, sendDateVal, resultVal, luckVal,
            moraleVal, attackerTroopsVal, defenderTroopsVal, troopsOutsideVal, troopsEnRouteVal,
            loyalityChangeVal, wallChangeVal, cataChangeVal};
    }

    /** Coarse report classification used e.g. for coloring/sorting. */
    public enum status {
        LOST_NOTHING, WON_WITH_LOSSES, LOST_EVERYTHING, SPY, HIDDEN
    }
    private boolean won = false;
    private long timestamp = 0;
    private double luck = 0.0;
    private double moral = 100.0;
    private Tribe attacker = null;
    private Village sourceVillage = null;
    private TroopAmountFixed attackers = null;
    private TroopAmountFixed diedAttackers = null;
    private Tribe defender = null;
    private Village targetVillage = null;
    private TroopAmountFixed defenders = null;
    private TroopAmountFixed diedDefenders = null;
    // support troops stationed in other villages, keyed by that village
    private HashMap<Village, TroopAmountFixed> defendersOutside = null;
    private TroopAmountFixed defendersOnTheWay = null;
    private int wallBefore = -1;
    private int wallAfter = -1;
    private int aimedBuildingId = -1;
    private int buildingBefore = -1;
    private int buildingAfter = -1;
    private int acceptanceBefore = 100;
    private int acceptanceAfter = 100;
    private int[] spyedResources = null;
    private int[] haul = null;
    // spied building levels per BuildingSettings.BUILDING_NAMES index; -1 = unknown
    private int[] buildingLevels = null;
    // spy detail levels, ascending; constant variables, usable in switch labels
    public final int SPY_LEVEL_NONE = 0;
    public final int SPY_LEVEL_RESOURCES = 1;
    public final int SPY_LEVEL_BUILDINGS = 2;
    public final int SPY_LEVEL_OUTSIDE = 3;
    private int spyLevel = SPY_LEVEL_NONE;

    public FightReport() {
        attackers = new TroopAmountFixed();
        diedAttackers = new TroopAmountFixed();
        defenders = new TroopAmountFixed();
        diedDefenders = new TroopAmountFixed();
        defendersOutside = new HashMap<>();
        defendersOnTheWay = new TroopAmountFixed();
        buildingLevels = new int[BuildingSettings.BUILDING_NAMES.length];
        Arrays.fill(buildingLevels, -1);
    }

    /** Serializes a report to its single-line XML string form. */
    public static String toInternalRepresentation(FightReport pReport) {
        return JDomUtils.toShortString(pReport.toXml("report"));
    }

    /** Parses a report from {@link #toInternalRepresentation(FightReport)} output; null on any error. */
    public static FightReport fromInternalRepresentation(String pLine) {
        FightReport r = new FightReport();
        try {
            Document d = JDomUtils.getDocument(pLine);
            r.loadFromXml((Element) JDomUtils.getNodes(d, "report").get(0));
            return r;
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Restores the report from XML. Optional sections (wall, building,
     * acceptance, spy data, haul) are read best-effort: failures reset that
     * section to its "unknown" default and are only logged.
     */
    @Override
    public void loadFromXml(Element pElement) {
        try {
            this.timestamp = Long.parseLong(pElement.getChildText("timestamp"));
            this.moral = Double.parseDouble(pElement.getChildText("moral"));
            this.luck = Double.parseDouble(pElement.getChildText("luck"));
            //attacker stuff
            Element attackerElement = pElement.getChild("attacker");
            Element defenderElement = pElement.getChild("defender");
            int source = Integer.parseInt(attackerElement.getChildText("src"));
            this.sourceVillage = DataHolder.getSingleton().getVillagesById().get(source);
            int attackerId = Integer.parseInt(attackerElement.getChildText("id"));
            Tribe attElement = DataHolder.getSingleton().getTribes().get(attackerId);
            if (attElement != null) {
                setAttacker(attElement);
            } else {
                // -666 marks an explicitly invalid tribe; otherwise fall back to the village owner
                if (attackerId != -666 && this.sourceVillage != null && this.sourceVillage.getTribe() != null) {
                    setAttacker(this.sourceVillage.getTribe());
                } else {
                    setAttacker(InvalidTribe.getSingleton());
                }
            }
            int target = Integer.parseInt(defenderElement.getChildText("trg"));
            this.targetVillage = DataHolder.getSingleton().getVillagesById().get(target);
            int defenderId = Integer.parseInt(defenderElement.getChildText("id"));
            Tribe defendingTribe = DataHolder.getSingleton().getTribes().get(defenderId);
            if (defendingTribe != null) {
                setDefender(defendingTribe);
            } else {
                if (defenderId > 0 && this.targetVillage != null && this.targetVillage.getTribe() != null) {
                    setDefender(this.targetVillage.getTribe());
                } else {
                    if (defenderId == -666) {
                        setDefender(InvalidTribe.getSingleton());
                    } else {
                        setDefender(Barbarians.getSingleton());
                    }
                }
            }
            this.attackers = new TroopAmountFixed(attackerElement.getChild("before"));
            this.diedAttackers = new TroopAmountFixed(attackerElement.getChild("died"));
            this.defenders = new TroopAmountFixed(defenderElement.getChild("before"));
            this.diedDefenders = new TroopAmountFixed(defenderElement.getChild("died"));
            try {
                this.defendersOnTheWay = new TroopAmountFixed(defenderElement.getChild("otw"));
            } catch (Exception ignored) {
            }
            Element dDefendersOutside = null;
            try {
                dDefendersOutside = defenderElement.getChild("outside");
            } catch (Exception ignored) {
            }
            this.defendersOutside = new HashMap<>();
            if (dDefendersOutside != null) {
                for (Element e : (List<Element>) JDomUtils.getNodes(dDefendersOutside, "support")) {
                    try {
                        int villageId = e.getAttribute("id").getIntValue();
                        Village v = DataHolder.getSingleton().getVillagesById().get(villageId);
                        if (v != null) {
                            // several support entries may reference the same village; sum them up
                            TroopAmountFixed unitsInvillage = this.defendersOutside.get(v);
                            if (unitsInvillage == null) {
                                unitsInvillage = new TroopAmountFixed(e);
                            } else {
                                unitsInvillage.addAmount(new TroopAmountFixed(e));
                            }
                            this.defendersOutside.put(v, unitsInvillage);
                        }
                    } catch (Exception ex) {
                        logger.debug("cannot read defenders outside", ex);
                    }
                }
            }
            try {
                Element e = pElement.getChild("wall");
                if (e != null) {
                    this.wallBefore = Byte.parseByte(e.getAttributeValue("before"));
                    this.wallAfter = Byte.parseByte(e.getAttributeValue("after"));
                }
            } catch (Exception e) {
                this.wallBefore = -1;
                this.wallAfter = -1;
            }
            try {
                Element e = pElement.getChild("building");
                if (e != null) {
                    this.aimedBuildingId = Byte.parseByte(e.getAttributeValue("target"));
                    this.buildingBefore = Byte.parseByte(e.getAttributeValue("before"));
                    this.buildingAfter = Byte.parseByte(e.getAttributeValue("after"));
                }
            } catch (Exception e) {
                this.buildingBefore = -1;
                this.buildingAfter = -1;
                logger.debug("cannot read building damage", e);
            }
            try {
                Element e = pElement.getChild("acceptance");
                if (e != null) {
                    this.acceptanceBefore = Byte.parseByte(e.getAttributeValue("before"));
                    this.acceptanceAfter = Byte.parseByte(e.getAttributeValue("after"));
                }
            } catch (Exception e) {
                this.acceptanceBefore = 100;
                this.acceptanceAfter = 100;
                logger.debug("cannot read acceptance", e);
            }
            try {
                Element e = pElement.getChild("spyBuildings");
                for (int i = 0; i < this.buildingLevels.length; i++) {
                    this.buildingLevels[i] = Integer.parseInt(e.getAttributeValue(
                            BuildingSettings.BUILDING_NAMES[i]));
                }
            } catch (Exception e) {
                logger.debug("Failed to read buildings", e);
            }
            try {
                this.spyLevel = Integer.parseInt(pElement.getChildText("spyLevel"));
            } catch (Exception e) {
                logger.debug("Failed to read spy Level", e);
            }
            try {
                Element haulElm = pElement.getChild("haul");
                if (haulElm != null) {
                    this.haul = new int[3];
                    this.haul[0] = Integer.parseInt(haulElm.getAttributeValue("wood"));
                    this.haul[1] = Integer.parseInt(haulElm.getAttributeValue("clay"));
                    this.haul[2] = Integer.parseInt(haulElm.getAttributeValue("iron"));
                }
            } catch (Exception e) {
                logger.debug("Failed to read haul information", e);
                this.haul = null;
            }
            try {
                Element spyElm = pElement.getChild("spy");
                if (spyElm != null) {
                    this.spyedResources = new int[3];
                    this.spyedResources[0] = Integer.parseInt(spyElm.getAttributeValue("wood"));
                    this.spyedResources[1] = Integer.parseInt(spyElm.getAttributeValue("clay"));
                    this.spyedResources[2] = Integer.parseInt(spyElm.getAttributeValue("iron"));
                }
            } catch (Exception e) {
                logger.debug("Failed to read spyed resources", e);
                this.spyedResources = null;
            }
            try {
                this.won = Boolean.parseBoolean(pElement.getChildText("won"));
            } catch (Exception e) {
                logger.debug("Failed to read won Level", e);
            }
        } catch (Exception e) {
            logger.warn("failed to fully read the report", e);
        }
    }

    /**
     * Serializes this report to XML. Optional sections are written only when
     * present. Returns null if serialization fails.
     */
    @Override
    public Element toXml(String elementName) {
        Element report = new Element(elementName);
        try {
            //general part
            report.addContent(new Element("timestamp").setText(Long.toString(timestamp)));
            report.addContent(new Element("moral").setText(Double.toString(moral)));
            report.addContent(new Element("luck").setText(Double.toString(luck)));
            //attacker part
            Element attackerE = new Element("attacker");
            attackerE.addContent(new Element("id").setText(Integer.toString(attacker.getId())));
            attackerE.addContent(new Element("src").setText(Integer.toString(sourceVillage.getId())));
            attackerE.addContent(attackers.toXml("before"));
            attackerE.addContent(diedAttackers.toXml("died"));
            report.addContent(attackerE);
            //defender part
            Element defenderE = new Element("defender");
            defenderE.addContent(new Element("id").setText(Integer.toString(defender.getId())));
            defenderE.addContent(new Element("trg").setText(Integer.toString(targetVillage.getId())));
            defenderE.addContent(defenders.toXml("before"));
            defenderE.addContent(diedDefenders.toXml("died"));
            if (whereDefendersOnTheWay()) {
                defenderE.addContent(defendersOnTheWay.toXml("otw"));
            }
            if (whereDefendersOutside()) {
                Element outsideE = new Element("outside");
                for (Village target : defendersOutside.keySet()) {
                    Element defOutside = defendersOutside.get(target).toXml("support");
                    defOutside.setAttribute("id", Integer.toString(target.getId()));
                    outsideE.addContent(defOutside);
                }
                defenderE.addContent(outsideE);
            }
            report.addContent(defenderE);
            if (wasWallDamaged()) {
                Element wall = new Element("wall");
                wall.setAttribute("before", Integer.toString(getWallBefore()));
                wall.setAttribute("after", Integer.toString(getWallAfter()));
                report.addContent(wall);
            }
            if (wasBuildingDamaged()) {
                Element building = new Element("building");
                building.setAttribute("target", Integer.toString(aimedBuildingId));
                building.setAttribute("before", Integer.toString(getBuildingBefore()));
                building.setAttribute("after", Integer.toString(getBuildingAfter()));
                report.addContent(building);
            }
            if (wasSnobAttack()) {
                Element building = new Element("acceptance");
                building.setAttribute("before", Integer.toString(getAcceptanceBefore()));
                building.setAttribute("after", Integer.toString(getAcceptanceAfter()));
                report.addContent(building);
            }
            if (haul != null) {
                Element haulE = new Element("haul");
                haulE.setAttribute("wood", Integer.toString(haul[0]));
                haulE.setAttribute("clay", Integer.toString(haul[1]));
                haulE.setAttribute("iron", Integer.toString(haul[2]));
                report.addContent(haulE);
            }
            if (spyedResources != null) {
                Element spy = new Element("spy");
                spy.setAttribute("wood", Integer.toString(spyedResources[0]));
                spy.setAttribute("clay", Integer.toString(spyedResources[1]));
                spy.setAttribute("iron", Integer.toString(spyedResources[2]));
                report.addContent(spy);
            }
            Element spyBuildings = new Element("spyBuildings");
            for (int i = 0; i < buildingLevels.length; i++) {
                spyBuildings.setAttribute(BuildingSettings.BUILDING_NAMES[i],
                        Integer.toString(buildingLevels[i]));
            }
            report.addContent(spyBuildings);
            report.addContent(new Element("spyLevel").setText(Integer.toString(spyLevel)));
            report.addContent(new Element("won").setText(Boolean.toString(won)));
            return report;
        } catch (Exception e) {
            logger.error("Exception during generating XML", e);
            return null;
        }
    }

    /**
     * @return the attacker
     */
    public Tribe getAttacker() {
        return attacker;
    }

    /**
     * @param attacker the attacker to set; null maps to the Barbarians singleton
     */
    public void setAttacker(Tribe attacker) {
        if (attacker == null) {
            this.attacker = Barbarians.getSingleton();
        } else {
            this.attacker = attacker;
        }
    }

    /**
     * @return the sourceVillage
     */
    public Village getSourceVillage() {
        return sourceVillage;
    }

    /**
     * @param sourceVillage the sourceVillage to set
     */
    public void setSourceVillage(Village sourceVillage) {
        this.sourceVillage = sourceVillage;
    }

    /**
     * @return the attackers
     */
    public TroopAmountFixed getAttackers() {
        return attackers;
    }

    /**
     * @param attackers the attackers to set
     */
    public void setAttackers(TroopAmountFixed attackers) {
        this.attackers = attackers;
    }

    /**
     * @return the diedAttackers
     */
    public TroopAmountFixed getDiedAttackers() {
        return diedAttackers;
    }

    /**
     * @param diedAttackers the diedAttackers to set
     */
    public void setDiedAttackers(TroopAmountFixed diedAttackers) {
        this.diedAttackers = diedAttackers;
    }

    /** @return attackers minus losses, or null if the attackers are hidden */
    public TroopAmountFixed getSurvivingAttackers() {
        TroopAmountFixed result = null;
        if (!areAttackersHidden() && attackers != null && diedAttackers != null) {
            result = (TroopAmountFixed) attackers.clone();
            result.removeAmount(diedAttackers);
        }
        return result;
    }

    /**
     * @return the defender
     */
    public Tribe getDefender() {
        return defender;
    }

    /**
     * @param defender the defender to set; null maps to the Barbarians singleton
     */
    public void setDefender(Tribe defender) {
        if (defender == null) {
            this.defender = Barbarians.getSingleton();
        } else {
            this.defender = defender;
        }
    }

    /**
     * @return the targetVillage
     */
    public Village getTargetVillage() {
        return targetVillage;
    }

    /**
     * @param targetVillage the targetVillage to set
     */
    public void setTargetVillage(Village targetVillage) {
        this.targetVillage = targetVillage;
    }

    public void setSpyedResources(int pWood, int pClay, int pIron) {
        spyedResources = new int[]{pWood, pClay, pIron};
    }

    public int[] getSpyedResources() {
        return spyedResources;
    }

    public void setHaul(int pWood, int pClay, int pIron) {
        haul = new int[]{pWood, pClay, pIron};
    }

    public int[] getHaul() {
        return haul;
    }

    /**
     * @return the defenders
     */
    public TroopAmountFixed getDefenders() {
        return defenders;
    }

    /**
     * @param defenders the defenders to set
     */
    public void setDefenders(TroopAmountFixed defenders) {
        this.defenders = defenders;
    }

    /**
     * @return the diedDefenders
     */
    public TroopAmountFixed getDiedDefenders() {
        return diedDefenders;
    }

    /** @return defenders minus losses, or null if everything was lost (no info) */
    public TroopAmountFixed getSurvivingDefenders() {
        TroopAmountFixed result = null;
        if (!wasLostEverything() && defenders != null && diedDefenders != null) {
            result = (TroopAmountFixed) defenders.clone();
            result.removeAmount(diedDefenders);
        }
        return result;
    }

    /** True if at least one defender survived. False on total loss (previously NPE'd). */
    public boolean hasSurvivedDefenders() {
        // FIX: getSurvivingDefenders() returns null when all defenders died;
        // the old code dereferenced it unconditionally
        TroopAmountFixed surviving = getSurvivingDefenders();
        return (surviving != null && surviving.getTroopPopCount() != 0);
    }

    /**
     * @param diedDefenders the diedDefenders to set
     */
    public void setDiedDefenders(TroopAmountFixed diedDefenders) {
        this.diedDefenders = diedDefenders;
    }

    public void addDefendersOutside(Village pVillage, TroopAmountFixed pDefenders) {
        defendersOutside.put(pVillage, pDefenders);
    }

    public boolean wasLostEverything() {
        //defenders are set to -1 if no information on them could be achieved as result of a total loss
        return !defenders.containsInformation();
    }

    /** True for a snob attack carried by fewer than 1000 troops. */
    public boolean isSimpleSnobAttack() {
        if (!wasSnobAttack()) {
            // acceptance unchanged -> no snob involved
            return false;
        }
        return (attackers.getTroopSum() < 1000);
    }

    //@TODO configurable guess
    /** Heuristic classification of the attack type (snob/spy/fake/clean). */
    public int guessType() {
        if (wasSnobAttack() || isSimpleSnobAttack()) {
            //acceptance reduced, must be snob
            return Attack.SNOB_TYPE;
        }
        if (areAttackersHidden()) {
            //attackers hidden, no info possible
            return Attack.NO_TYPE;
        }
        boolean isSnobAttack = false;
        int attackerCount = 0;
        int spyCount = 0;
        if (attackers != null) {
            attackerCount = attackers.getTroopSum();
            if (attackers.getAmountForUnit("snob") >= 1) {
                isSnobAttack = true;
            }
            if (attackers.getAmountForUnit("spy") >= 1) {
                spyCount = attackers.getAmountForUnit("spy");
            }
        }
        if (isSnobAttack) {
            //snob joined attack but no acceptance was reduces
            return Attack.SNOB_TYPE;
        }
        double spyPerc = 100.0 * (double) spyCount / (double) attackerCount;
        if (spyPerc > 50.0) {
            //only spies joined the attack
            return Attack.SPY_TYPE;
        }
        if (attackerCount < 500) {
            return Attack.FAKE_TYPE;
        }
        return Attack.CLEAN_TYPE;
    }

    public boolean wasLostNothing() {
        if (areAttackersHidden()) {
            return false;
        }
        return diedAttackers.getTroopSum() == 0;
    }

    public boolean areAttackersHidden() {
        return !attackers.containsInformation();
    }

    public boolean whereDefendersOnTheWay() {
        return (defendersOnTheWay != null && defendersOnTheWay.getTroopSum() != 0);
    }

    public boolean whereDefendersOutside() {
        return (defendersOutside != null && !defendersOutside.isEmpty());
    }

    /**
     * @return the defendersOnTheWay
     */
    public TroopAmountFixed getDefendersOnTheWay() {
        return defendersOnTheWay;
    }

    /**
     * @return the defendersOutside
     */
    public HashMap<Village, TroopAmountFixed> getDefendersOutside() {
        return defendersOutside;
    }

    /**
     * @param defendersOnTheWay the defendersOnTheWay to set
     */
    public void setDefendersOnTheWay(TroopAmountFixed defendersOnTheWay) {
        this.defendersOnTheWay = defendersOnTheWay;
    }

    /**
     * @return the wallBefore
     */
    public int getWallBefore() {
        return wallBefore;
    }

    /**
     * @param wallBefore the wallBefore to set
     */
    public void setWallBefore(int wallBefore) {
        this.wallBefore = wallBefore;
    }

    /**
     * @return the wallAfter
     */
    public int getWallAfter() {
        return wallAfter;
    }

    /**
     * @param wallAfter the wallAfter to set
     */
    public void setWallAfter(int wallAfter) {
        this.wallAfter = wallAfter;
    }

    /**
     * @return the aimedBuildingId
     */
    public int getAimedBuildingId() {
        return aimedBuildingId;
    }

    /**
     * @param pAimedBuildingId the aimedBuildingId to set
     */
    public void setAimedBuildingId(int pAimedBuildingId) {
        this.aimedBuildingId = pAimedBuildingId;
    }

    /**
     * @return the buildingBefore
     */
    public int getBuildingBefore() {
        return buildingBefore;
    }

    /**
     * @param buildingBefore the buildingBefore to set
     */
    public void setBuildingBefore(int buildingBefore) {
        this.buildingBefore = buildingBefore;
    }

    /**
     * @return the buildingAfter
     */
    public int getBuildingAfter() {
        return buildingAfter;
    }

    /**
     * @param buildingAfter the buildingAfter to set
     */
    public void setBuildingAfter(int buildingAfter) {
        this.buildingAfter = buildingAfter;
    }

    /**
     * @return the acceptanceBefore
     */
    public int getAcceptanceBefore() {
        return acceptanceBefore;
    }

    /**
     * @param acceptanceBefore the acceptanceBefore to set
     */
    public void setAcceptanceBefore(int acceptanceBefore) {
        this.acceptanceBefore = acceptanceBefore;
    }

    /**
     * @return the acceptanceAfter
     */
    public int getAcceptanceAfter() {
        return acceptanceAfter;
    }

    /**
     * @param acceptanceAfter the acceptanceAfter to set
     */
    public void setAcceptanceAfter(int acceptanceAfter) {
        this.acceptanceAfter = acceptanceAfter;
    }

    public boolean wasWallDamaged() {
        return (getWallBefore() > 0);
    }

    public boolean wasBuildingDamaged() {
        return (getBuildingBefore() > 0);
    }

    /** True if only spies (and nothing else) survived on the attacker side. */
    public boolean isSpyReport() {
        if (wasLostEverything()) {
            return false;
        }
        TroopAmountFixed survivingAtt = getSurvivingAttackers();
        if (survivingAtt == null) {
            // FIX: attackers hidden -> no unit info; the old code null-checked only in
            // the non-spy branch and NPE'd if "spy" happened to be iterated first
            return false;
        }
        boolean spySurvived = false;
        for (UnitHolder unit : DataHolder.getSingleton().getUnits()) {
            if (unit.getPlainName().equals("spy")) {
                if (survivingAtt.getAmountForUnit(unit) > 0) {
                    spySurvived = true;
                }
            } else {
                if (survivingAtt.getAmountForUnit(unit) > 0) {
                    //something else survived too
                    return false;
                }
            }
        }
        return spySurvived;
    }

    public int getDestroyedWallLevels() {
        if (wasWallDamaged()) {
            return getWallBefore() - getWallAfter();
        }
        return 0;
    }

    public int getDestroyedBuildingLevels() {
        if (wasBuildingDamaged()) {
            return getBuildingBefore() - getBuildingAfter();
        }
        return 0;
    }

    public boolean wasSnobAttack() {
        return getAcceptanceAfter() < getAcceptanceBefore();
    }

    public boolean wasConquered() {
        return (getAcceptanceAfter() <= 0);
    }

    /**
     * @return the won
     */
    public boolean isWon() {
        return won;
    }

    /**
     * @param won the won to set
     */
    public void setWon(boolean won) {
        this.won = won;
    }

    /**
     * @return the luck
     */
    public double getLuck() {
        return luck;
    }

    /**
     * @param luck the luck to set
     */
    public void setLuck(double luck) {
        this.luck = luck;
    }

    /**
     * @return the moral
     */
    public double getMoral() {
        return moral;
    }

    /**
     * @param moral the moral to set
     */
    public void setMoral(double moral) {
        this.moral = moral;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    public long getTimestamp() {
        return timestamp;
    }

    /** True if every mandatory field is populated. */
    public boolean isValid() {
        return (attacker != null
                && sourceVillage != null
                && attackers != null
                && diedAttackers != null
                && defender != null
                && targetVillage != null
                && defenders != null
                && diedDefenders != null);
    }

    /** Bitmask of effects on the target village: 1=wall, 2=building, 4=conquered. */
    public int getVillageEffects() {
        int effect = 0;
        if (wasWallDamaged()) {
            effect += 1;
        }
        if (wasBuildingDamaged()) {
            effect += 2;
        }
        if (wasConquered()) {
            effect += 4;
        }
        return effect;
    }

    /** Sort key: 0=green, 1=yellow, 2=blue(spy), 3=red, 4=grey(hidden). */
    public Integer getComparableValue() {
        if (areAttackersHidden()) {
            //grey report
            return 4;
        } else if (isSpyReport()) {
            //blue report
            return 2;
        } else if (wasLostEverything()) {
            //red report
            return 3;
        } else if (wasLostNothing()) {
            //green report
            return 0;
        } else {
            //yellow report
            return 1;
        }
    }

    @Override
    public String toString() {
        StringBuilder result = new StringBuilder();
        SimpleDateFormat f = TimeManager.getSimpleDateFormat("dd.MM.yy HH:mm:ss:SSS");
        result.append(trans.get("sent")).append(": ").append(f.format(new Date(timestamp))).append("\n");
        result.append(won ? trans.get("won") : trans.get("lost")).append("\n");
        if (isSpyReport()) {
            result.append(trans.get("color_blue"));
        } else if (wasLostEverything()) {
            result.append(trans.get("color_red"));
        } else if (wasLostNothing()) {
            result.append(trans.get("color_green"));
        } else {
            result.append(trans.get("color_yellow"));
        }
        result.append("\n");
        result.append(trans.get("morale")).append(": ").append(moral).append("\n");
        result.append(trans.get("luck")).append(": ").append(luck).append("\n");
        result.append(trans.get("attacker_tribe")).append(": ").append(attacker).append("\n");
        result.append(trans.get("attacker_village")).append(": ").append(sourceVillage).append("\n");
        String sAttackers = "";
        String sAttackersDied = "";
        String sDefenders = "";
        String sDefendersDied = "";
        if (areAttackersHidden()) {
            sAttackers = trans.get("hidden") + "\n";
            sAttackersDied = trans.get("hidden") + "\n";
        } else {
            for (UnitHolder unit : DataHolder.getSingleton().getUnits()) {
                sAttackers += attackers.getAmountForUnit(unit) + " ";
                sAttackersDied += diedAttackers.getAmountForUnit(unit) + " ";
            }
            sAttackers = sAttackers.trim() + "\n";
            sAttackersDied = sAttackersDied.trim() + "\n";
        }
        if (wasLostEverything()) {
            sDefenders = trans.get("no_informations") + "\n";
            sDefendersDied = trans.get("no_informations") + "\n";
        } else {
            for (UnitHolder unit : DataHolder.getSingleton().getUnits()) {
                sDefenders += defenders.getAmountForUnit(unit) + " ";
                sDefendersDied += diedDefenders.getAmountForUnit(unit) + " ";
            }
            sDefenders = sDefenders.trim() + "\n";
            sDefendersDied = sDefendersDied.trim() + "\n";
        }
        result.append(trans.get("amount")).append(": ").append(sAttackers);
        result.append(trans.get("looses")).append(": ").append(sAttackersDied);
        result.append(trans.get("defender_tribe")).append(": ").append(defender).append("\n");
        result.append(trans.get("defender_village")).append(": ").append(targetVillage).append("\n");
        result.append(trans.get("amount")).append(": ").append(sDefenders);
        result.append(trans.get("looses")).append(": ").append(sDefendersDied);
        if (wasConquered()) {
            if (whereDefendersOutside()) {
                Set<Village> villageKeys = defendersOutside.keySet();
                for (Village v : villageKeys) {
                    if (v != null) {
                        TroopAmountFixed troops = defendersOutside.get(v);
                        if (troops != null) {
                            result.append(" -> ").append(v).append(" ");
                            for (UnitHolder u : DataHolder.getSingleton().getUnits()) {
                                result.append(troops.getAmountForUnit(u)).append(" ");
                            }
                        }
                        result.append("\n");
                    }
                }
            }
        }
        if (wasWallDamaged()) {
            result.append(String.format(trans.get("wall_change_string"), getWallBefore(), getWallAfter())).append("\n");
        }
        if (wasBuildingDamaged()) {
            result.append(String.format(trans.get("building_change_string"), BuildingSettings.getTranslatedName(aimedBuildingId), getBuildingBefore(), getBuildingAfter())).append("\n");
        }
        if (wasSnobAttack()) {
            result.append(String.format(trans.get("acceptance_change"), getAcceptanceBefore(), getAcceptanceAfter())).append("\n");
        }
        return result.toString();
    }

    @Override
    public int compareTo(FightReport o) {
        return getComparableValue().compareTo(o.getComparableValue());
    }

    /**
     * Field-based equality over exactly the fields used by {@link #hashCode()}.
     * FIX: the old implementation compared hash codes only, so any hash
     * collision made two different reports "equal" (broken equals contract).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof FightReport)) {
            return false;
        }
        FightReport theOther = (FightReport) obj;
        return this.won == theOther.won
                && this.timestamp == theOther.timestamp
                && Double.doubleToLongBits(this.luck) == Double.doubleToLongBits(theOther.luck)
                && Double.doubleToLongBits(this.moral) == Double.doubleToLongBits(theOther.moral)
                && Objects.equals(this.attacker, theOther.attacker)
                && Objects.equals(this.sourceVillage, theOther.sourceVillage)
                && Objects.equals(this.attackers, theOther.attackers)
                && Objects.equals(this.diedAttackers, theOther.diedAttackers)
                && Objects.equals(this.defender, theOther.defender)
                && Objects.equals(this.targetVillage, theOther.targetVillage)
                && Objects.equals(this.defenders, theOther.defenders)
                && Objects.equals(this.diedDefenders, theOther.diedDefenders)
                && Objects.equals(this.defendersOutside, theOther.defendersOutside)
                && Objects.equals(this.defendersOnTheWay, theOther.defendersOnTheWay)
                && this.wallBefore == theOther.wallBefore
                && this.wallAfter == theOther.wallAfter
                && this.aimedBuildingId == theOther.aimedBuildingId
                && this.buildingBefore == theOther.buildingBefore
                && this.buildingAfter == theOther.buildingAfter
                && this.acceptanceBefore == theOther.acceptanceBefore
                && this.acceptanceAfter == theOther.acceptanceAfter
                && Arrays.equals(this.spyedResources, theOther.spyedResources)
                && Arrays.equals(this.haul, theOther.haul)
                && Arrays.equals(this.buildingLevels, theOther.buildingLevels);
    }

    @Override
    public int hashCode() {
        int hash = 5;
        hash = 53 * hash + (this.won ? 1 : 0);
        hash = 53 * hash + (int) (this.timestamp ^ (this.timestamp >>> 32));
        hash = 53 * hash + (int) (Double.doubleToLongBits(this.luck) ^ (Double.doubleToLongBits(this.luck) >>> 32));
        hash = 53 * hash + (int) (Double.doubleToLongBits(this.moral) ^ (Double.doubleToLongBits(this.moral) >>> 32));
        hash = 53 * hash + (this.attacker != null ? this.attacker.hashCode() : 0);
        hash = 53 * hash + (this.sourceVillage != null ? this.sourceVillage.hashCode() : 0);
        hash = 53 * hash + (this.attackers != null ? this.attackers.hashCode() : 0);
        hash = 53 * hash + (this.diedAttackers != null ? this.diedAttackers.hashCode() : 0);
        hash = 53 * hash + (this.defender != null ? this.defender.hashCode() : 0);
        hash = 53 * hash + (this.targetVillage != null ? this.targetVillage.hashCode() : 0);
        hash = 53 * hash + (this.defenders != null ? this.defenders.hashCode() : 0);
        hash = 53 * hash + (this.diedDefenders != null ? this.diedDefenders.hashCode() : 0);
        hash = 53 * hash + (this.defendersOutside != null ? this.defendersOutside.hashCode() : 0);
        hash = 53 * hash + (this.defendersOnTheWay != null ? this.defendersOnTheWay.hashCode() : 0);
        hash = 53 * hash + this.wallBefore;
        hash = 53 * hash + this.wallAfter;
        hash = 53 * hash + this.aimedBuildingId;
        hash = 53 * hash + this.buildingBefore;
        hash = 53 * hash + this.buildingAfter;
        hash = 53 * hash + this.acceptanceBefore;
        hash = 53 * hash + this.acceptanceAfter;
        hash = 53 * hash + Arrays.hashCode(this.spyedResources);
        hash = 53 * hash + Arrays.hashCode(this.haul);
        for (int i = 0; i < this.buildingLevels.length; i++)
            hash = 53 * hash + this.buildingLevels[i];
        return hash;
    }

    /* This method fills buildings that had not been spyed with zero,
     because buildings with level 0 are not shown by DS */
    public void fillMissingSpyInformation() {
        logger.debug(JDomUtils.toShortString(toXml("report")));
        if (spyedResources != null) {
            if (spyedResources[0] != 0) spyLevel = SPY_LEVEL_RESOURCES;
            if (spyedResources[1] != 0) spyLevel = SPY_LEVEL_RESOURCES;
            if (spyedResources[2] != 0) spyLevel = SPY_LEVEL_RESOURCES;
        }
        for (int i = 0; i < buildingLevels.length; i++) {
            if (buildingLevels[i] != -1) spyLevel = SPY_LEVEL_BUILDINGS;
        }
        if (whereDefendersOnTheWay() && spyLevel == SPY_LEVEL_BUILDINGS) {
            //Some Buildings e.g. main cannot be zero
            //outside Troops can only be spyed if buildings were spyed too
            spyLevel = SPY_LEVEL_OUTSIDE;
        }
        //set wall destruction (works also without spying)
        if (wallAfter != -1 && spyLevel < SPY_LEVEL_BUILDINGS) {
            buildingLevels[BuildingSettings.getBuildingIdByName("wall")] = wallAfter;
        }
        // NOTE: fall-through is intentional — each level implies all lower levels
        switch (spyLevel) {
            case SPY_LEVEL_OUTSIDE:
            case SPY_LEVEL_BUILDINGS:
                for (int i = 0; i < this.buildingLevels.length; i++)
                    if (this.buildingLevels[i] == -1) this.buildingLevels[i] = 0;
            case SPY_LEVEL_RESOURCES:
                if (spyedResources == null) spyedResources = new int[]{0, 0, 0};
            default:
        }
        logger.debug(JDomUtils.toShortString(toXml("report")));
    }

    public void setDefendersOutside(HashMap<Village, TroopAmountFixed> pDefendersOutside) {
        this.defendersOutside = pDefendersOutside;
    }

    public void setBuilding(int pBuildingId, int pLevel) {
        buildingLevels[pBuildingId] = pLevel;
    }

    public int getBuilding(int pBuilding) {
        return buildingLevels[pBuilding];
    }

    public int getSpyLevel() {
        return spyLevel;
    }

    public status getStatus() {
        if (areAttackersHidden()) return status.HIDDEN;
        if (isSpyReport()) return status.SPY;
        if (wasLostEverything()) return status.LOST_EVERYTHING;
        if (wasLostNothing()) return status.LOST_NOTHING;
        return status.WON_WITH_LOSSES;
    }
}
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.reactivex.internal.operators.observable;

import static org.mockito.Mockito.*;

import java.util.List;

import org.junit.*;
import org.mockito.MockitoAnnotations;

import io.reactivex.*;
import io.reactivex.disposables.Disposables;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.Functions;
import io.reactivex.observers.TestObserver;
import io.reactivex.plugins.RxJavaPlugins;
import io.reactivex.subjects.PublishSubject;

/**
 * Unit tests for the {@code Observable.join} operator: cross-pairing of two
 * sources, duration-window expiry, error propagation from either source or
 * either duration selector, and disposal/undeliverable-error edge cases.
 */
public class ObservableJoinTest {

    // Mocked downstream observer shared by the Mockito verify()-based tests.
    Observer<Object> observer = TestHelper.mockObserver();

    // Result selector that sums the paired left and right values.
    BiFunction<Integer, Integer, Integer> add = new BiFunction<Integer, Integer, Integer>() {
        @Override
        public Integer apply(Integer t1, Integer t2) {
            return t1 + t2;
        }
    };

    /**
     * Returns a duration selector that maps every emitted value to the same
     * given observable (i.e. a constant-window selector).
     */
    <T> Function<Integer, Observable<T>> just(final Observable<T> observable) {
        return new Function<Integer, Observable<T>>() {
            @Override
            public Observable<T> apply(Integer t1) {
                return observable;
            }
        };
    }

    @Before
    public void before() {
        // Initializes any @Mock fields (none declared directly here, but kept
        // for parity with the other operator test classes).
        MockitoAnnotations.initMocks(this);
    }

    /**
     * With never-ending duration windows every left value pairs with every
     * right value, producing the full cross product of sums.
     */
    @Test
    public void normal1() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(Observable.never()), add);

        m.subscribe(observer);

        source1.onNext(1);
        source1.onNext(2);
        source1.onNext(4);

        source2.onNext(16);
        source2.onNext(32);
        source2.onNext(64);

        source1.onComplete();
        source2.onComplete();

        // 3 left x 3 right = 9 pairings.
        verify(observer, times(1)).onNext(17);
        verify(observer, times(1)).onNext(18);
        verify(observer, times(1)).onNext(20);
        verify(observer, times(1)).onNext(33);
        verify(observer, times(1)).onNext(34);
        verify(observer, times(1)).onNext(36);
        verify(observer, times(1)).onNext(65);
        verify(observer, times(1)).onNext(66);
        verify(observer, times(1)).onNext(68);

        verify(observer, times(1)).onComplete();
        verify(observer, never()).onError(any(Throwable.class));
    }

    /**
     * Firing the left duration subject expires the windows of the left values
     * emitted so far (1 and 2), so they do not pair with later right values.
     */
    @Test
    public void normal1WithDuration() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        PublishSubject<Integer> duration1 = PublishSubject.create();

        Observable<Integer> m = source1.join(source2,
                just(duration1),
                just(Observable.never()), add);
        m.subscribe(observer);

        source1.onNext(1);
        source1.onNext(2);
        source2.onNext(16);

        duration1.onNext(1);

        source1.onNext(4);
        source1.onNext(8);

        source1.onComplete();
        source2.onComplete();

        verify(observer, times(1)).onNext(17);
        verify(observer, times(1)).onNext(18);
        verify(observer, times(1)).onNext(20);
        verify(observer, times(1)).onNext(24);

        verify(observer, times(1)).onComplete();
        verify(observer, never()).onError(any(Throwable.class));
    }

    /**
     * Left source completing before the right emits does not prevent pairing:
     * never-ending windows keep the left values alive.
     */
    @Test
    public void normal2() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(Observable.never()), add);

        m.subscribe(observer);

        source1.onNext(1);
        source1.onNext(2);

        source1.onComplete();

        source2.onNext(16);
        source2.onNext(32);
        source2.onNext(64);

        source2.onComplete();

        verify(observer, times(1)).onNext(17);
        verify(observer, times(1)).onNext(18);
        verify(observer, times(1)).onNext(33);
        verify(observer, times(1)).onNext(34);
        verify(observer, times(1)).onNext(65);
        verify(observer, times(1)).onNext(66);

        verify(observer, times(1)).onComplete();
        verify(observer, never()).onError(any(Throwable.class));
    }

    /** An error on the left source terminates the join without emissions. */
    @Test
    public void leftThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(Observable.never()), add);

        m.subscribe(observer);

        source2.onNext(1);
        source1.onError(new RuntimeException("Forced failure"));

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** An error on the right source terminates the join without emissions. */
    @Test
    public void rightThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(Observable.never()), add);

        m.subscribe(observer);

        source1.onNext(1);
        source2.onError(new RuntimeException("Forced failure"));

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** A left duration observable that errors fails the whole join. */
    @Test
    public void leftDurationThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> duration1 = Observable.<Integer> error(new RuntimeException("Forced failure"));

        Observable<Integer> m = source1.join(source2,
                just(duration1),
                just(Observable.never()), add);
        m.subscribe(observer);

        source1.onNext(1);

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** A right duration observable that errors fails the whole join. */
    @Test
    public void rightDurationThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Observable<Integer> duration1 = Observable.<Integer> error(new RuntimeException("Forced failure"));

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(duration1), add);
        m.subscribe(observer);

        source2.onNext(1);

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** A left duration selector that throws fails the whole join. */
    @Test
    public void leftDurationSelectorThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Function<Integer, Observable<Integer>> fail = new Function<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> apply(Integer t1) {
                throw new RuntimeException("Forced failure");
            }
        };

        Observable<Integer> m = source1.join(source2,
                fail,
                just(Observable.never()), add);
        m.subscribe(observer);

        source1.onNext(1);

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** A right duration selector that throws fails the whole join. */
    @Test
    public void rightDurationSelectorThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        Function<Integer, Observable<Integer>> fail = new Function<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> apply(Integer t1) {
                throw new RuntimeException("Forced failure");
            }
        };

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                fail, add);
        m.subscribe(observer);

        source2.onNext(1);

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** A result selector that throws fails the join on the first pairing. */
    @Test
    public void resultSelectorThrows() {
        PublishSubject<Integer> source1 = PublishSubject.create();
        PublishSubject<Integer> source2 = PublishSubject.create();

        BiFunction<Integer, Integer, Integer> fail = new BiFunction<Integer, Integer, Integer>() {
            @Override
            public Integer apply(Integer t1, Integer t2) {
                throw new RuntimeException("Forced failure");
            }
        };

        Observable<Integer> m = source1.join(source2,
                just(Observable.never()),
                just(Observable.never()), fail);
        m.subscribe(observer);

        source1.onNext(1);
        source2.onNext(2);

        verify(observer, times(1)).onError(any(Throwable.class));
        verify(observer, never()).onComplete();
        verify(observer, never()).onNext(any());
    }

    /** The joined observable honors disposal. */
    @Test
    public void dispose() {
        TestHelper.checkDisposed(PublishSubject.<Integer>create().join(Observable.just(1),
                Functions.justFunction(Observable.never()),
                Functions.justFunction(Observable.never()),
                new BiFunction<Integer, Integer, Integer>() {
                    @Override
                    public Integer apply(Integer a, Integer b) throws Exception {
                        return a + b;
                    }
                }));
    }

    /** take(1) after a join delivers exactly the first pairing. */
    @Test
    public void take() {
        Observable.just(1).join(
                Observable.just(2),
                Functions.justFunction(Observable.never()),
                Functions.justFunction(Observable.never()),
                new BiFunction<Integer, Integer, Integer>() {
                    @Override
                    public Integer apply(Integer a, Integer b) throws Exception {
                        return a + b;
                    }
                })
        .take(1)
        .test()
        .assertResult(3);
    }

    /**
     * An immediately-completing right duration closes the right window before
     * any left value arrives, so nothing is ever paired.
     */
    @Test
    public void rightClose() {
        PublishSubject<Integer> ps = PublishSubject.create();

        TestObserver<Integer> to = ps.join(Observable.just(2),
                Functions.justFunction(Observable.never()),
                Functions.justFunction(Observable.empty()),
                new BiFunction<Integer, Integer, Integer>() {
                    @Override
                    public Integer apply(Integer a, Integer b) throws Exception {
                        return a + b;
                    }
                })
        .test()
        .assertEmpty();

        ps.onNext(1);

        to.assertEmpty();
    }

    /** A throwing result selector is surfaced through the TestObserver. */
    @Test
    public void resultSelectorThrows2() {
        PublishSubject<Integer> ps = PublishSubject.create();

        TestObserver<Integer> to = ps.join(
                Observable.just(2),
                Functions.justFunction(Observable.never()),
                Functions.justFunction(Observable.never()),
                new BiFunction<Integer, Integer, Integer>() {
                    @Override
                    public Integer apply(Integer a, Integer b) throws Exception {
                        throw new TestException();
                    }
                })
        .test();

        ps.onNext(1);
        ps.onComplete();

        to.assertFailure(TestException.class);
    }

    /**
     * A misbehaving outer source that signals onError twice: the first error
     * is delivered downstream, the second must go to the plugin error handler.
     */
    @Test
    public void badOuterSource() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Observable<Integer>() {
                @Override
                protected void subscribeActual(Observer<? super Integer> observer) {
                    observer.onSubscribe(Disposables.empty());
                    observer.onError(new TestException("First"));
                    observer.onError(new TestException("Second"));
                }
            }
            .join(Observable.just(2),
                    Functions.justFunction(Observable.never()),
                    Functions.justFunction(Observable.never()),
                    new BiFunction<Integer, Integer, Integer>() {
                        @Override
                        public Integer apply(Integer a, Integer b) throws Exception {
                            return a + b;
                        }
                    })
            .test()
            .assertFailureAndMessage(TestException.class, "First");

            TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
        } finally {
            RxJavaPlugins.reset();
        }
    }

    /**
     * A misbehaving duration observable that signals onError twice: the second
     * error (after termination) must be routed to the plugin error handler.
     */
    @Test
    public void badEndSource() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            @SuppressWarnings("rawtypes")
            final Observer[] o = { null };

            TestObserver<Integer> to = Observable.just(1)
            .join(Observable.just(2),
                    Functions.justFunction(Observable.never()),
                    Functions.justFunction(new Observable<Integer>() {
                        @Override
                        protected void subscribeActual(Observer<? super Integer> observer) {
                            o[0] = observer;
                            observer.onSubscribe(Disposables.empty());
                            observer.onError(new TestException("First"));
                        }
                    }),
                    new BiFunction<Integer, Integer, Integer>() {
                        @Override
                        public Integer apply(Integer a, Integer b) throws Exception {
                            return a + b;
                        }
                    })
            .test();

            o[0].onError(new TestException("Second"));

            to
            .assertFailureAndMessage(TestException.class, "First");

            TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
        } finally {
            RxJavaPlugins.reset();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.dht.preloader;

import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheDeployable;
import org.apache.ignite.internal.processors.cache.GridCacheEntryInfo;
import org.apache.ignite.internal.processors.cache.GridCacheMessage;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;

/**
 * Force keys response. Contains absent keys.
 */
public class GridDhtForceKeysResponse extends GridCacheMessage implements GridCacheDeployable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Future ID. */
    private IgniteUuid futId;

    /** Mini-future ID. */
    private IgniteUuid miniId;

    /** Error. Transient: transmitted as {@link #errBytes} and restored on unmarshal. */
    @GridDirectTransient
    private volatile IgniteCheckedException err;

    /** Serialized error. */
    private byte[] errBytes;

    /** Missed (not found) keys. */
    @GridToStringInclude
    @GridDirectCollection(KeyCacheObject.class)
    private List<KeyCacheObject> missedKeys;

    /** Cache entries. */
    @GridToStringInclude
    @GridDirectCollection(GridCacheEntryInfo.class)
    private List<GridCacheEntryInfo> infos;

    /**
     * Required by {@link Externalizable}.
     */
    public GridDhtForceKeysResponse() {
        // No-op.
    }

    /**
     * @param cacheId Cache ID.
     * @param futId Request id.
     * @param miniId Mini-future ID.
     * @param addDepInfo Deployment info flag.
     */
    public GridDhtForceKeysResponse(int cacheId, IgniteUuid futId, IgniteUuid miniId, boolean addDepInfo) {
        assert futId != null;
        assert miniId != null;

        this.cacheId = cacheId;
        this.futId = futId;
        this.miniId = miniId;
        this.addDepInfo = addDepInfo;
    }

    /**
     * Sets error.
     *
     * @param err Error.
     */
    public void error(IgniteCheckedException err){
        this.err = err;
    }

    /** {@inheritDoc} */
    @Override public IgniteCheckedException error() {
        return err;
    }

    /**
     * @return Keys. Never {@code null}: empty list when no keys were missed.
     */
    public Collection<KeyCacheObject> missedKeys() {
        return missedKeys == null ? Collections.<KeyCacheObject>emptyList() : missedKeys;
    }

    /**
     * @return Forced entries. Never {@code null}: empty list when no entries were added.
     */
    public Collection<GridCacheEntryInfo> forcedInfos() {
        return infos == null ? Collections.<GridCacheEntryInfo>emptyList() : infos;
    }

    /**
     * @return Future ID.
     */
    public IgniteUuid futureId() {
        return futId;
    }

    /**
     * @return Mini-future ID.
     */
    public IgniteUuid miniId() {
        return miniId;
    }

    /**
     * Adds a key that was not found. List is created lazily on first add.
     *
     * @param key Key.
     */
    public void addMissed(KeyCacheObject key) {
        if (missedKeys == null)
            missedKeys = new ArrayList<>();

        missedKeys.add(key);
    }

    /**
     * Adds an entry that was found. List is created lazily on first add.
     *
     * @param info Entry info to add.
     */
    public void addInfo(GridCacheEntryInfo info) {
        assert info != null;

        if (infos == null)
            infos = new ArrayList<>();

        infos.add(info);
    }

    /**
     * {@inheritDoc}
     *
     * Marshals cache objects, entry infos and the error (into {@link #errBytes})
     * before the message goes on the wire.
     *
     * @param ctx Shared cache context.
     */
    @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
        super.prepareMarshal(ctx);

        GridCacheContext cctx = ctx.cacheContext(cacheId);

        if (missedKeys != null)
            prepareMarshalCacheObjects(missedKeys, cctx);

        if (infos != null) {
            for (GridCacheEntryInfo info : infos)
                info.marshal(cctx);
        }

        if (err != null && errBytes == null)
            errBytes = U.marshal(ctx, err);
    }

    /** {@inheritDoc} */
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);

        GridCacheContext cctx = ctx.cacheContext(cacheId);

        if (missedKeys != null)
            finishUnmarshalCacheObjects(missedKeys, cctx, ldr);

        if (infos != null) {
            for (GridCacheEntryInfo info : infos)
                info.unmarshal(cctx, ldr);
        }

        if (errBytes != null && err == null)
            err = U.unmarshal(ctx, errBytes, U.resolveClassLoader(ldr, ctx.gridConfig()));
    }

    /** {@inheritDoc} */
    @Override public boolean addDeploymentInfo() {
        return addDepInfo;
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // NOTE: fall-through between cases is intentional. writer.state() resumes
        // at the first field not yet fully written (states 0-2 belong to the
        // superclass), and writing continues sequentially until the buffer fills.
        switch (writer.state()) {
            case 3:
                if (!writer.writeByteArray("errBytes", errBytes))
                    return false;

                writer.incrementState();

            case 4:
                if (!writer.writeIgniteUuid("futId", futId))
                    return false;

                writer.incrementState();

            case 5:
                if (!writer.writeCollection("infos", infos, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 6:
                if (!writer.writeIgniteUuid("miniId", miniId))
                    return false;

                writer.incrementState();

            case 7:
                if (!writer.writeCollection("missedKeys", missedKeys, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // NOTE: fall-through is intentional and must mirror the field order in
        // writeTo(); reader.state() resumes at the first field not yet read.
        switch (reader.state()) {
            case 3:
                errBytes = reader.readByteArray("errBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 4:
                futId = reader.readIgniteUuid("futId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 5:
                infos = reader.readCollection("infos", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 6:
                miniId = reader.readIgniteUuid("miniId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 7:
                missedKeys = reader.readCollection("missedKeys", MessageCollectionItemType.MSG);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(GridDhtForceKeysResponse.class);
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        return 43;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 8;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtForceKeysResponse.class, this, super.toString());
    }
}
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static io.netty.handler.codec.http2.Http2TestUtil.as;
import static io.netty.handler.codec.http2.Http2TestUtil.runInChannel;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.verify;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.AsciiString;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http2.Http2TestUtil.FrameAdapter;
import io.netty.handler.codec.http2.Http2TestUtil.FrameCountDown;
import io.netty.handler.codec.http2.Http2TestUtil.Http2Runnable;
import io.netty.util.CharsetUtil;
import io.netty.util.NetUtil;
import io.netty.util.concurrent.Future;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Random;
import java.util.concurrent.CountDownLatch;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Test for data decompression in the HTTP/2 codec.
 * <p>
 * Each test spins up a real client/server pair over NIO sockets, writes
 * compressed DATA frames from the client (the compressing encoder), and
 * asserts that the server side (the decompressing listener) receives the
 * original plaintext and fully consumes the flow-control window.
 */
public class DataCompressionHttp2Test {
    private static final AsciiString GET = as("GET");
    private static final AsciiString POST = as("POST");
    private static final AsciiString PATH = as("/some/path");

    @Mock
    private Http2FrameListener serverListener;
    @Mock
    private Http2FrameListener clientListener;

    private Http2ConnectionEncoder clientEncoder;
    private ServerBootstrap sb;
    private Bootstrap cb;
    private Channel serverChannel;
    private Channel clientChannel;
    private CountDownLatch serverLatch;
    private CountDownLatch clientLatch;
    private CountDownLatch clientSettingsAckLatch;
    private Http2Connection serverConnection;
    private Http2Connection clientConnection;
    // Collects the decompressed bytes observed by the server's onDataRead.
    private ByteArrayOutputStream serverOut;

    @Before
    public void setup() throws InterruptedException {
        MockitoAnnotations.initMocks(this);
    }

    /**
     * Releases the server-side capture buffer.
     * (Renamed from the misspelled {@code cleaup}; JUnit invokes it via the
     * {@code @After} annotation, so no caller depends on the name.)
     */
    @After
    public void cleanup() throws IOException {
        // Guard against an early setup failure before bootstrapEnv() ran.
        if (serverOut != null) {
            serverOut.close();
        }
    }

    @After
    public void teardown() throws InterruptedException {
        serverChannel.close().sync();
        Future<?> serverGroup = sb.group().shutdownGracefully(0, 0, MILLISECONDS);
        Future<?> serverChildGroup = sb.childGroup().shutdownGracefully(0, 0, MILLISECONDS);
        Future<?> clientGroup = cb.group().shutdownGracefully(0, 0, MILLISECONDS);
        serverGroup.sync();
        serverChildGroup.sync();
        clientGroup.sync();
    }

    /** A headers-only request must pass through the decompressor untouched. */
    @Test
    public void justHeadersNoData() throws Exception {
        bootstrapEnv(1, 1, 0, 1);
        final Http2Headers headers = new DefaultHttp2Headers().method(GET).path(PATH)
                .set(HttpHeaderNames.CONTENT_ENCODING, HttpHeaderValues.GZIP);
        // Required because the decompressor intercepts the onXXXRead events before
        // our {@link Http2TestUtil$FrameAdapter} does.
        FrameAdapter.getOrCreateStream(serverConnection, 3, false);
        FrameAdapter.getOrCreateStream(clientConnection, 3, false);
        runInChannel(clientChannel, new Http2Runnable() {
            @Override
            public void run() {
                clientEncoder.writeHeaders(ctxClient(), 3, headers, 0, true, newPromiseClient());
                ctxClient().flush();
            }
        });
        awaitServer();
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(headers), eq(0),
                eq(DEFAULT_PRIORITY_WEIGHT), eq(false), eq(0), eq(true));
    }

    /** An empty gzip-encoded body must decompress to an empty payload. */
    @Test
    public void gzipEncodingSingleEmptyMessage() throws Exception {
        final String text = "";
        final ByteBuf data = Unpooled.copiedBuffer(text.getBytes());
        bootstrapEnv(1, 1, data.readableBytes(), 1);
        try {
            final Http2Headers headers = new DefaultHttp2Headers().method(POST).path(PATH)
                    .set(HttpHeaderNames.CONTENT_ENCODING, HttpHeaderValues.GZIP);
            // Required because the decompressor intercepts the onXXXRead events before
            // our {@link Http2TestUtil$FrameAdapter} does.
            Http2Stream stream = FrameAdapter.getOrCreateStream(serverConnection, 3, false);
            FrameAdapter.getOrCreateStream(clientConnection, 3, false);
            runInChannel(clientChannel, new Http2Runnable() {
                @Override
                public void run() {
                    clientEncoder.writeHeaders(ctxClient(), 3, headers, 0, false, newPromiseClient());
                    clientEncoder.writeData(ctxClient(), 3, data.retain(), 0, true, newPromiseClient());
                    ctxClient().flush();
                }
            });
            awaitServer();
            assertEquals(0, serverConnection.local().flowController().unconsumedBytes(stream));
            assertEquals(text, serverOut.toString(CharsetUtil.UTF_8.name()));
        } finally {
            data.release();
        }
    }

    /** A single gzip-encoded DATA frame must round-trip to the original text. */
    @Test
    public void gzipEncodingSingleMessage() throws Exception {
        final String text = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccc";
        final ByteBuf data = Unpooled.copiedBuffer(text.getBytes());
        bootstrapEnv(1, 1, data.readableBytes(), 1);
        try {
            final Http2Headers headers = new DefaultHttp2Headers().method(POST).path(PATH)
                    .set(HttpHeaderNames.CONTENT_ENCODING, HttpHeaderValues.GZIP);
            // Required because the decompressor intercepts the onXXXRead events before
            // our {@link Http2TestUtil$FrameAdapter} does.
            Http2Stream stream = FrameAdapter.getOrCreateStream(serverConnection, 3, false);
            FrameAdapter.getOrCreateStream(clientConnection, 3, false);
            runInChannel(clientChannel, new Http2Runnable() {
                @Override
                public void run() {
                    clientEncoder.writeHeaders(ctxClient(), 3, headers, 0, false, newPromiseClient());
                    clientEncoder.writeData(ctxClient(), 3, data.retain(), 0, true, newPromiseClient());
                    ctxClient().flush();
                }
            });
            awaitServer();
            assertEquals(0, serverConnection.local().flowController().unconsumedBytes(stream));
            assertEquals(text, serverOut.toString(CharsetUtil.UTF_8.name()));
        } finally {
            data.release();
        }
    }

    /** Multiple DATA frames in one gzip stream must decompress in order. */
    @Test
    public void gzipEncodingMultipleMessages() throws Exception {
        final String text1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccc";
        final String text2 = "dddddddddddddddddddeeeeeeeeeeeeeeeeeeeffffffffffffffffffff";
        final ByteBuf data1 = Unpooled.copiedBuffer(text1.getBytes());
        final ByteBuf data2 = Unpooled.copiedBuffer(text2.getBytes());
        bootstrapEnv(1, 1, data1.readableBytes() + data2.readableBytes(), 1);
        try {
            final Http2Headers headers = new DefaultHttp2Headers().method(POST).path(PATH)
                    .set(HttpHeaderNames.CONTENT_ENCODING, HttpHeaderValues.GZIP);
            // Required because the decompressor intercepts the onXXXRead events before
            // our {@link Http2TestUtil$FrameAdapter} does.
            Http2Stream stream = FrameAdapter.getOrCreateStream(serverConnection, 3, false);
            FrameAdapter.getOrCreateStream(clientConnection, 3, false);
            runInChannel(clientChannel, new Http2Runnable() {
                @Override
                public void run() {
                    clientEncoder.writeHeaders(ctxClient(), 3, headers, 0, false, newPromiseClient());
                    clientEncoder.writeData(ctxClient(), 3, data1.retain(), 0, false, newPromiseClient());
                    clientEncoder.writeData(ctxClient(), 3, data2.retain(), 0, true, newPromiseClient());
                    ctxClient().flush();
                }
            });
            awaitServer();
            assertEquals(0, serverConnection.local().flowController().unconsumedBytes(stream));
            assertEquals(new StringBuilder(text1).append(text2).toString(),
                    serverOut.toString(CharsetUtil.UTF_8.name()));
        } finally {
            data1.release();
            data2.release();
        }
    }

    /** A 4KiB random payload must survive deflate compression intact. */
    @Test
    public void deflateEncodingWriteLargeMessage() throws Exception {
        final int BUFFER_SIZE = 1 << 12;
        final byte[] bytes = new byte[BUFFER_SIZE];
        new Random().nextBytes(bytes);
        bootstrapEnv(1, 1, BUFFER_SIZE, 1);
        final ByteBuf data = Unpooled.wrappedBuffer(bytes);
        try {
            final Http2Headers headers = new DefaultHttp2Headers().method(POST).path(PATH)
                    .set(HttpHeaderNames.CONTENT_ENCODING, HttpHeaderValues.DEFLATE);
            // Required because the decompressor intercepts the onXXXRead events before
            // our {@link Http2TestUtil$FrameAdapter} does.
            Http2Stream stream = FrameAdapter.getOrCreateStream(serverConnection, 3, false);
            FrameAdapter.getOrCreateStream(clientConnection, 3, false);
            runInChannel(clientChannel, new Http2Runnable() {
                @Override
                public void run() {
                    clientEncoder.writeHeaders(ctxClient(), 3, headers, 0, false, newPromiseClient());
                    clientEncoder.writeData(ctxClient(), 3, data.retain(), 0, true, newPromiseClient());
                    ctxClient().flush();
                }
            });
            awaitServer();
            assertEquals(0, serverConnection.local().flowController().unconsumedBytes(stream));
            assertEquals(data.resetReaderIndex().toString(CharsetUtil.UTF_8),
                    serverOut.toString(CharsetUtil.UTF_8.name()));
        } finally {
            data.release();
        }
    }

    /**
     * Builds the full client/server environment for one test.
     *
     * @param serverHalfClosedCount number of server-side half-closed stream events to await
     * @param clientSettingsAckLatchCount number of SETTINGS ACKs the client must see
     * @param serverOutSize initial capacity for the server-side capture buffer
     * @param clientCount frame count for the client's {@link FrameCountDown}
     */
    private void bootstrapEnv(int serverHalfClosedCount, int clientSettingsAckLatchCount, int serverOutSize,
            int clientCount) throws Exception {
        serverOut = new ByteArrayOutputStream(serverOutSize);
        serverLatch = new CountDownLatch(serverHalfClosedCount);
        clientLatch = new CountDownLatch(clientCount);
        clientSettingsAckLatch = new CountDownLatch(clientSettingsAckLatchCount);
        sb = new ServerBootstrap();
        cb = new Bootstrap();

        // Streams are created before the normal flow for this test, so these connection must be initialized up front.
        serverConnection = new DefaultHttp2Connection(true);
        clientConnection = new DefaultHttp2Connection(false);

        serverConnection.addListener(new Http2ConnectionAdapter() {
            @Override
            public void streamHalfClosed(Http2Stream stream) {
                serverLatch.countDown();
            }
        });

        // Capture every decompressed DATA chunk into serverOut and report the
        // bytes (payload + padding) as processed so flow control is returned.
        doAnswer(new Answer<Integer>() {
            @Override
            public Integer answer(InvocationOnMock in) throws Throwable {
                ByteBuf buf = (ByteBuf) in.getArguments()[2];
                int padding = (Integer) in.getArguments()[3];
                int processedBytes = buf.readableBytes() + padding;
                buf.readBytes(serverOut, buf.readableBytes());
                return processedBytes;
            }
        }).when(serverListener).onDataRead(any(ChannelHandlerContext.class), anyInt(),
                any(ByteBuf.class), anyInt(), anyBoolean());

        final CountDownLatch serverChannelLatch = new CountDownLatch(1);
        sb.group(new NioEventLoopGroup(), new NioEventLoopGroup());
        sb.channel(NioServerSocketChannel.class);
        sb.childHandler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ChannelPipeline p = ch.pipeline();
                Http2FrameWriter writer = new DefaultHttp2FrameWriter();
                // Server decodes through the decompressing listener and encodes
                // with the compressing encoder.
                Http2ConnectionHandler connectionHandler = new Http2ConnectionHandler(new DefaultHttp2ConnectionDecoder.Builder()
                        .connection(serverConnection)
                        .frameReader(new DefaultHttp2FrameReader())
                        .listener(
                                new DelegatingDecompressorFrameListener(serverConnection, serverListener)),
                        new CompressorHttp2ConnectionEncoder.Builder().connection(
                                serverConnection).frameWriter(writer));
                p.addLast(connectionHandler);
                serverChannelLatch.countDown();
            }
        });

        cb.group(new NioEventLoopGroup());
        cb.channel(NioSocketChannel.class);
        cb.handler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ChannelPipeline p = ch.pipeline();
                FrameCountDown clientFrameCountDown = new FrameCountDown(clientListener,
                        clientSettingsAckLatch, clientLatch);
                Http2FrameWriter writer = new DefaultHttp2FrameWriter();
                Http2ConnectionHandler connectionHandler = new Http2ConnectionHandler(new DefaultHttp2ConnectionDecoder.Builder()
                        .connection(clientConnection)
                        .frameReader(new DefaultHttp2FrameReader())
                        .listener(
                                new DelegatingDecompressorFrameListener(clientConnection, clientFrameCountDown)),
                        new CompressorHttp2ConnectionEncoder.Builder().connection(
                                clientConnection).frameWriter(writer));
                clientEncoder = connectionHandler.encoder();
                p.addLast(connectionHandler);
            }
        });

        serverChannel = sb.bind(new InetSocketAddress(0)).sync().channel();
        int port = ((InetSocketAddress) serverChannel.localAddress()).getPort();

        ChannelFuture ccf = cb.connect(new InetSocketAddress(NetUtil.LOCALHOST, port));
        assertTrue(ccf.awaitUninterruptibly().isSuccess());
        clientChannel = ccf.channel();
        assertTrue(serverChannelLatch.await(5, SECONDS));
    }

    /** Waits for the SETTINGS handshake and the server's half-closed event. */
    private void awaitServer() throws Exception {
        assertTrue(clientSettingsAckLatch.await(5, SECONDS));
        assertTrue(serverLatch.await(5, SECONDS));
        serverOut.flush();
    }

    /** @return the client pipeline's first handler context. */
    private ChannelHandlerContext ctxClient() {
        return clientChannel.pipeline().firstContext();
    }

    /** @return a fresh write promise on the client context. */
    private ChannelPromise newPromiseClient() {
        return ctxClient().newPromise();
    }
}
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime.commands;

import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Supplier;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.Constants;
import com.google.devtools.build.lib.analysis.BlazeVersionInfo;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.ProtoUtils;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.pkgcache.PackageCacheOptions;
import com.google.devtools.build.lib.query2.proto.proto2api.Build.AllowedRuleClassInfo;
import com.google.devtools.build.lib.query2.proto.proto2api.Build.AttributeDefinition;
import com.google.devtools.build.lib.query2.proto.proto2api.Build.BuildLanguage;
import com.google.devtools.build.lib.query2.proto.proto2api.Build.RuleDefinition;
import com.google.devtools.build.lib.runtime.BlazeCommand;
import com.google.devtools.build.lib.runtime.BlazeCommandDispatcher;
import com.google.devtools.build.lib.runtime.BlazeModule;
import com.google.devtools.build.lib.runtime.BlazeRuntime;
import com.google.devtools.build.lib.runtime.Command;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.util.AbruptExitException;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.OsUtils;
import com.google.devtools.build.lib.util.StringUtilities;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsProvider;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * Implementation of 'blaze info'.
 */
@Command(name = "info",
         // TODO(bazel-team): this is not really a build command, but needs access to the
         // configuration options to do its job
         builds = true,
         allowResidue = true,
         binaryStdOut = true,
         help = "resource:info.txt",
         shortDescription = "Displays runtime info about the %{product} server.",
         options = { InfoCommand.Options.class },
         completion = "info-key",
         // We have InfoCommand inherit from {@link BuildCommand} because we want all
         // configuration defaults specified in ~/.blazerc for {@code build} to apply to
         // {@code info} too, even though it doesn't actually do a build.
         //
         // (Ideally there would be a way to make {@code info} inherit just the bare
         // minimum of relevant options from {@code build}, i.e. those that affect the
         // values it prints. But there's no such mechanism.)
         inherits = { BuildCommand.class })
public class InfoCommand implements BlazeCommand {

  /** Options specific to the info command. */
  public static class Options extends OptionsBase {
    @Option(name = "show_make_env",
        defaultValue = "false",
        category = "misc",
        help = "Include the \"Make\" environment in the output.")
    public boolean showMakeEnvironment;
  }

  /**
   * Unchecked variant of ExitCausingException. Below, we need to throw from the Supplier interface,
   * which does not allow checked exceptions.
   */
  public static class ExitCausingRuntimeException extends RuntimeException {

    private final ExitCode exitCode;

    public ExitCausingRuntimeException(String message, ExitCode exitCode) {
      super(message);
      this.exitCode = exitCode;
    }

    public ExitCausingRuntimeException(ExitCode exitCode) {
      this.exitCode = exitCode;
    }

    public ExitCode getExitCode() {
      return exitCode;
    }
  }

  /** An info item backed by one of the hard-wired {@link InfoKey} values. */
  private static class HardwiredInfoItem implements BlazeModule.InfoItem {
    private final InfoKey key;
    private final BlazeRuntime runtime;
    private final OptionsProvider commandOptions;

    private HardwiredInfoItem(InfoKey key, BlazeRuntime runtime, OptionsProvider commandOptions) {
      this.key = key;
      this.runtime = runtime;
      this.commandOptions = commandOptions;
    }

    @Override
    public String getName() {
      return key.getName();
    }

    @Override
    public String getDescription() {
      return key.getDescription();
    }

    @Override
    public boolean isHidden() {
      return key.isHidden();
    }

    @Override
    public byte[] get(Supplier<BuildConfiguration> configurationSupplier) {
      return print(getInfoItem(runtime, key, configurationSupplier, commandOptions));
    }
  }

  /** An info item for a single "Make" environment variable (only shown with --show_make_env). */
  private static class MakeInfoItem implements BlazeModule.InfoItem {
    private final String name;
    private final String value;

    private MakeInfoItem(String name, String value) {
      this.name = name;
      this.value = value;
    }

    @Override
    public String getName() {
      return name;
    }

    @Override
    public String getDescription() {
      return "Make environment variable '" + name + "'";
    }

    @Override
    public boolean isHidden() {
      return false;
    }

    @Override
    public byte[] get(Supplier<BuildConfiguration> configurationSupplier) {
      return print(value);
    }
  }

  @Override
  public void editOptions(CommandEnvironment env, OptionsParser optionsParser) { }

  @Override
  public ExitCode exec(final CommandEnvironment env, final OptionsProvider optionsProvider) {
    final BlazeRuntime runtime = env.getRuntime();
    env.getReporter().switchToAnsiAllowingHandler();
    Options infoOptions = optionsProvider.getOptions(Options.class);
    OutErr outErr = env.getReporter().getOutErr();
    // Creating a BuildConfiguration is expensive and often unnecessary. Delay the creation until
    // it is needed.
    Supplier<BuildConfiguration> configurationSupplier = new Supplier<BuildConfiguration>() {
      private BuildConfiguration configuration;
      @Override
      public BuildConfiguration get() {
        if (configuration != null) {
          return configuration;
        }
        try {
          // In order to be able to answer configuration-specific queries, we need to setup the
          // package path. Since info inherits all the build options, all the necessary information
          // is available here.
          runtime.setupPackageCache(
              optionsProvider.getOptions(PackageCacheOptions.class),
              runtime.getDefaultsPackageContent(optionsProvider));
          // TODO(bazel-team): What if there are multiple configurations? [multi-config]
          configuration = runtime
              .getConfigurations(optionsProvider)
              .getTargetConfigurations().get(0);
          return configuration;
        } catch (InvalidConfigurationException e) {
          env.getReporter().handle(Event.error(e.getMessage()));
          throw new ExitCausingRuntimeException(ExitCode.COMMAND_LINE_ERROR);
        } catch (AbruptExitException e) {
          throw new ExitCausingRuntimeException("unknown error: " + e.getMessage(),
              e.getExitCode());
        } catch (InterruptedException e) {
          env.getReporter().handle(Event.error("interrupted"));
          throw new ExitCausingRuntimeException(ExitCode.INTERRUPTED);
        }
      }
    };

    Map<String, BlazeModule.InfoItem> items = getInfoItemMap(runtime, optionsProvider);

    try {
      if (infoOptions.showMakeEnvironment) {
        Map<String, String> makeEnv = configurationSupplier.get().getMakeEnvironment();
        for (Map.Entry<String, String> entry : makeEnv.entrySet()) {
          BlazeModule.InfoItem item = new MakeInfoItem(entry.getKey(), entry.getValue());
          items.put(item.getName(), item);
        }
      }

      List<String> residue = optionsProvider.getResidue();
      if (residue.size() > 1) {
        env.getReporter().handle(Event.error("at most one key may be specified"));
        return ExitCode.COMMAND_LINE_ERROR;
      }

      String key = residue.size() == 1 ? residue.get(0) : null;
      if (key != null) { // print just the value for the specified key:
        byte[] value;
        if (items.containsKey(key)) {
          value = items.get(key).get(configurationSupplier);
        } else {
          env.getReporter().handle(Event.error("unknown key: '" + key + "'"));
          return ExitCode.COMMAND_LINE_ERROR;
        }
        try {
          outErr.getOutputStream().write(value);
          outErr.getOutputStream().flush();
        } catch (IOException e) {
          env.getReporter().handle(Event.error("Cannot write info block: " + e.getMessage()));
          return ExitCode.ANALYSIS_FAILURE;
        }
      } else { // print them all
        configurationSupplier.get();  // We'll need this later anyway
        for (BlazeModule.InfoItem infoItem : items.values()) {
          if (infoItem.isHidden()) {
            continue;
          }
          outErr.getOutputStream().write(
              (infoItem.getName() + ": ").getBytes(StandardCharsets.UTF_8));
          outErr.getOutputStream().write(infoItem.get(configurationSupplier));
        }
        // NOTE(review): unlike the single-key path above, this branch does not flush the
        // output stream; presumably the dispatcher flushes at command end — verify.
      }
    } catch (AbruptExitException e) {
      return e.getExitCode();
    } catch (ExitCausingRuntimeException e) {
      return e.getExitCode();
    } catch (IOException e) {
      return ExitCode.LOCAL_ENVIRONMENTAL_ERROR;
    }
    return ExitCode.SUCCESS;
  }

  /**
   * Compute and return the info for the given key. Only keys that are not hidden are supported
   * here.
   */
  private static Object getInfoItem(BlazeRuntime runtime, InfoKey key,
      Supplier<BuildConfiguration> configurationSupplier, OptionsProvider options) {
    switch (key) {
      // directories
      case WORKSPACE : return runtime.getWorkspace();
      case INSTALL_BASE : return runtime.getDirectories().getInstallBase();
      case OUTPUT_BASE : return runtime.getOutputBase();
      case EXECUTION_ROOT : return runtime.getExecRoot();
      case OUTPUT_PATH : return runtime.getDirectories().getOutputPath();
      // These are the only (non-hidden) info items that require a configuration, because the
      // corresponding paths contain the short name. Maybe we should recommend using the symlinks
      // or make them hidden by default?
      case BLAZE_BIN : return configurationSupplier.get().getBinDirectory().getPath();
      case BLAZE_GENFILES : return configurationSupplier.get().getGenfilesDirectory().getPath();
      case BLAZE_TESTLOGS : return configurationSupplier.get().getTestLogsDirectory().getPath();

      // logs
      case COMMAND_LOG : return BlazeCommandDispatcher.getCommandLogPath(runtime.getOutputBase());
      case MESSAGE_LOG :
        // NB: Duplicated in EventLogModule
        return runtime.getOutputBase().getRelative("message.log");

      // misc
      case RELEASE : return BlazeVersionInfo.instance().getReleaseName();
      case SERVER_PID : return OsUtils.getpid();
      case PACKAGE_PATH : return getPackagePath(options);

      // memory statistics
      case GC_COUNT :
      case GC_TIME :
        // The documentation is not very clear on what it means to have more than
        // one GC MXBean, so we just sum them up.
        // Use long accumulators: getCollectionCount()/getCollectionTime() return long, and an
        // int compound assignment would silently narrow (truncate/overflow on long-lived servers).
        long gcCount = 0;
        long gcTime = 0;
        for (GarbageCollectorMXBean gcBean : ManagementFactory.getGarbageCollectorMXBeans()) {
          gcCount += gcBean.getCollectionCount();
          gcTime += gcBean.getCollectionTime();
        }
        if (key == InfoKey.GC_COUNT) {
          return gcCount + "";
        } else {
          return gcTime + "ms";
        }

      case MAX_HEAP_SIZE :
        return StringUtilities.prettyPrintBytes(getMemoryUsage().getMax());
      case USED_HEAP_SIZE :
      case COMMITTED_HEAP_SIZE :
        return StringUtilities.prettyPrintBytes(key == InfoKey.USED_HEAP_SIZE
            ? getMemoryUsage().getUsed() : getMemoryUsage().getCommitted());

      case USED_HEAP_SIZE_AFTER_GC :
        // Note that this info value is not printed by default, but only when explicitly requested.
        System.gc();
        return StringUtilities.prettyPrintBytes(getMemoryUsage().getUsed());

      case DEFAULTS_PACKAGE:
        return runtime.getDefaultsPackageContent();

      case BUILD_LANGUAGE:
        return getBuildLanguageDefinition(runtime.getRuleClassProvider());

      case DEFAULT_PACKAGE_PATH:
        return Joiner.on(":").join(Constants.DEFAULT_PACKAGE_PATH);

      default:
        throw new IllegalArgumentException("missing implementation for " + key);
    }
  }

  /** Returns the current heap memory usage snapshot. */
  private static MemoryUsage getMemoryUsage() {
    MemoryMXBean memBean = ManagementFactory.getMemoryMXBean();
    return memBean.getHeapMemoryUsage();
  }

  /**
   * Get the package_path variable for the given set of options.
   */
  private static String getPackagePath(OptionsProvider options) {
    PackageCacheOptions packageCacheOptions =
        options.getOptions(PackageCacheOptions.class);
    return Joiner.on(":").join(packageCacheOptions.packagePath);
  }

  /** Builds the proto describing which rule classes the given label attribute accepts. */
  private static AllowedRuleClassInfo getAllowedRuleClasses(
      Collection<RuleClass> ruleClasses, Attribute attr) {
    AllowedRuleClassInfo.Builder info = AllowedRuleClassInfo.newBuilder();
    info.setPolicy(AllowedRuleClassInfo.AllowedRuleClasses.ANY);

    if (attr.isStrictLabelCheckingEnabled()
        && attr.getAllowedRuleClassesPredicate() != Predicates.<RuleClass>alwaysTrue()) {
      info.setPolicy(AllowedRuleClassInfo.AllowedRuleClasses.SPECIFIED);
      Predicate<RuleClass> filter = attr.getAllowedRuleClassesPredicate();
      for (RuleClass otherClass : Iterables.filter(ruleClasses, filter)) {
        if (otherClass.isDocumented()) {
          info.addAllowedRuleClass(otherClass.getName());
        }
      }
    }

    return info.build();
  }

  /**
   * Returns a byte array containing a proto-buffer describing the build language.
   */
  private static byte[] getBuildLanguageDefinition(RuleClassProvider provider) {
    BuildLanguage.Builder resultPb = BuildLanguage.newBuilder();
    Collection<RuleClass> ruleClasses = provider.getRuleClassMap().values();
    for (RuleClass ruleClass : ruleClasses) {
      if (!ruleClass.isDocumented()) {
        continue;
      }

      RuleDefinition.Builder rulePb = RuleDefinition.newBuilder();
      rulePb.setName(ruleClass.getName());
      for (Attribute attr : ruleClass.getAttributes()) {
        if (!attr.isDocumented()) {
          continue;
        }

        AttributeDefinition.Builder attrPb = AttributeDefinition.newBuilder();
        attrPb.setName(attr.getName());
        // The protocol compiler, in its infinite wisdom, generates the field as one of the
        // integer type and the getTypeEnum() method is missing. WTF?
        attrPb.setType(ProtoUtils.getDiscriminatorFromType(attr.getType()));
        attrPb.setMandatory(attr.isMandatory());

        if (Type.isLabelType(attr.getType())) {
          attrPb.setAllowedRuleClasses(getAllowedRuleClasses(ruleClasses, attr));
        }

        rulePb.addAttribute(attrPb);
      }

      resultPb.addRule(rulePb);
    }

    return resultPb.build().toByteArray();
  }

  /**
   * Renders an info value as bytes: byte arrays pass through unchanged, everything else is
   * stringified with a trailing newline.
   */
  private static byte[] print(Object value) {
    if (value instanceof byte[]) {
      return (byte[]) value;
    }
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintWriter writer = new PrintWriter(outputStream);
    writer.print(value + "\n");
    writer.flush();
    return outputStream.toByteArray();
  }

  /**
   * Collects all info items, keyed and sorted by name: module-contributed items first, then the
   * hard-wired InfoKey items (which therefore win on name collisions).
   */
  static Map<String, BlazeModule.InfoItem> getInfoItemMap(
      BlazeRuntime runtime, OptionsProvider commandOptions) {
    Map<String, BlazeModule.InfoItem> result = new TreeMap<>();  // order by key
    for (BlazeModule module : runtime.getBlazeModules()) {
      for (BlazeModule.InfoItem item : module.getInfoItems()) {
        result.put(item.getName(), item);
      }
    }
    for (InfoKey key : InfoKey.values()) {
      BlazeModule.InfoItem item = new HardwiredInfoItem(key, runtime, commandOptions);
      result.put(item.getName(), item);
    }
    return result;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

/**
 * VectorUDAFSumDecimal. Vectorized implementation for SUM aggregates.
 */
@Description(name = "sum",
    value = "_FUNC_(expr) - Returns the sum value of expr (vectorized, type: decimal)")
public class VectorUDAFSumDecimal extends VectorAggregateExpression {

    private static final long serialVersionUID = 1L;

    /**
     * class for storing the current aggregate value.
     */
    private static final class Aggregation implements AggregationBuffer {

      private static final long serialVersionUID = 1L;

      // Running sum; only meaningful once isNull has been cleared.
      transient private HiveDecimalWritable sum = new HiveDecimalWritable();
      // True until the first non-null value has been accumulated (SUM of no rows is NULL).
      transient private boolean isNull;

      // We use this to catch overflow.
      transient private boolean isOutOfRange;

      // Adds one value to the running sum. Once overflow has been detected the buffer is
      // "poisoned": all further values are ignored and evaluateOutput() returns null.
      // NOTE(review): the 'scale' parameter is unused here — presumably kept for signature
      // parity with the sibliing decimal aggregates; confirm before removing.
      public void sumValue(HiveDecimalWritable writable, short scale) {
        if (isOutOfRange) {
          return;
        }
        HiveDecimal value = writable.getHiveDecimal();
        if (isNull) {
          // First value: initialize the sum rather than add to an undefined value.
          sum.set(value);
          isNull = false;
        } else {
          HiveDecimal result;
          try {
            result = sum.getHiveDecimal().add(value);
          } catch (ArithmeticException e) {  // catch on overflow
            isOutOfRange = true;
            return;
          }
          sum.set(result);
        }
      }

      @Override
      public int getVariableSize() {
        // This buffer has a fixed size; callers must use getAggregationBufferFixedSize().
        throw new UnsupportedOperationException();
      }

      @Override
      public void reset() {
        isNull = true;
        isOutOfRange = false;
        sum.set(HiveDecimal.ZERO);
      }
    }

    // Expression producing the decimal column being summed.
    private VectorExpression inputExpression;
    // NOTE(review): allocated in the constructor but not referenced in this class — possibly
    // reserved for future use or leftover from code generation; confirm before removing.
    transient private final HiveDecimalWritable scratchDecimal;

    public VectorUDAFSumDecimal(VectorExpression inputExpression) {
      this();
      this.inputExpression = inputExpression;
    }

    public VectorUDAFSumDecimal() {
      super();
      scratchDecimal = new HiveDecimalWritable();
    }

    // Fetches the Aggregation buffer for the given aggregate at the given batch row, when
    // aggregating with per-row buffer sets (GROUP BY mode).
    private Aggregation getCurrentAggregationBuffer(
        VectorAggregationBufferRow[] aggregationBufferSets,
        int aggregateIndex,
        int row) {
      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
      return myagg;
    }

    /**
     * Accumulates a batch into per-row aggregation buffers (GROUP BY mode). Dispatches to one
     * of eight specialized loops depending on nulls/repeating/selection-vector combinations,
     * so the hot loops stay branch-free.
     */
    @Override
    public void aggregateInputSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      VectorizedRowBatch batch) throws HiveException {

      int batchSize = batch.size;

      if (batchSize == 0) {
        return;
      }

      inputExpression.evaluate(batch);

      DecimalColumnVector inputVector = (DecimalColumnVector)batch.
        cols[this.inputExpression.getOutputColumn()];
      HiveDecimalWritable[] vector = inputVector.vector;

      if (inputVector.noNulls) {
        if (inputVector.isRepeating) {
          iterateNoNullsRepeatingWithAggregationSelection(
            aggregationBufferSets, aggregateIndex,
            vector[0], inputVector.scale,
            batchSize);
        } else {
          if (batch.selectedInUse) {
            iterateNoNullsSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, inputVector.scale,
              batch.selected, batchSize);
          } else {
            iterateNoNullsWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, inputVector.scale,
              batchSize);
          }
        }
      } else {
        if (inputVector.isRepeating) {
          if (batch.selectedInUse) {
            iterateHasNullsRepeatingSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector[0], inputVector.scale,
              batchSize, batch.selected, inputVector.isNull);
          } else {
            iterateHasNullsRepeatingWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector[0], inputVector.scale,
              batchSize, inputVector.isNull);
          }
        } else {
          if (batch.selectedInUse) {
            iterateHasNullsSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, inputVector.scale,
              batchSize, batch.selected, inputVector.isNull);
          } else {
            iterateHasNullsWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector,inputVector.scale,
              batchSize, inputVector.isNull);
          }
        }
      }
    }

    // All rows carry the same (non-null) value; add it to each row's buffer.
    private void iterateNoNullsRepeatingWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable value,
      short scale,
      int batchSize) {

      for (int i=0; i < batchSize; ++i) {
        Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
        myagg.sumValue(value, scale);
      }
    }

    // No nulls, selection vector in use: buffer index is i, value index is selection[i].
    private void iterateNoNullsSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable[] values,
      short scale,
      int[] selection,
      int batchSize) {

      for (int i=0; i < batchSize; ++i) {
        Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
        myagg.sumValue(values[selection[i]], scale);
      }
    }

    // No nulls, dense batch: value i goes to buffer i.
    private void iterateNoNullsWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable[] values,
      short scale,
      int batchSize) {
      for (int i=0; i < batchSize; ++i) {
        Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
        myagg.sumValue(values[i], scale);
      }
    }

    // Repeating value with possible null, selection vector in use: the null flag is checked at
    // the selected position; only non-null rows accumulate.
    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable value,
      short scale,
      int batchSize,
      int[] selection,
      boolean[] isNull) {

      for (int i=0; i < batchSize; ++i) {
        if (!isNull[selection[i]]) {
          Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            i);
          myagg.sumValue(value, scale);
        }
      }

    }

    // Repeating value with possible null, dense batch.
    private void iterateHasNullsRepeatingWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable value,
      short scale,
      int batchSize,
      boolean[] isNull) {

      for (int i=0; i < batchSize; ++i) {
        if (!isNull[i]) {
          Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            i);
          myagg.sumValue(value, scale);
        }
      }
    }

    // Nulls possible, selection vector in use: buffer index is j (logical row), value/null
    // index is selected[j] (physical row).
    private void iterateHasNullsSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable[] values,
      short scale,
      int batchSize,
      int[] selection,
      boolean[] isNull) {

      for (int j=0; j < batchSize; ++j) {
        int i = selection[j];
        if (!isNull[i]) {
          Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            j);
          myagg.sumValue(values[i], scale);
        }
      }
   }

    // Nulls possible, dense batch.
    private void iterateHasNullsWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      HiveDecimalWritable[] values,
      short scale,
      int batchSize,
      boolean[] isNull) {

      for (int i=0; i < batchSize; ++i) {
        if (!isNull[i]) {
          Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            i);
          myagg.sumValue(values[i], scale);
        }
      }
   }

    /**
     * Accumulates a whole batch into a single aggregation buffer (non-GROUP BY mode).
     * The repeating case is handled as value * batchSize instead of a loop.
     */
    @Override
    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
        throws HiveException {

      inputExpression.evaluate(batch);

      DecimalColumnVector inputVector = (DecimalColumnVector)batch.
          cols[this.inputExpression.getOutputColumn()];

      int batchSize = batch.size;

      if (batchSize == 0) {
        return;
      }

      Aggregation myagg = (Aggregation)agg;
      // A poisoned (overflowed) buffer ignores all further input.
      if (myagg.isOutOfRange) {
        return;
      }

      HiveDecimalWritable[] vector = inputVector.vector;

      if (inputVector.isRepeating) {
        if ((inputVector.noNulls) || !inputVector.isNull[0]) {
          if (myagg.isNull) {
            myagg.isNull = false;
            myagg.sum.set(HiveDecimal.ZERO);
          }
          // Repeating value: add value * batchSize in one step instead of looping.
          HiveDecimal value = vector[0].getHiveDecimal();
          HiveDecimal multiple;
          try {
            multiple = value.multiply(HiveDecimal.create(batchSize));
          } catch (ArithmeticException e) {  // catch on overflow
            myagg.isOutOfRange = true;
            return;
          }
          HiveDecimal result;
          try {
            result = myagg.sum.getHiveDecimal().add(multiple);
          } catch (ArithmeticException e) {  // catch on overflow
            myagg.isOutOfRange = true;
            return;
          }
          myagg.sum.set(result);
        }
        return;
      }

      if (!batch.selectedInUse && inputVector.noNulls) {
        iterateNoSelectionNoNulls(myagg, vector, inputVector.scale, batchSize);
      }
      else if (!batch.selectedInUse) {
        iterateNoSelectionHasNulls(myagg, vector, inputVector.scale,
            batchSize, inputVector.isNull);
      }
      else if (inputVector.noNulls){
        iterateSelectionNoNulls(myagg, vector, inputVector.scale, batchSize, batch.selected);
      }
      else {
        iterateSelectionHasNulls(myagg, vector, inputVector.scale,
            batchSize, inputVector.isNull, batch.selected);
      }
    }

    // Single-buffer loop: selection vector in use, nulls possible.
    private void iterateSelectionHasNulls(
        Aggregation myagg,
        HiveDecimalWritable[] vector,
        short scale,
        int batchSize,
        boolean[] isNull,
        int[] selected) {

      for (int j=0; j< batchSize; ++j) {
        int i = selected[j];
        if (!isNull[i]) {
          if (myagg.isNull) {
            myagg.isNull = false;
            myagg.sum.set(HiveDecimal.ZERO);
          }
          HiveDecimal value = vector[i].getHiveDecimal();
          HiveDecimal result;
          try {
            result = myagg.sum.getHiveDecimal().add(value);
          } catch (ArithmeticException e) {  // catch on overflow
            myagg.isOutOfRange = true;
            return;
          }
          myagg.sum.set(result);
        }
      }
    }

    // Single-buffer loop: selection vector in use, no nulls, so the sum can be initialized once
    // before the loop.
    private void iterateSelectionNoNulls(
        Aggregation myagg,
        HiveDecimalWritable[] vector,
        short scale,
        int batchSize,
        int[] selected) {

      if (myagg.isNull) {
        myagg.sum.set(HiveDecimal.ZERO);
        myagg.isNull = false;
      }

      for (int i=0; i< batchSize; ++i) {
        HiveDecimal value = vector[selected[i]].getHiveDecimal();
        HiveDecimal result;
        try {
          result = myagg.sum.getHiveDecimal().add(value);
        } catch (ArithmeticException e) {  // catch on overflow
          myagg.isOutOfRange = true;
          return;
        }
        myagg.sum.set(result);
      }
    }

    // Single-buffer loop: dense batch, nulls possible.
    private void iterateNoSelectionHasNulls(
        Aggregation myagg,
        HiveDecimalWritable[] vector,
        short scale,
        int batchSize,
        boolean[] isNull) {

      for(int i=0;i<batchSize;++i) {
        if (!isNull[i]) {
          if (myagg.isNull) {
            myagg.sum.set(HiveDecimal.ZERO);
            myagg.isNull = false;
          }
          HiveDecimal value = vector[i].getHiveDecimal();
          HiveDecimal result;
          try {
            result = myagg.sum.getHiveDecimal().add(value);
          } catch (ArithmeticException e) {  // catch on overflow
            myagg.isOutOfRange = true;
            return;
          }
          myagg.sum.set(result);
        }
      }
    }

    // Single-buffer loop: dense batch, no nulls — the fastest path.
    private void iterateNoSelectionNoNulls(
        Aggregation myagg,
        HiveDecimalWritable[] vector,
        short scale,
        int batchSize) {

      if (myagg.isNull) {
        myagg.sum.set(HiveDecimal.ZERO);
        myagg.isNull = false;
      }

      for (int i=0;i<batchSize;++i) {
        HiveDecimal value = vector[i].getHiveDecimal();
        HiveDecimal result;
        try {
          result = myagg.sum.getHiveDecimal().add(value);
        } catch (ArithmeticException e) {  // catch on overflow
          myagg.isOutOfRange = true;
          return;
        }
        myagg.sum.set(result);
      }
    }

    @Override
    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
      return new Aggregation();
    }

    @Override
    public void reset(AggregationBuffer agg) throws HiveException {
      Aggregation myAgg = (Aggregation) agg;
      myAgg.reset();
    }

    // Returns NULL when no value was accumulated or when the sum overflowed; otherwise the sum.
    @Override
    public Object evaluateOutput(
        AggregationBuffer agg) throws HiveException {
      Aggregation myagg = (Aggregation) agg;
      if (myagg.isNull || myagg.isOutOfRange) {
        return null;
      }
      else {
        return myagg.sum.getHiveDecimal();
      }
    }

    @Override
    public ObjectInspector getOutputObjectInspector() {
      return PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector;
    }

    @Override
    public int getAggregationBufferFixedSize() {
      JavaDataModel model = JavaDataModel.get();
      return JavaDataModel.alignUp(
        model.object(),
        model.memoryAlign());
    }

    @Override
    public void init(AggregationDesc desc) throws HiveException {
      // No-op
    }

    public VectorExpression getInputExpression() {
      return inputExpression;
    }

    public void setInputExpression(VectorExpression inputExpression) {
      this.inputExpression = inputExpression;
    }
}
package xyz.cloudkeeper.model.util;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.stream.Collector;

/**
 * An immutable, random-access list. Does not permit null elements.
 *
 * <p>Unlike {@link java.util.Collections#unmodifiableList(List)}, which is a view of a separate list that can still
 * change, an instance of this class is backed by its own private data and will never change.
 *
 * @param <E> the type of elements in this list
 */
public abstract class ImmutableList<E> extends AbstractList<E> implements Serializable {
    private static final long serialVersionUID = -1141421109703076753L;

    private static final String NO_NULL_ELEMENTS_MESSAGE
        = String.format("Instances of %s do not permit null elements.", ImmutableList.class);

    /** Only the nested implementations below may subclass. */
    private ImmutableList() { }

    /**
     * Returns an immutable list containing the elements of the given collection, in encounter order.
     *
     * <p>If the argument is already an {@code ImmutableList} (other than a {@link SubList}), it is returned as-is.
     * A {@link SubList} is compacted into a list backed by its own minimal array, so the (possibly much larger)
     * original backing array can be garbage-collected.
     *
     * @param original collection to copy
     * @param <E> the type of elements
     * @return the immutable copy
     * @throws NullPointerException if {@code original} is null
     * @throws IllegalArgumentException if {@code original} contains a null element
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public static <E> ImmutableList<E> copyOf(Collection<? extends E> original) {
        Objects.requireNonNull(original);
        if (original instanceof SubList<?>) {
            SubList<? extends E> list = (SubList<? extends E>) original;
            return new ArrayBackedList<>(
                Arrays.copyOfRange(list.originalArray, list.originalFromIndex, list.originalToIndex)
            );
        } else if (original instanceof ImmutableList<?>) {
            return (ImmutableList<E>) original;
        } else if (original.isEmpty()) {
            return of();
        } else if (original.size() == 1) {
            @Nullable E originalElement = original instanceof List<?>
                ? ((List<? extends E>) original).get(0)
                : original.iterator().next();
            if (originalElement == null) {
                throw new IllegalArgumentException(NO_NULL_ELEMENTS_MESSAGE);
            }
            return new SingletonList<>(originalElement);
        } else {
            Object[] newArray = new Object[original.size()];
            int i = 0;
            for (@Nullable E element: original) {
                if (element == null) {
                    throw new IllegalArgumentException(NO_NULL_ELEMENTS_MESSAGE);
                }
                newArray[i] = element;
                i++;
            }
            return new ArrayBackedList<>(newArray);
        }
    }

    /**
     * Returns a {@code Collector} that accumulates the input elements into a new {@code ImmutableList}, in encounter
     * order.
     *
     * <p>(Fixed javadoc: the previous text was copy-pasted from {@code Collectors#toList()} and referenced a
     * nonexistent {@code toCollection(Supplier)} method; the returned list is always an immutable
     * {@code ImmutableList}.)
     *
     * @param <E> the type of the input elements
     * @return a {@code Collector} which collects all the input elements into an immutable list, in encounter order
     * @see Supplier
     */
    public static <E> Collector<E, ?, ImmutableList<E>> collector() {
        return Collector.<E, ArrayList<E>, ImmutableList<E>>of(
            ArrayList::new,
            List::add,
            (left, right) -> { left.addAll(right); return left; },
            ImmutableList::copyOf
        );
    }

    /**
     * Returns the shared empty list.
     *
     * @param <E> the type of elements
     * @return the empty list singleton
     */
    @SuppressWarnings("unchecked")
    public static <E> ImmutableList<E> of() {
        return (ImmutableList<E>) EmptyList.INSTANCE;
    }

    /**
     * Returns an immutable list containing exactly the given element.
     *
     * @param singleton the single element; must not be null
     * @param <E> the type of elements
     * @return the one-element list
     */
    public static <E> ImmutableList<E> of(E singleton) {
        return new SingletonList<>(singleton);
    }

    /**
     * Returns the index of the first occurrence of the specified element in this list, or -1 if this list does not
     * contain the element.
     *
     * <p>More formally, returns the lowest index {@code i} such that {@code object.equals(get(i))}, or -1 if there is
     * no such index.
     *
     * @param object element to search for
     * @return the index of the first occurrence of the specified element in this list, or -1 if this list does not
     *     contain the element (a {@code null} argument will always result in -1 being returned)
     */
    @Override
    public abstract int indexOf(@Nullable Object object);

    /**
     * Returns the index of the last occurrence of the specified element in this list, or -1 if this list does not
     * contain the element.
     *
     * <p>More formally, returns the highest index {@code i} such that {@code object.equals(get(i))}, or -1 if there is
     * no such index.
     *
     * @param object element to search for
     * @return the index of the last occurrence of the specified element in this list, or -1 if this list does not
     *     contain the element (a {@code null} argument will always result in -1 being returned)
     */
    @Override
    public abstract int lastIndexOf(@Nullable Object object);

    @Override
    public abstract ImmutableList<E> subList(int fromIndex, int toIndex);

    private static String outOfBoundsMsg(int index, int size) {
        return "Index: " + index + ", Size: " + size;
    }

    private static void requireValidIndex(int index, int size) {
        if (index < 0 || index >= size) {
            throw new IndexOutOfBoundsException(outOfBoundsMsg(index, size));
        }
    }

    private static void requireValidSubListArguments(int fromIndex, int toIndex, int size) {
        if (fromIndex < 0) {
            throw new IndexOutOfBoundsException("fromIndex = " + fromIndex);
        } else if (toIndex > size) {
            throw new IndexOutOfBoundsException("toIndex = " + toIndex);
        } else if (fromIndex > toIndex) {
            throw new IllegalArgumentException("fromIndex(" + fromIndex + ") > toIndex(" + toIndex + ')');
        }
    }

    /** The empty list; a serialization-safe singleton. */
    static final class EmptyList<E> extends ImmutableList<E> {
        private static final long serialVersionUID = -2157527890236446801L;

        private static final ImmutableList<?> INSTANCE = new EmptyList<>();
        private static final Object[] EMPTY_ARRAY = {};

        /** Preserves the singleton property across deserialization. */
        private Object readResolve() throws ObjectStreamException {
            return INSTANCE;
        }

        @Override
        public E get(int index) {
            throw new IndexOutOfBoundsException(outOfBoundsMsg(index, 0));
        }

        @Override
        public int size() {
            return 0;
        }

        @Override
        public int indexOf(@Nullable Object object) {
            // Fixed: the documented contract promises -1 for a null argument; the previous
            // Objects.requireNonNull(object) here threw NPE instead.
            return -1;
        }

        @Override
        public int lastIndexOf(@Nullable Object object) {
            // Fixed: same contract violation as indexOf above.
            return -1;
        }

        @Override
        public ImmutableList<E> subList(int fromIndex, int toIndex) {
            requireValidSubListArguments(fromIndex, toIndex, 0);
            return this;
        }

        @Override
        public Object[] toArray() {
            return EMPTY_ARRAY;
        }

        @Override
        public <T> T[] toArray(T[] targetArray) {
            Objects.requireNonNull(targetArray);
            // Fixed: per the List#toArray(T[]) contract, the element immediately following
            // the end of the list (index 0 for an empty list) must be set to null.
            if (targetArray.length > 0) {
                targetArray[0] = null;
            }
            return targetArray;
        }
    }

    /** A list of exactly one (non-null) element. */
    static final class SingletonList<E> extends ImmutableList<E> {
        private static final long serialVersionUID = -7452368762256835352L;

        private final E singleton;

        private SingletonList(E singleton) {
            Objects.requireNonNull(singleton, NO_NULL_ELEMENTS_MESSAGE);
            this.singleton = singleton;
        }

        @Override
        public E get(int index) {
            requireValidIndex(index, 1);
            return singleton;
        }

        @Override
        public int size() {
            return 1;
        }

        @Override
        public int indexOf(@Nullable Object object) {
            // equals(null) is false, so a null argument yields -1 as documented.
            return singleton.equals(object)
                ? 0
                : -1;
        }

        @Override
        public int lastIndexOf(@Nullable Object object) {
            return singleton.equals(object)
                ? 0
                : -1;
        }

        @Override
        public ImmutableList<E> subList(int fromIndex, int toIndex) {
            requireValidSubListArguments(fromIndex, toIndex, 1);
            return fromIndex == 0 && toIndex == 1
                ? this
                : ImmutableList.<E>of();
        }
    }

    /** General case: a list backed by its own (never exposed, never mutated) array. */
    static final class ArrayBackedList<E> extends ImmutableList<E> {
        private static final long serialVersionUID = 4212701518478728878L;

        private final Object[] array;

        /** Takes ownership of {@code array}; callers must not retain a reference. */
        private ArrayBackedList(Object[] array) {
            this.array = array;
        }

        @SuppressWarnings("unchecked")
        @Override
        public E get(int index) {
            // If an index is out of bounds, an ArrayIndexOutOfBoundsException (a subclass of
            // IndexOutOfBoundsException) will be thrown by the array access itself.
            return (E) array[index];
        }

        @Override
        public int size() {
            return array.length;
        }

        @Override
        public int indexOf(@Nullable Object object) {
            for (int i = 0; i < array.length; i++) {
                if (array[i].equals(object)) {
                    return i;
                }
            }
            return -1;
        }

        @Override
        public int lastIndexOf(@Nullable Object object) {
            for (int i = array.length - 1; i >= 0; --i) {
                if (array[i].equals(object)) {
                    return i;
                }
            }
            return -1;
        }

        @Override
        public ImmutableList<E> subList(int fromIndex, int toIndex) {
            if (fromIndex == 0 && toIndex == array.length) {
                return this;
            }
            requireValidSubListArguments(fromIndex, toIndex, array.length);
            if (fromIndex == toIndex) {
                return ImmutableList.of();
            } else if (fromIndex + 1 == toIndex) {
                @SuppressWarnings("unchecked")
                E element = (E) array[fromIndex];
                return new SingletonList<>(element);
            } else {
                return new SubList<>(array, fromIndex, toIndex);
            }
        }
    }

    /** A window [originalFromIndex, originalToIndex) onto another list's backing array. */
    static final class SubList<E> extends ImmutableList<E> {
        private static final long serialVersionUID = -1614112608924916458L;

        private final Object[] originalArray;
        private final int originalFromIndex;
        private final int originalToIndex;
        private final int size;

        private SubList(Object[] originalArray, int originalFromIndex, int originalToIndex) {
            assert originalFromIndex >= 0 && originalFromIndex <= originalToIndex
                && originalToIndex <= originalArray.length;
            this.originalArray = originalArray;
            this.originalFromIndex = originalFromIndex;
            this.originalToIndex = originalToIndex;
            size = originalToIndex - originalFromIndex;
        }

        @SuppressWarnings("unchecked")
        @Override
        public E get(int index) {
            requireValidIndex(index, size);
            return (E) originalArray[originalFromIndex + index];
        }

        @Override
        public int size() {
            return size;
        }

        @Override
        public int indexOf(@Nullable Object object) {
            for (int i = originalFromIndex; i < originalToIndex; i++) {
                if (originalArray[i].equals(object)) {
                    // Fixed: translate the backing-array position into a sub-list index.
                    // Previously the absolute position was returned, so indexOf/get disagreed
                    // and list.get(list.indexOf(x)) could throw or return the wrong element.
                    return i - originalFromIndex;
                }
            }
            return -1;
        }

        @Override
        public int lastIndexOf(@Nullable Object object) {
            // Fixed: removed Objects.requireNonNull(object) — the documented contract is that
            // null yields -1 (and equals(null) below is false for every element).
            for (int i = originalToIndex - 1; i >= originalFromIndex; --i) {
                if (originalArray[i].equals(object)) {
                    // Fixed: relative index, same as indexOf above.
                    return i - originalFromIndex;
                }
            }
            return -1;
        }

        @Override
        public ImmutableList<E> subList(int fromIndex, int toIndex) {
            if (fromIndex == 0 && toIndex == size) {
                return this;
            }
            requireValidSubListArguments(fromIndex, toIndex, size);
            if (fromIndex == toIndex) {
                return ImmutableList.of();
            } else if (fromIndex + 1 == toIndex) {
                @SuppressWarnings("unchecked")
                E element = (E) originalArray[originalFromIndex + fromIndex];
                return new SingletonList<>(element);
            } else {
                return new SubList<>(originalArray, originalFromIndex + fromIndex, originalFromIndex + toIndex);
            }
        }
    }
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.vulkan;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * Structure specifying a 3x4 affine transformation matrix.
 *
 * <h5>Valid Usage</h5>
 *
 * <ul>
 * <li>The first three columns of {@code matrix} <b>must</b> define an invertible 3x3 matrix</li>
 * </ul>
 *
 * <h5>See Also</h5>
 *
 * <p>{@link VkAccelerationStructureInstanceKHR}, {@link VkAccelerationStructureMatrixMotionInstanceNV}</p>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct VkTransformMatrixKHR {
 *     float {@link #matrix}[3][4];
 * }</code></pre>
 */
public class VkTransformMatrixKHR extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int MATRIX;

    static {
        // Single member: a 4-byte-aligned array of 3*4 floats (the 3x4 matrix).
        Layout layout = __struct(
            __array(4, 3 * 4)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        MATRIX = layout.offsetof(0);
    }

    /**
     * Creates a {@code VkTransformMatrixKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkTransformMatrixKHR(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** a 3x4 row-major affine transformation matrix. */
    @NativeType("float[3][4]")
    public FloatBuffer matrix() { return nmatrix(address()); }
    /** a 3x4 row-major affine transformation matrix. */
    public float matrix(int index) { return nmatrix(address(), index); }

    /** Copies the specified {@link FloatBuffer} to the {@link #matrix} field. */
    public VkTransformMatrixKHR matrix(@NativeType("float[3][4]") FloatBuffer value) { nmatrix(address(), value); return this; }
    /** Sets the specified value at the specified index of the {@link #matrix} field. */
    public VkTransformMatrixKHR matrix(int index, float value) { nmatrix(address(), index, value); return this; }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkTransformMatrixKHR set(VkTransformMatrixKHR src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkTransformMatrixKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkTransformMatrixKHR malloc() {
        return wrap(VkTransformMatrixKHR.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkTransformMatrixKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkTransformMatrixKHR calloc() {
        return wrap(VkTransformMatrixKHR.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkTransformMatrixKHR} instance allocated with {@link BufferUtils}. */
    public static VkTransformMatrixKHR create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkTransformMatrixKHR.class, memAddress(container), container);
    }

    /** Returns a new {@code VkTransformMatrixKHR} instance for the specified memory address. */
    public static VkTransformMatrixKHR create(long address) {
        return wrap(VkTransformMatrixKHR.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkTransformMatrixKHR createSafe(long address) {
        return address == NULL ? null : wrap(VkTransformMatrixKHR.class, address);
    }

    /**
     * Returns a new {@link VkTransformMatrixKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkTransformMatrixKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkTransformMatrixKHR.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkTransformMatrixKHR.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkTransformMatrixKHR.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code VkTransformMatrixKHR} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkTransformMatrixKHR malloc(MemoryStack stack) {
        return wrap(VkTransformMatrixKHR.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkTransformMatrixKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkTransformMatrixKHR calloc(MemoryStack stack) {
        return wrap(VkTransformMatrixKHR.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkTransformMatrixKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkTransformMatrixKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkTransformMatrixKHR.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #matrix}. */
    public static FloatBuffer nmatrix(long struct) { return memFloatBuffer(struct + VkTransformMatrixKHR.MATRIX, 3 * 4); }
    /** Unsafe version of {@link #matrix(int) matrix}. */
    public static float nmatrix(long struct, int index) {
        // check(index, 3 * 4) bounds-checks the flat index; * 4 converts to a byte offset.
        return UNSAFE.getFloat(null, struct + VkTransformMatrixKHR.MATRIX + check(index, 3 * 4) * 4);
    }

    /** Unsafe version of {@link #matrix(FloatBuffer) matrix}. */
    public static void nmatrix(long struct, FloatBuffer value) {
        if (CHECKS) { checkGT(value, 3 * 4); }
        memCopy(memAddress(value), struct + VkTransformMatrixKHR.MATRIX, value.remaining() * 4);
    }
    /** Unsafe version of {@link #matrix(int, float) matrix}. */
    public static void nmatrix(long struct, int index, float value) {
        UNSAFE.putFloat(null, struct + VkTransformMatrixKHR.MATRIX + check(index, 3 * 4) * 4, value);
    }

    // -----------------------------------

    /** An array of {@link VkTransformMatrixKHR} structs. */
    public static class Buffer extends StructBuffer<VkTransformMatrixKHR, Buffer> implements NativeResource {

        private static final VkTransformMatrixKHR ELEMENT_FACTORY = VkTransformMatrixKHR.create(-1L);

        /**
         * Creates a new {@code VkTransformMatrixKHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkTransformMatrixKHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkTransformMatrixKHR getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return a {@link FloatBuffer} view of the {@link VkTransformMatrixKHR#matrix} field. */
        @NativeType("float[3][4]")
        public FloatBuffer matrix() { return VkTransformMatrixKHR.nmatrix(address()); }
        /** @return the value at the specified index of the {@link VkTransformMatrixKHR#matrix} field. */
        public float matrix(int index) { return VkTransformMatrixKHR.nmatrix(address(), index); }

        /** Copies the specified {@link FloatBuffer} to the {@link VkTransformMatrixKHR#matrix} field. */
        public VkTransformMatrixKHR.Buffer matrix(@NativeType("float[3][4]") FloatBuffer value) { VkTransformMatrixKHR.nmatrix(address(), value); return this; }
        /** Sets the specified value at the specified index of the {@link VkTransformMatrixKHR#matrix} field. */
        public VkTransformMatrixKHR.Buffer matrix(int index, float value) { VkTransformMatrixKHR.nmatrix(address(), index, value); return this; }

    }

}
/**
 * $RCSfile: ,v $
 * $Revision: $
 * $Date: $
 *
 * Copyright (C) 2004-2011 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.spark.plugin;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.spark.util.URLFileSystem;
import org.jivesoftware.spark.util.WinRegistry;
import org.jivesoftware.spark.util.log.Log;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Queries the local Google Desktop HTTP API (located via the Windows registry) and converts its
 * XML result feed into {@link GoogleSearchResult}s for Spark's search UI.
 *
 * <p>If Google Desktop is not installed (the registry key is missing) every search method degrades
 * to a no-op/empty result instead of throwing.
 */
public class GoogleSearch {
    final ImageIcon icon;
    private DocumentBuilder db;
    String searchUrl;
    String searchBase;

    public GoogleSearch() {
        // Initialize icon to use.
        icon = SparkRes.getImageIcon(SparkRes.SEARCH_IMAGE_32x32);

        // Locate the Google Desktop API search URL via the Windows registry.
        try {
            searchUrl = (String) WinRegistry.readString(WinRegistry.HKEY_CURRENT_USER,
                "Software\\Google\\Google Desktop\\API", "search_url");
            searchBase = searchUrl.substring(0, searchUrl.indexOf('/', 8));
            db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        }
        catch (Exception e) {
            // Google Desktop is not installed or the registry is unreadable; searchUrl/db stay
            // null and fetchResults() below short-circuits, so all searches become no-ops.
        }
    }

    /**
     * Fetches the XML result feed for the given query and parses it into a DOM document.
     *
     * <p>This centralizes the fetch/parse logic that was previously duplicated in each public
     * search method, and adds the missing guard for a failed constructor ({@code db == null}
     * previously caused a logged NullPointerException).
     *
     * @param query the raw (unencoded) query string; must be non-empty
     * @return the parsed document, or {@code null} if Google Desktop is unavailable or the
     *         fetch/parse failed (failures are logged, matching the original behavior)
     */
    private Document fetchResults(String query) {
        if (db == null || searchUrl == null) {
            return null;
        }
        try {
            URL url = new URL(searchUrl + URLEncoder.encode(query, "UTF-8") + "&format=xml&num=50");
            String content = URLFileSystem.getContents(url);
            // Encode explicitly instead of using the platform-default charset; assumes the
            // Desktop API serves UTF-8 — TODO confirm against the API docs.
            return db.parse(new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)));
        }
        catch (IOException e) {
            Log.error(e);
        }
        catch (SAXException e) {
            Log.error(e);
        }
        return null;
    }

    /**
     * Converts the {@code <result>} elements of a results document into {@link GoogleSearchResult}s.
     * Relevance is a linear rank score: of {@code count} total hits, the first gets 100 and later
     * ones proportionally less.
     *
     * @param doc   the parsed results document (non-null)
     * @param query the query that produced the document
     * @return results in document order; empty if the document is malformed (the error is logged)
     */
    private List<GoogleSearchResult> parseResults(Document doc, String query) {
        final List<GoogleSearchResult> results = new ArrayList<GoogleSearchResult>();
        try {
            Element e = doc.getDocumentElement();
            int count = Integer.parseInt(e.getAttribute("count"));
            NodeList elems = e.getElementsByTagName("result");
            for (int i = 0; i < elems.getLength(); i++) {
                int relevance = (int)((double)(count - i) / count * 100);
                results.add(new GoogleSearchResult(searchBase, query, relevance, (Element)elems.item(i)));
            }
        }
        catch (Exception e1) {
            // Matches the original behavior: any malformed document is logged and ignored.
            Log.error(e1);
        }
        return results;
    }

    /**
     * Return the name of the title to use on the Search Tab.
     *
     * @return the name of the title to use on the Search Tab.
     */
    public String getTabTitle() {
        return "Google Desktop";
    }

    /**
     * Return the icon to use on the Search Tab.
     *
     * @return the icon to use.
     */
    public Icon getTabIcon() {
        return icon;
    }

    /**
     * Executed when a search has been started. Displays all results in a
     * {@link GoogleFileViewer} if any were found.
     *
     * @param query     the query to search on.
     * @param showFiles Show the files
     */
    public void search(String query, boolean showFiles) {
        if (query == null || "".equals(query)) {
            return;
        }

        Document doc = fetchResults(query);
        if (doc == null) {
            return;
        }

        final List<GoogleSearchResult> list = parseResults(doc, query);
        if (list.size() > 0) {
            new GoogleFileViewer().viewFiles(list, showFiles);
        }
    }

    /**
     * Executed when a search has been started.
     *
     * @param query        the query to search on.
     * @param maxDocuments Max documents to return
     * @return List containing search results; {@code null} for an empty query, otherwise an
     *         (possibly empty) list excluding GMail hits, capped at {@code maxDocuments}.
     */
    public List<GoogleSearchResult> searchText(String query, int maxDocuments) {
        final List<GoogleSearchResult> list = new ArrayList<GoogleSearchResult>();
        if (query == null || "".equals(query)) {
            return null;
        }

        Document doc = fetchResults(query);
        if (doc == null) {
            // Fixed: this method was the only one missing the null-document guard; it previously
            // relied on a logged NullPointerException and still returned the empty list.
            return list;
        }

        for (GoogleSearchResult result : parseResults(doc, query)) {
            // Exclude GMail hits; only document-style results are wanted here.
            if (!result.getURL().contains("googlemail")) {
                list.add(result);
            }
            if (list.size() == maxDocuments) {
                break;
            }
        }
        return list;
    }

    /**
     * Executed when a search has been started. Prints the URL of every hit to stdout.
     *
     * @param query the query to search on.
     */
    public void searchConversations(String query) {
        if (query == null || "".equals(query)) {
            return;
        }

        Document doc = fetchResults(query);
        if (doc == null) {
            return;
        }

        for (GoogleSearchResult result : parseResults(doc, query)) {
            String url = result.getURL();
            System.out.println(url);
        }
    }

    /**
     * Executed when a search has been started.
     *
     * @param query the query to search on.
     * @return Collection of search documents retrieved: local files only, excluding
     *         {@code .class} files, de-duplicated by subject; {@code null} for an empty query
     *         or when the result feed could not be fetched.
     */
    public Collection<GoogleSearchResult> searchDocuments(String query) {
        final Set<GoogleSearchResult> set = new HashSet<GoogleSearchResult>();
        if (query == null || "".equals(query)) {
            return null;
        }

        Document doc = fetchResults(query);
        if (doc == null) {
            return null;
        }

        for (GoogleSearchResult result : parseResults(doc, query)) {
            String url = result.getURL();
            File file = new File(url);
            if (file.exists() && !file.getName().endsWith(".class")) {
                // De-duplicate by subject (GoogleSearchResult presumably lacks a suitable
                // equals/hashCode, hence the manual scan — TODO confirm).
                boolean exists = false;
                for (GoogleSearchResult r : set) {
                    if (r.getSubject().equals(result.getSubject())) {
                        exists = true;
                        break;
                    }
                }
                if (!exists) {
                    set.add(result);
                }
            }
        }
        return set;
    }

    /**
     * Return true if you wish this to be searched by the Chat Analyzer.
     *
     * @return true if you wish this to be searched by the Chat Analyzer.
     */
    public boolean isUsedForChatAnalysis() {
        return false;
    }

    /**
     * Return the title headers for the result table. Please note that the <code>SearchResult</code>
     * should return the values in the exact order as this method defines.
     *
     * @return the field headers to use.
     */
    public String[] getFieldHeaders() {
        return new String[]{"Title", "Subject"};
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.starlark; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable; import com.google.devtools.build.lib.pkgcache.TargetParsingCompleteEvent; import com.google.devtools.build.lib.runtime.StarlarkOptionsParser; import com.google.devtools.build.lib.starlark.util.StarlarkOptionsTestCase; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.common.options.OptionsParsingException; import com.google.devtools.common.options.OptionsParsingResult; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import net.starlark.java.eval.StarlarkInt; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit test for the {@code StarlarkOptionsParser}. 
*/
@RunWith(JUnit4.class)
public class StarlarkOptionsParsingTest extends StarlarkOptionsTestCase {

  // Every event posted to the reporter during a test; inspected via eventsOfType().
  private List<Postable> postedEvents;

  // Install a handler that records posted events; individual tests assert on them.
  @Before
  public void addPostableEventHandler() {
    postedEvents = new ArrayList<>();
    reporter.addHandler(
        new ExtendedEventHandler() {
          @Override
          public void post(Postable obj) {
            postedEvents.add(obj);
          }

          @Override
          public void handle(Event event) {}
        });
  }

  /** Returns only the posted events of the given class. */
  private List<Postable> eventsOfType(Class<? extends Postable> clazz) {
    return postedEvents.stream()
        .filter(event -> event.getClass().equals(clazz))
        .collect(Collectors.toList());
  }

  // test --flag=value
  @Test
  public void testFlagEqualsValueForm() throws Exception {
    writeBasicIntFlag();

    OptionsParsingResult result = parseStarlarkOptions("--//test:my_int_setting=666");

    assertThat(result.getStarlarkOptions()).hasSize(1);
    assertThat(result.getStarlarkOptions().get("//test:my_int_setting"))
        .isEqualTo(StarlarkInt.of(666));
    assertThat(result.getResidue()).isEmpty();
  }

  // test --@main_workspace//flag=value parses out to //flag=value
  // test --@other_workspace//flag=value parses out to @other_workspace//flag=value
  @Test
  public void testFlagNameWithWorkspace() throws Exception {
    writeBasicIntFlag();
    scratch.file("test/repo2/WORKSPACE");
    scratch.file(
        "test/repo2/defs.bzl",
        "def _impl(ctx):",
        " pass",
        "my_flag = rule(",
        " implementation = _impl,",
        " build_setting = config.int(flag = True),",
        ")");
    scratch.file(
        "test/repo2/BUILD",
        "load(':defs.bzl', 'my_flag')",
        "my_flag(name = 'flag2', build_setting_default=2)");
    rewriteWorkspace(
        "workspace(name = 'starlark_options_test')",
        "local_repository(",
        " name = 'repo2',",
        " path = 'test/repo2',",
        ")");

    OptionsParsingResult result =
        parseStarlarkOptions(
            "--@starlark_options_test//test:my_int_setting=666 --@repo2//:flag2=222");

    assertThat(result.getStarlarkOptions()).hasSize(2);
    // Main-workspace flag is canonicalized without the @workspace prefix ...
    assertThat(result.getStarlarkOptions().get("//test:my_int_setting"))
        .isEqualTo(StarlarkInt.of(666));
    // ... while the external-repo flag keeps its @repo prefix.
    assertThat(result.getStarlarkOptions().get("@repo2//:flag2")).isEqualTo(StarlarkInt.of(222));
    assertThat(result.getResidue()).isEmpty();
  }

  // test --fake_flag=value
  @Test
  public void testBadFlag_equalsForm() throws Exception {
    scratch.file("test/BUILD");
    reporter.removeHandler(failFastHandler);

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class,
            () -> parseStarlarkOptions("--//fake_flag=blahblahblah"));

    assertThat(e).hasMessageThat().contains("Error loading option //fake_flag");
    assertThat(e.getInvalidArgument()).isEqualTo("//fake_flag");
  }

  // test --fake_flag
  @Test
  public void testBadFlag_boolForm() throws Exception {
    scratch.file("test/BUILD");
    reporter.removeHandler(failFastHandler);

    OptionsParsingException e =
        assertThrows(OptionsParsingException.class, () -> parseStarlarkOptions("--//fake_flag"));

    assertThat(e).hasMessageThat().contains("Error loading option //fake_flag");
    assertThat(e.getInvalidArgument()).isEqualTo("//fake_flag");
  }

  // Same bad-flag failure must surface even when --keep_going is set.
  @Test
  public void testBadFlag_keepGoing() throws Exception {
    optionsParser.parse("--keep_going");
    scratch.file("test/BUILD");
    reporter.removeHandler(failFastHandler);

    OptionsParsingException e =
        assertThrows(OptionsParsingException.class, () -> parseStarlarkOptions("--//fake_flag"));

    assertThat(e).hasMessageThat().contains("Error loading option //fake_flag");
    assertThat(e.getInvalidArgument()).isEqualTo("//fake_flag");
  }

  // A single dash is not recognized as a Starlark option; it falls into the residue.
  @Test
  public void testSingleDash_notAllowed() throws Exception {
    writeBasicIntFlag();

    OptionsParsingResult result = parseStarlarkOptions("-//test:my_int_setting=666");

    assertThat(result.getStarlarkOptions()).isEmpty();
    assertThat(result.getResidue()).containsExactly("-//test:my_int_setting=666");
  }

  // test --non_flag_setting=value
  @Test
  public void testNonFlagParsing() throws Exception {
    scratch.file(
        "test/build_setting.bzl",
        "def _build_setting_impl(ctx):",
        " return []",
        "int_flag = rule(",
        " implementation = _build_setting_impl,",
        " build_setting = config.int(flag=False)",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:build_setting.bzl', 'int_flag')",
        "int_flag(name = 'my_int_setting', build_setting_default = 42)");

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class,
            () -> parseStarlarkOptions("--//test:my_int_setting=666"));

    assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: //test:my_int_setting=666");
  }

  // test --bool_flag
  @Test
  public void testBooleanFlag() throws Exception {
    writeBasicBoolFlag();

    OptionsParsingResult result = parseStarlarkOptions("--//test:my_bool_setting=false");

    assertThat(result.getStarlarkOptions()).hasSize(1);
    assertThat(result.getStarlarkOptions().get("//test:my_bool_setting")).isEqualTo(false);
    assertThat(result.getResidue()).isEmpty();
  }

  // test --nobool_flag
  @Test
  public void testNoPrefixedBooleanFlag() throws Exception {
    writeBasicBoolFlag();

    OptionsParsingResult result = parseStarlarkOptions("--no//test:my_bool_setting");

    assertThat(result.getStarlarkOptions()).hasSize(1);
    assertThat(result.getStarlarkOptions().get("//test:my_bool_setting")).isEqualTo(false);
    assertThat(result.getResidue()).isEmpty();
  }

  // test --no@main_workspace//:bool_flag
  @Test
  public void testNoPrefixedBooleanFlag_withWorkspace() throws Exception {
    writeBasicBoolFlag();

    OptionsParsingResult result = parseStarlarkOptions("--no@//test:my_bool_setting");

    assertThat(result.getStarlarkOptions()).hasSize(1);
    assertThat(result.getStarlarkOptions().get("//test:my_bool_setting")).isEqualTo(false);
    assertThat(result.getResidue()).isEmpty();
  }

  // test --noint_flag
  @Test
  public void testNoPrefixedNonBooleanFlag() throws Exception {
    writeBasicIntFlag();

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class,
            () -> parseStarlarkOptions("--no//test:my_int_setting"));

    assertThat(e)
        .hasMessageThat()
        .isEqualTo("Illegal use of 'no' prefix on non-boolean option: //test:my_int_setting");
  }

  // test --int_flag
  @Test
  public void testFlagWithoutValue() throws Exception {
    writeBasicIntFlag();

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class, () -> parseStarlarkOptions("--//test:my_int_setting"));

    assertThat(e).hasMessageThat().isEqualTo("Expected value after --//test:my_int_setting");
  }

  // test --flag --flag
  @Test
  public void testRepeatFlagLastOneWins() throws Exception {
    writeBasicIntFlag();

    OptionsParsingResult result =
        parseStarlarkOptions("--//test:my_int_setting=4 --//test:my_int_setting=7");

    assertThat(result.getStarlarkOptions()).hasSize(1);
    assertThat(result.getStarlarkOptions().get("//test:my_int_setting"))
        .isEqualTo(StarlarkInt.of(7));
    assertThat(result.getResidue()).isEmpty();
  }

  // test --flagA=valueA --flagB=valueB
  @Test
  public void testMultipleFlags() throws Exception {
    scratch.file(
        "test/build_setting.bzl",
        "def _build_setting_impl(ctx):",
        " return []",
        "int_flag = rule(",
        " implementation = _build_setting_impl,",
        " build_setting = config.int(flag=True)",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:build_setting.bzl', 'int_flag')",
        "int_flag(name = 'my_int_setting', build_setting_default = 42)",
        "int_flag(name = 'my_other_int_setting', build_setting_default = 77)");

    OptionsParsingResult result =
        parseStarlarkOptions("--//test:my_int_setting=0 --//test:my_other_int_setting=0");

    assertThat(result.getResidue()).isEmpty();
    assertThat(result.getStarlarkOptions()).hasSize(2);
    assertThat(result.getStarlarkOptions().get("//test:my_int_setting"))
        .isEqualTo(StarlarkInt.of(0));
    assertThat(result.getStarlarkOptions().get("//test:my_other_int_setting"))
        .isEqualTo(StarlarkInt.of(0));
  }

  // test --non_build_setting
  @Test
  public void testNonBuildSetting() throws Exception {
    scratch.file(
        "test/rules.bzl",
        "def _impl(ctx):",
        " return []",
        "my_rule = rule(",
        " implementation = _impl,",
        ")");
    scratch.file("test/BUILD", "load('//test:rules.bzl', 'my_rule')", "my_rule(name = 'my_rule')");
    OptionsParsingException e =
        assertThrows(OptionsParsingException.class, () -> parseStarlarkOptions("--//test:my_rule"));
    assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: //test:my_rule");
  }

  // test --non_rule_configured_target
  @Test
  public void testNonRuleConfiguredTarget() throws Exception {
    scratch.file(
        "test/BUILD",
        "genrule(",
        " name = 'my_gen',",
        " srcs = ['x.in'],",
        " outs = ['x.cc'],",
        " cmd = '$(locations :tool) $< >$@',",
        " tools = [':tool'],",
        ")",
        "cc_library(name = 'tool-dep')");
    OptionsParsingException e =
        assertThrows(OptionsParsingException.class, () -> parseStarlarkOptions("--//test:x.in"));
    assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: //test:x.in");
  }

  // test --int_flag=non_int_value
  @Test
  public void testWrongValueType_int() throws Exception {
    writeBasicIntFlag();

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class,
            () -> parseStarlarkOptions("--//test:my_int_setting=woohoo"));

    assertThat(e)
        .hasMessageThat()
        .isEqualTo("While parsing option //test:my_int_setting=woohoo: 'woohoo' is not a int");
  }

  // test --bool_flag=non_bool_value
  @Test
  public void testWrongValueType_bool() throws Exception {
    writeBasicBoolFlag();

    OptionsParsingException e =
        assertThrows(
            OptionsParsingException.class,
            () -> parseStarlarkOptions("--//test:my_bool_setting=woohoo"));

    assertThat(e)
        .hasMessageThat()
        .isEqualTo("While parsing option //test:my_bool_setting=woohoo: 'woohoo' is not a boolean");
  }

  // test --int-flag=same value as default
  @Test
  public void testDontStoreDefaultValue() throws Exception {
    // build_setting_default = 42
    writeBasicIntFlag();

    OptionsParsingResult result = parseStarlarkOptions("--//test:my_int_setting=42");

    // A value equal to the default is not stored at all.
    assertThat(result.getStarlarkOptions()).isEmpty();
  }

  @Test
  public void testOptionsAreParsedWithBuildTestsOnly() throws Exception {
    writeBasicIntFlag();
    optionsParser.parse("--build_tests_only");

    OptionsParsingResult result = parseStarlarkOptions("--//test:my_int_setting=15");

    assertThat(result.getStarlarkOptions().get("//test:my_int_setting"))
        .isEqualTo(StarlarkInt.of(15));
  }

  // removeStarlarkOptions() splits args into (starlark-looking options, everything else).
  @Test
  public void testRemoveStarlarkOptionsWorks() throws Exception {
    Pair<ImmutableList<String>, ImmutableList<String>> residueAndStarlarkOptions =
        StarlarkOptionsParser.removeStarlarkOptions(
            ImmutableList.of(
                "--//local/starlark/option",
                "--//local/starlark/option=with_value",
                "--@some_repo//external/starlark/option",
                "--@some_repo//external/starlark/option=with_value",
                "--@//main/repo/option",
                "--@//main/repo/option=with_value",
                "some-random-residue",
                "--mangled//external/starlark/option",
                "--mangled//external/starlark/option=with_value"));
    assertThat(residueAndStarlarkOptions.getFirst())
        .containsExactly(
            "--//local/starlark/option",
            "--//local/starlark/option=with_value",
            "--@some_repo//external/starlark/option",
            "--@some_repo//external/starlark/option=with_value",
            "--@//main/repo/option",
            "--@//main/repo/option=with_value");
    assertThat(residueAndStarlarkOptions.getSecond())
        .containsExactly(
            "some-random-residue",
            "--mangled//external/starlark/option",
            "--mangled//external/starlark/option=with_value");
  }

  /**
   * When Starlark flags are only set as flags, they shouldn't produce {@link
   * TargetParsingCompleteEvent}s. That's intended to communicate (to the build event protocol)
   * which of the targets in {@code blaze build //foo:all //bar:all} were built.
   */
  @Test
  public void testExpectedBuildEventOutput_asFlag() throws Exception {
    writeBasicIntFlag();
    scratch.file("blah/BUILD", "cc_library(name = 'mylib')");
    useConfiguration(ImmutableMap.of("//test:my_int_setting", "15"));
    update(
        ImmutableList.of("//blah:mylib"),
        /*keepGoing=*/ false,
        /*loadingPhaseThreads=*/ LOADING_PHASE_THREADS,
        /*doAnalysis*/ true,
        eventBus);
    List<Postable> targetParsingCompleteEvents = eventsOfType(TargetParsingCompleteEvent.class);
    assertThat(targetParsingCompleteEvents).hasSize(1);
    assertThat(
            ((TargetParsingCompleteEvent) targetParsingCompleteEvents.get(0))
                .getOriginalTargetPattern())
        .containsExactly("//blah:mylib");
  }

  /**
   * But Starlark are also targets. When they're requested as normal build targets they should
   * produce {@link TargetParsingCompleteEvent} just like any other target.
   */
  @Test
  public void testExpectedBuildEventOutput_asTarget() throws Exception {
    writeBasicIntFlag();
    scratch.file("blah/BUILD", "cc_library(name = 'mylib')");
    useConfiguration(ImmutableMap.of("//test:my_int_setting", "15"));
    update(
        ImmutableList.of("//blah:mylib", "//test:my_int_setting"),
        /*keepGoing=*/ false,
        /*loadingPhaseThreads=*/ LOADING_PHASE_THREADS,
        /*doAnalysis*/ true,
        eventBus);
    List<Postable> targetParsingCompleteEvents = eventsOfType(TargetParsingCompleteEvent.class);
    assertThat(targetParsingCompleteEvents).hasSize(1);
    assertThat(
            ((TargetParsingCompleteEvent) targetParsingCompleteEvents.get(0))
                .getOriginalTargetPattern())
        .containsExactly("//blah:mylib", "//test:my_int_setting");
  }

  // An allow_multiple string flag accumulates every occurrence into a list.
  @Test
  @SuppressWarnings("unchecked")
  public void testAllowMultipleStringFlag() throws Exception {
    scratch.file(
        "test/build_setting.bzl",
        "def _build_setting_impl(ctx):",
        " return []",
        "allow_multiple_flag = rule(",
        " implementation = _build_setting_impl,",
        " build_setting = config.string(flag=True, allow_multiple=True)",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:build_setting.bzl', 'allow_multiple_flag')",
        "allow_multiple_flag(name = 'cats', build_setting_default = 'tabby')");

    OptionsParsingResult result = parseStarlarkOptions("--//test:cats=calico --//test:cats=bengal");

    assertThat(result.getStarlarkOptions().keySet()).containsExactly("//test:cats");
    assertThat((List<String>) result.getStarlarkOptions().get("//test:cats"))
        .containsExactly("calico", "bengal");
  }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.HBaseTestingUtility.assertKVListsEqual; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import 
org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Test various seek optimizations for correctness and check if they are
 * actually saving I/O operations.
 */
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestSeekOptimizations {

  private static final Log LOG =
      LogFactory.getLog(TestSeekOptimizations.class);

  // Constants
  private static final String FAMILY = "myCF";
  private static final byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);

  private static final int PUTS_PER_ROW_COL = 50;
  private static final int DELETES_PER_ROW_COL = 10;

  private static final int NUM_ROWS = 3;
  private static final int NUM_COLS = 3;

  private static final boolean VERBOSE = false;

  /**
   * Disable this when this test fails hopelessly and you need to debug a
   * simpler case.
   */
  private static final boolean USE_MANY_STORE_FILES = true;

  // Column-index subsets the scan is restricted to; {} means all columns.
  private static final int[][] COLUMN_SETS = new int[][] {
    {}, // All columns
    {0},
    {1},
    {0, 2},
    {1, 2},
    {0, 1, 2},
  };

  // Both start row and end row are inclusive here for the purposes of this
  // test. {-1, -1} means an unrestricted (full-table) scan.
  private static final int[][] ROW_RANGES = new int[][] {
    {-1, -1},
    {0, 1},
    {1, 1},
    {1, 2},
    {0, 2}
  };

  private static final int[] MAX_VERSIONS_VALUES = new int[] { 1, 2 };

  // Instance variables
  private HRegion region;
  private Put put;          // Put currently being assembled by put()
  private Delete del;       // Delete currently being assembled
  private Random rand;
  private Set<Long> putTimestamps = new HashSet<Long>();
  private Set<Long> delTimestamps = new HashSet<Long>();
  private List<Cell> expectedKVs = new ArrayList<Cell>();

  private Compression.Algorithm comprAlgo;
  private BloomType bloomType;

  // Seek counters accumulated across all scans in a test run;
  // "diligent" = lazy seek disabled, "lazy" = enabled.
  private long totalSeekDiligent, totalSeekLazy;

  private final static HBaseTestingUtility TEST_UTIL =
      HBaseTestingUtility.createLocalHTU();

  @Parameters
  public static final Collection<Object[]> parameters() {
    return HBaseTestingUtility.BLOOM_AND_COMPRESSION_COMBINATIONS;
  }

  public TestSeekOptimizations(Compression.Algorithm comprAlgo,
      BloomType bloomType) {
    this.comprAlgo = comprAlgo;
    this.bloomType = bloomType;
  }

  @Before
  public void setUp() {
    // Fixed seed keeps the generated data deterministic between runs.
    rand = new Random(91238123L);
    expectedKVs.clear();
  }

  @Test
  public void testMultipleTimestampRanges() throws IOException {
    // enable seek counting
    StoreFileScanner.instrument();

    region = TEST_UTIL.createTestRegion("testMultipleTimestampRanges",
        new HColumnDescriptor(FAMILY)
            .setCompressionType(comprAlgo)
            .setBloomFilterType(bloomType)
            .setMaxVersions(3)
    );

    // Delete the given timestamp and everything before.
    final long latestDelTS = USE_MANY_STORE_FILES ? 1397 : -1;

    // Each createTimestampRange() call flushes, producing one store file.
    createTimestampRange(1, 50, -1);
    createTimestampRange(51, 100, -1);
    if (USE_MANY_STORE_FILES) {
      createTimestampRange(100, 500, 127);
      createTimestampRange(900, 1300, -1);
      createTimestampRange(1301, 2500, latestDelTS);
      createTimestampRange(2502, 2598, -1);
      createTimestampRange(2599, 2999, -1);
    }

    prepareExpectedKVs(latestDelTS);

    // Run every combination of column set, row range, max versions, and
    // lazy-seek on/off, comparing results against the expected KV list.
    for (int[] columnArr : COLUMN_SETS) {
      for (int[] rowRange : ROW_RANGES) {
        for (int maxVersions : MAX_VERSIONS_VALUES) {
          for (boolean lazySeekEnabled : new boolean[] { false, true }) {
            testScan(columnArr, lazySeekEnabled, rowRange[0], rowRange[1],
                maxVersions);
          }
        }
      }
    }

    final double seekSavings = 1 - totalSeekLazy * 1.0 / totalSeekDiligent;
    System.err.println("For bloom=" + bloomType + ", compr=" + comprAlgo +
        " total seeks without optimization: " + totalSeekDiligent
        + ", with optimization: " + totalSeekLazy + " (" +
        String.format("%.2f%%", totalSeekLazy * 100.0 / totalSeekDiligent) +
        "), savings: " + String.format("%.2f%%",
            100.0 * seekSavings) + "\n");

    // Test that lazy seeks are buying us something. Without the actual
    // implementation of the lazy seek optimization this will be 0.
    final double expectedSeekSavings = 0.0;
    assertTrue("Lazy seek is only saving " +
        String.format("%.2f%%", seekSavings * 100) + " seeks but should " +
        "save at least " + String.format("%.2f%%", expectedSeekSavings * 100),
        seekSavings >= expectedSeekSavings);
  }

  // Runs one scan with the given restrictions, verifies the returned KVs
  // against the filtered expected list, and accumulates seek counts.
  private void testScan(final int[] columnArr, final boolean lazySeekEnabled,
      final int startRow, final int endRow, int maxVersions)
      throws IOException {
    StoreScanner.enableLazySeekGlobally(lazySeekEnabled);
    final Scan scan = new Scan();
    final Set<String> qualSet = new HashSet<String>();
    for (int iColumn : columnArr) {
      String qualStr = getQualStr(iColumn);
      scan.addColumn(FAMILY_BYTES, Bytes.toBytes(qualStr));
      qualSet.add(qualStr);
    }
    scan.setMaxVersions(maxVersions);
    scan.setStartRow(rowBytes(startRow));

    // Adjust for the fact that for multi-row queries the end row is exclusive.
    {
      final byte[] scannerStopRow =
          rowBytes(endRow + (startRow != endRow ? 1 : 0));
      scan.setStopRow(scannerStopRow);
    }

    final long initialSeekCount = StoreFileScanner.getSeekCount();
    final InternalScanner scanner = region.getScanner(scan);
    final List<Cell> results = new ArrayList<Cell>();
    final List<Cell> actualKVs = new ArrayList<Cell>();

    // Such a clumsy do-while loop appears to be the official way to use an
    // internalScanner. scanner.next() return value refers to the _next_
    // result, not to the one already returned in results.
    boolean hasNext;
    do {
      hasNext = scanner.next(results);
      actualKVs.addAll(results);
      results.clear();
    } while (hasNext);

    List<Cell> filteredKVs = filterExpectedResults(qualSet,
        rowBytes(startRow), rowBytes(endRow), maxVersions);
    final String rowRestrictionStr =
        (startRow == -1 && endRow == -1) ? "all rows"
            : (startRow == endRow ? ("row=" + startRow)
                : ("startRow=" + startRow + ", " + "endRow=" + endRow));
    final String columnRestrictionStr =
        columnArr.length == 0 ? "all columns"
            : ("columns=" + Arrays.toString(columnArr));
    final String testDesc =
        "Bloom=" + bloomType + ", compr=" + comprAlgo + ", "
            + (scan.isGetScan() ? "Get" : "Scan") + ": "
            + columnRestrictionStr + ", " + rowRestrictionStr
            + ", maxVersions=" + maxVersions + ", lazySeek=" + lazySeekEnabled;
    long seekCount = StoreFileScanner.getSeekCount() - initialSeekCount;
    if (VERBOSE) {
      System.err.println("Seek count: " + seekCount + ", KVs returned: "
          + actualKVs.size() + ". " + testDesc
          + (lazySeekEnabled ? "\n" : ""));
    }
    if (lazySeekEnabled) {
      totalSeekLazy += seekCount;
    } else {
      totalSeekDiligent += seekCount;
    }
    assertKVListsEqual(testDesc, filteredKVs, actualKVs);
  }

  // Restricts the global expected-KV list to the given row range, qualifier
  // set, and version count, mirroring what the scan should return.
  private List<Cell> filterExpectedResults(Set<String> qualSet,
      byte[] startRow, byte[] endRow, int maxVersions) {
    final List<Cell> filteredKVs = new ArrayList<Cell>();
    final Map<String, Integer> verCount = new HashMap<String, Integer>();
    for (Cell kv : expectedKVs) {
      if (startRow.length > 0 &&
          Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(),
              kv.getRowLength(), startRow, 0, startRow.length) < 0) {
        continue;
      }

      // In this unit test the end row is always inclusive.
      if (endRow.length > 0 &&
          Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(),
              kv.getRowLength(), endRow, 0, endRow.length) > 0) {
        continue;
      }

      if (!qualSet.isEmpty() && (!CellUtil.matchingFamily(kv, FAMILY_BYTES)
          || !qualSet.contains(Bytes.toString(CellUtil.cloneQualifier(kv))))) {
        continue;
      }

      // Count versions per row/column; expectedKVs is sorted so newest come first.
      final String rowColStr =
          Bytes.toStringBinary(CellUtil.cloneRow(kv)) + "/"
              + Bytes.toStringBinary(CellUtil.cloneFamily(kv)) + ":"
              + Bytes.toStringBinary(CellUtil.cloneQualifier(kv));
      final Integer curNumVer = verCount.get(rowColStr);
      final int newNumVer = curNumVer != null ? (curNumVer + 1) : 1;
      if (newNumVer <= maxVersions) {
        filteredKVs.add(kv);
        verCount.put(rowColStr, newNumVer);
      }
    }

    return filteredKVs;
  }

  // Drops expected KVs at or below the delete-up-to timestamp and sorts
  // the remainder into KeyValue order.
  private void prepareExpectedKVs(long latestDelTS) {
    final List<Cell> filteredKVs = new ArrayList<Cell>();
    for (Cell kv : expectedKVs) {
      if (kv.getTimestamp() > latestDelTS || latestDelTS == -1) {
        filteredKVs.add(kv);
      }
    }
    expectedKVs = filteredKVs;
    Collections.sort(expectedKVs, KeyValue.COMPARATOR);
  }

  // Adds one cell to the Put being assembled, deduplicating timestamps.
  public void put(String qual, long ts) {
    if (!putTimestamps.contains(ts)) {
      put.add(FAMILY_BYTES, Bytes.toBytes(qual), ts, createValue(ts));
      putTimestamps.add(ts);
    }
    if (VERBOSE) {
      LOG.info("put: row " + Bytes.toStringBinary(put.getRow())
          + ", cf " + FAMILY + ", qualifier " + qual + ", ts " + ts);
    }
  }

  private byte[] createValue(long ts) {
    return Bytes.toBytes("value" + ts);
  }

  public void delAtTimestamp(String qual, long ts) {
    del.deleteColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts);
    logDelete(qual, ts, "at");
  }

  private void logDelete(String qual, long ts, String delType) {
    if (VERBOSE) {
      LOG.info("del " + delType + ": row "
          + Bytes.toStringBinary(put.getRow()) + ", cf " + FAMILY
          + ", qualifier " + qual + ", ts " + ts);
    }
  }

  private void delUpToTimestamp(String qual, long upToTS) {
    del.deleteColumns(FAMILY_BYTES, Bytes.toBytes(qual), upToTS);
    logDelete(qual, upToTS, "up to and including");
  }

  // Uniform-ish random long in [0, n); remaps MIN_VALUE because
  // Math.abs(Long.MIN_VALUE) would overflow and stay negative.
  private long randLong(long n) {
    long l = rand.nextLong();
    if (l == Long.MIN_VALUE)
      l = Long.MAX_VALUE;
    return Math.abs(l) % n;
  }

  private long randBetween(long a, long b) {
    long x = a + randLong(b - a + 1);
    assertTrue(a <= x && x <= b);
    return x;
  }

  private final String rowStr(int i) {
    return ("row" + i).intern();
  }

  // i == -1 denotes "no restriction" (empty byte array).
  private final byte[] rowBytes(int i) {
    if (i == -1) {
      return HConstants.EMPTY_BYTE_ARRAY;
    }
    return Bytes.toBytes(rowStr(i));
  }

  private final String getQualStr(int i) {
    return ("qual" + i).intern();
  }

  // Populates every row/column with random puts in [minTS, maxTS], applies a
  // mix of point and range deletes, records the surviving cells in
  // expectedKVs, and flushes so each call produces a store file.
  public void createTimestampRange(long minTS, long maxTS,
      long deleteUpToTS) throws IOException {
    assertTrue(minTS < maxTS);
    assertTrue(deleteUpToTS == -1
        || (minTS <= deleteUpToTS && deleteUpToTS <= maxTS));

    for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
      final String row = rowStr(iRow);
      final byte[] rowBytes = Bytes.toBytes(row);
      for (int iCol = 0; iCol < NUM_COLS; ++iCol) {
        final String qual = getQualStr(iCol);
        final byte[] qualBytes = Bytes.toBytes(qual);
        put = new Put(rowBytes);

        putTimestamps.clear();
        put(qual, minTS);
        put(qual, maxTS);
        for (int i = 0; i < PUTS_PER_ROW_COL; ++i) {
          put(qual, randBetween(minTS, maxTS));
        }

        long[] putTimestampList = new long[putTimestamps.size()];
        {
          int i = 0;
          for (long ts : putTimestamps) {
            putTimestampList[i++] = ts;
          }
        }

        // Delete a predetermined number of particular timestamps
        delTimestamps.clear();
        assertTrue(putTimestampList.length >= DELETES_PER_ROW_COL);
        int numToDel = DELETES_PER_ROW_COL;
        int tsRemaining = putTimestampList.length;
        del = new Delete(rowBytes);
        for (long ts : putTimestampList) {
          if (rand.nextInt(tsRemaining) < numToDel) {
            delAtTimestamp(qual, ts);
            putTimestamps.remove(ts);
            --numToDel;
          }

          if (--tsRemaining == 0) {
            break;
          }
        }

        // Another type of delete: everything up to the given timestamp.
        if (deleteUpToTS != -1) {
          delUpToTimestamp(qual, deleteUpToTS);
        }

        region.put(put);
        if (!del.isEmpty()) {
          region.delete(del);
        }

        // Add remaining timestamps (those we have not deleted) to expected
        // results
        for (long ts : putTimestamps) {
          expectedKVs.add(new KeyValue(rowBytes, FAMILY_BYTES, qualBytes, ts,
              KeyValue.Type.Put));
        }
      }
    }

    region.flushcache();
  }

  @After
  public void tearDown() throws IOException {
    if (region != null) {
      HRegion.closeHRegion(region);
    }

    // We have to re-set the lazy seek flag back to the default so that other
    // unit tests are not affected.
    StoreScanner.enableLazySeekGlobally(
        StoreScanner.LAZY_SEEK_ENABLED_BY_DEFAULT);
  }
}
/*
 * Copyright (c) 2000, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.security.provider.certpath;

import java.io.IOException;
import java.security.AccessController;
import java.security.GeneralSecurityException;
import java.security.cert.*;
import java.util.*;

import sun.security.action.GetBooleanAction;
import sun.security.provider.certpath.PKIX.BuilderParams;
import sun.security.util.Debug;
import sun.security.x509.GeneralNames;
import sun.security.x509.GeneralNameInterface;
import sun.security.x509.GeneralSubtrees;
import sun.security.x509.NameConstraintsExtension;
import sun.security.x509.SubjectAlternativeNameExtension;
import sun.security.x509.X500Name;
import sun.security.x509.X509CertImpl;

/**
 * Abstract class representing a builder, which is able to retrieve
 * matching certificates and is able to verify a particular certificate.
 *
 * @since 1.4
 * @author Sean Mullan
 * @author Yassir Elley
 */
public abstract class Builder {

    private static final Debug debug = Debug.getInstance("certpath");

    /**
     * Lazily computed cache for {@link #getMatchingPolicies()}; {@code null}
     * until the first call.
     */
    private Set<String> matchingPolicies;

    final BuilderParams buildParams;
    final X509CertSelector targetCertConstraints;

    /**
     * Flag indicating whether support for the caIssuers field of the
     * Authority Information Access extension shall be enabled. Currently
     * disabled by default for compatibility reasons.
     */
    static final boolean USE_AIA = GetBooleanAction
        .privilegedGetProperty("com.sun.security.enableAIAcaIssuers");

    /**
     * Initialize the builder with the input parameters.
     *
     * @param buildParams the parameter set used to build a certification path
     */
    Builder(BuilderParams buildParams) {
        this.buildParams = buildParams;
        this.targetCertConstraints =
            (X509CertSelector)buildParams.targetCertConstraints();
    }

    /**
     * Retrieves certificates from the list of certStores using the buildParams
     * and the currentState as a filter
     *
     * @param currentState the current State
     * @param certStores list of CertStores
     */
    abstract Collection<X509Certificate> getMatchingCerts
        (State currentState, List<CertStore> certStores)
        throws CertStoreException, CertificateException, IOException;

    /**
     * Verifies the cert against the currentState, using the certPathList
     * generated thus far to help with loop detection
     *
     * @param cert the certificate to be verified
     * @param currentState the current state against which the cert is verified
     * @param certPathList the certPathList generated thus far
     */
    abstract void verifyCert(X509Certificate cert, State currentState,
                             List<X509Certificate> certPathList)
        throws GeneralSecurityException;

    /**
     * Verifies whether the input certificate completes the path.
     * When building in the forward direction, a trust anchor will
     * complete the path.
     *
     * @param cert the certificate to test
     * @return a boolean value indicating whether the cert completes the path.
     */
    abstract boolean isPathCompleted(X509Certificate cert);

    /**
     * Adds the certificate to the certPathList
     *
     * @param cert the certificate to be added
     * @param certPathList the certification path list
     */
    abstract void addCertToPath(X509Certificate cert,
                                LinkedList<X509Certificate> certPathList);

    /**
     * Removes final certificate from the certPathList
     *
     * @param certPathList the certification path list
     */
    abstract void removeFinalCertFromPath
        (LinkedList<X509Certificate> certPathList);

    /**
     * get distance of one GeneralName from another
     *
     * @param base GeneralName at base of subtree
     * @param test GeneralName to be tested against base
     * @param incomparable the value to return if the names are
     *  incomparable
     * @return distance of test name from base, where 0
     *  means exact match, 1 means test is an immediate
     *  child of base, 2 means test is a grandchild, etc.
     *  -1 means test is a parent of base, -2 means test
     *  is a grandparent, etc.
     */
    static int distance(GeneralNameInterface base,
                        GeneralNameInterface test, int incomparable)
    {
        switch (base.constrains(test)) {
        case GeneralNameInterface.NAME_DIFF_TYPE:
            if (debug != null) {
                debug.println("Builder.distance(): Names are different types");
            }
            return incomparable;
        case GeneralNameInterface.NAME_SAME_TYPE:
            if (debug != null) {
                debug.println("Builder.distance(): Names are same type but " +
                    "in different subtrees");
            }
            return incomparable;
        case GeneralNameInterface.NAME_MATCH:
            return 0;
        case GeneralNameInterface.NAME_WIDENS:
        case GeneralNameInterface.NAME_NARROWS:
            // ancestor/descendant relationship: fall through to the
            // subtree-depth computation below
            break;
        default: // should never occur
            return incomparable;
        }
        /* names are in same subtree */
        return test.subtreeDepth() - base.subtreeDepth();
    }

    /**
     * get hop distance of one GeneralName from another in links where
     * the names need not have an ancestor/descendant relationship.
     * For example, the hop distance from ou=D,ou=C,o=B,c=US to
     * ou=F,ou=E,ou=C,o=B,c=US is 3: D-&gt;C, C-&gt;E, E-&gt;F. The hop distance
     * from ou=C,o=B,c=US to ou=D,ou=C,o=B,c=US is -1: C-&gt;D
     *
     * @param base GeneralName
     * @param test GeneralName to be tested against base
     * @param incomparable the value to return if the names are
     *  incomparable
     * @return distance of test name from base measured in hops in the
     *  namespace hierarchy, where 0 means exact match.  Result
     *  is positive if path is some number of up hops followed by
     *  some number of down hops; result is negative if path is
     *  some number of down hops.
     */
    static int hops(GeneralNameInterface base, GeneralNameInterface test,
                    int incomparable)
    {
        int baseRtest = base.constrains(test);
        switch (baseRtest) {
        case GeneralNameInterface.NAME_DIFF_TYPE:
            if (debug != null) {
                debug.println("Builder.hops(): Names are different types");
            }
            return incomparable;
        case GeneralNameInterface.NAME_SAME_TYPE:
            /* base and test are in different subtrees */
            break;
        case GeneralNameInterface.NAME_MATCH:
            /* base matches test */
            return 0;
        case GeneralNameInterface.NAME_WIDENS:
            /* base is ancestor of test */
            return (test.subtreeDepth()-base.subtreeDepth());
        case GeneralNameInterface.NAME_NARROWS:
            /* base is descendant of test */
            return (test.subtreeDepth()-base.subtreeDepth());
        default: // should never occur
            return incomparable;
        }

        /* names are in different subtrees */
        if (base.getType() != GeneralNameInterface.NAME_DIRECTORY) {
            if (debug != null) {
                debug.println("Builder.hops(): hopDistance not implemented " +
                    "for this name type");
            }
            return incomparable;
        }
        X500Name baseName = (X500Name)base;
        X500Name testName = (X500Name)test;
        X500Name commonName = baseName.commonAncestor(testName);
        if (commonName == null) {
            if (debug != null) {
                debug.println("Builder.hops(): Names are in different " +
                    "namespaces");
            }
            return incomparable;
        } else {
            // total hops = up-hops from base to the common ancestor plus
            // down-hops from the common ancestor to test
            int commonDistance = commonName.subtreeDepth();
            int baseDistance = baseName.subtreeDepth();
            int testDistance = testName.subtreeDepth();
            return (baseDistance + testDistance - (2 * commonDistance));
        }
    }

    /**
     * Determine how close a given certificate gets you toward
     * a given target.
     *
     * @param constraints Current NameConstraints; if null,
     *        then caller must verify NameConstraints
     *        independently, realizing that this certificate
     *        may not actually lead to the target at all.
     * @param cert Candidate certificate for chain
     * @param target GeneralNameInterface name of target
     * @return distance from this certificate to target:
     * <ul>
     * <li>-1 means certificate could be CA for target, but
     *     there are no NameConstraints limiting how close
     * <li> 0 means certificate subject or subjectAltName
     *      matches target
     * <li> 1 means certificate is permitted to be CA for
     *      target.
     * <li> 2 means certificate is permitted to be CA for
     *      parent of target.
     * <li>&gt;0 in general, means certificate is permitted
     *     to be a CA for this distance higher in the naming
     *     hierarchy than the target, plus 1.
     * </ul>
     * <p>Note that the subject and/or subjectAltName of the
     * candidate cert does not have to be an ancestor of the
     * target in order to be a CA that can issue a certificate to
     * the target. In these cases, the target distance is calculated
     * by inspecting the NameConstraints extension in the candidate
     * certificate. For example, suppose the target is an X.500 DN with
     * a value of "CN=mullan,OU=ireland,O=sun,C=us" and the
     * NameConstraints extension in the candidate certificate
     * includes a permitted component of "O=sun,C=us", which implies
     * that the candidate certificate is allowed to issue certs in
     * the "O=sun,C=us" namespace. The target distance is 3
     * ((distance of permitted NC from target) + 1).
     * The (+1) is added to distinguish the result from the case
     * which returns (0).
     * @throws IOException if certificate does not get closer
     */
    static int targetDistance(NameConstraintsExtension constraints,
                              X509Certificate cert,
                              GeneralNameInterface target) throws IOException
    {
        /* ensure that certificate satisfies existing name constraints */
        if (constraints != null && !constraints.verify(cert)) {
            throw new IOException("certificate does not satisfy existing name "
                + "constraints");
        }

        X509CertImpl certImpl;
        try {
            certImpl = X509CertImpl.toImpl(cert);
        } catch (CertificateException e) {
            throw new IOException("Invalid certificate", e);
        }
        /* see if certificate subject matches target */
        X500Name subject = X500Name.asX500Name(certImpl.getSubjectX500Principal());
        if (subject.equals(target)) {
            /* match! */
            return 0;
        }

        SubjectAlternativeNameExtension altNameExt =
            certImpl.getSubjectAlternativeNameExtension();
        if (altNameExt != null) {
            GeneralNames altNames = altNameExt.get(
                SubjectAlternativeNameExtension.SUBJECT_NAME);
            /* see if any alternative name matches target */
            if (altNames != null) {
                for (int j = 0, n = altNames.size(); j < n; j++) {
                    GeneralNameInterface altName = altNames.get(j).getName();
                    if (altName.equals(target)) {
                        return 0;
                    }
                }
            }
        }

        /* no exact match; see if certificate can get us to target */

        /* first, get NameConstraints out of certificate */
        NameConstraintsExtension ncExt = certImpl.getNameConstraintsExtension();
        if (ncExt == null) {
            return -1;
        }

        /* merge certificate's NameConstraints with current NameConstraints */
        if (constraints != null) {
            constraints.merge(ncExt);
        } else {
            // Make sure we do a clone here, because we're probably
            // going to modify this object later and we don't want to
            // be sharing it with a Certificate object!
            constraints = (NameConstraintsExtension) ncExt.clone();
        }

        if (debug != null) {
            debug.println("Builder.targetDistance() merged constraints: "
                + String.valueOf(constraints));
        }
        /* reduce permitted by excluded */
        GeneralSubtrees permitted =
            constraints.get(NameConstraintsExtension.PERMITTED_SUBTREES);
        GeneralSubtrees excluded =
            constraints.get(NameConstraintsExtension.EXCLUDED_SUBTREES);
        if (permitted != null) {
            permitted.reduce(excluded);
        }
        if (debug != null) {
            debug.println("Builder.targetDistance() reduced constraints: "
                + permitted);
        }
        /* see if new merged constraints allow target */
        if (!constraints.verify(target)) {
            throw new IOException("New certificate not allowed to sign "
                + "certificate for target");
        }
        /* find distance to target, if any, in permitted */
        if (permitted == null) {
            /* certificate is unconstrained; could sign for anything */
            return -1;
        }
        for (int i = 0, n = permitted.size(); i < n; i++) {
            GeneralNameInterface perName = permitted.get(i).getName().getName();
            int distance = distance(perName, target, -1);
            if (distance >= 0) {
                return (distance + 1);
            }
        }
        /* no matching type in permitted; cert holder could certify target */
        return -1;
    }

    /**
     * This method can be used as an optimization to filter out
     * certificates that do not have policies which are valid.
     * It returns the set of policies (String OIDs) that should exist in
     * the certificate policies extension of the certificate that is
     * needed by the builder. The logic applied is as follows:
     * <p>
     *   1) If some initial policies have been set *and* policy mappings are
     *   inhibited, then acceptable certificates are those that include
     *   the ANY_POLICY OID or with policies that intersect with the
     *   initial policies.
     *   2) If no initial policies have been set *or* policy mappings are
     *   not inhibited then we don't have much to work with. All we know is
     *   that a certificate must have *some* policy because if it didn't
     *   have any policy then the policy tree would become null (and validation
     *   would fail).
     *
     * @return the Set of policies any of which must exist in a
     * cert's certificate policies extension in order for a cert to be selected.
     */
    Set<String> getMatchingPolicies() {
        // BUG FIX: the guard was inverted ("!= null"), so the cached set was
        // never computed and this method always returned null. Compute on
        // first call and cache the result.
        if (matchingPolicies == null) {
            Set<String> initialPolicies = buildParams.initialPolicies();
            if ((!initialPolicies.isEmpty()) &&
                (!initialPolicies.contains(PolicyChecker.ANY_POLICY)) &&
                (buildParams.policyMappingInhibited()))
            {
                matchingPolicies = new HashSet<>(initialPolicies);
                matchingPolicies.add(PolicyChecker.ANY_POLICY);
            } else {
                // we just return an empty set to make sure that there is
                // at least a certificate policies extension in the cert
                matchingPolicies = Collections.<String>emptySet();
            }
        }
        return matchingPolicies;
    }

    /**
     * Search the specified CertStores and add all certificates matching
     * selector to resultCerts. Self-signed certs are not useful here
     * and therefore ignored.
     *
     * If the targetCert criterion of the selector is set, only that cert
     * is examined and the CertStores are not searched.
     *
     * If checkAll is true, all CertStores are searched for matching certs.
     * If false, the method returns as soon as the first CertStore returns
     * a matching cert(s).
     *
     * Returns true iff resultCerts changed (a cert was added to the collection)
     */
    boolean addMatchingCerts(X509CertSelector selector,
                             Collection<CertStore> certStores,
                             Collection<X509Certificate> resultCerts,
                             boolean checkAll)
    {
        X509Certificate targetCert = selector.getCertificate();
        if (targetCert != null) {
            // no need to search CertStores
            if (selector.match(targetCert) && !X509CertImpl.isSelfSigned
                (targetCert, buildParams.sigProvider())) {
                if (debug != null) {
                    debug.println("Builder.addMatchingCerts: " +
                        "adding target cert" +
                        "\n SN: " + Debug.toHexString(
                            targetCert.getSerialNumber()) +
                        "\n Subject: " + targetCert.getSubjectX500Principal() +
                        "\n Issuer: " + targetCert.getIssuerX500Principal());
                }
                return resultCerts.add(targetCert);
            }
            return false;
        }
        boolean add = false;
        for (CertStore store : certStores) {
            try {
                Collection<? extends Certificate> certs =
                    store.getCertificates(selector);
                for (Certificate cert : certs) {
                    if (!X509CertImpl.isSelfSigned
                        ((X509Certificate)cert, buildParams.sigProvider())) {
                        if (resultCerts.add((X509Certificate)cert)) {
                            add = true;
                        }
                    }
                }
                if (!checkAll && add) {
                    return true;
                }
            } catch (CertStoreException cse) {
                // if getCertificates throws a CertStoreException, we ignore
                // it and move on to the next CertStore
                if (debug != null) {
                    debug.println("Builder.addMatchingCerts, non-fatal " +
                        "exception retrieving certs: " + cse);
                    cse.printStackTrace();
                }
            }
        }
        return add;
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2013-2015 Denis Forveille (titou10.titou10@gmail.com)
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.db2.model.plan;

import org.jkiss.dbeaver.ext.db2.DB2Constants;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.plan.DBCPlanCostNode;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.utils.CommonUtils;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * DB2 EXPLAIN_OPERATOR table
 *
 * @author Denis Forveille
 */
public class DB2PlanOperator extends DB2PlanNode implements DBCPlanCostNode {

    // SQL template used to read a single operator's rows from the DB2 explain
    // tables. The three %s placeholders (schema, table name, ORDER BY column)
    // are filled by String.format in loadChildren() before the statement is
    // prepared; the nine ? markers are bound in setQueryParameters().
    private static final String SEL_BASE_SELECT;

    static {
        StringBuilder sb = new StringBuilder(1024);
        sb.append("SELECT *");
        sb.append(" FROM %s.%s");
        sb.append(" WHERE EXPLAIN_REQUESTER = ?"); // 1
        sb.append(" AND EXPLAIN_TIME = ?"); // 2
        sb.append(" AND SOURCE_NAME = ?");// 3
        sb.append(" AND SOURCE_SCHEMA = ?");// 4
        sb.append(" AND SOURCE_VERSION = ?");// 5
        sb.append(" AND EXPLAIN_LEVEL = ?");// 6
        sb.append(" AND STMTNO = ?");// 7
        sb.append(" AND SECTNO = ?");// 8
        sb.append(" AND OPERATOR_ID = ?");// 9
        sb.append(" ORDER BY %s");// %s placeholder (sort column), not a bind parameter
        sb.append(" WITH UR");
        SEL_BASE_SELECT = sb.toString();
    }

    // Parent explain statement; supplies the key values used to filter the
    // explain tables in setQueryParameters().
    private DB2PlanStatement db2Statement;
    // Schema that holds the EXPLAIN_* tables (first %s of SEL_BASE_SELECT).
    private String planTableSchema;

    // Children loaded eagerly from EXPLAIN_ARGUMENT / EXPLAIN_PREDICATE.
    private List<DB2PlanOperatorArgument> listArguments;
    private List<DB2PlanOperatorPredicate> listPredicates;

    private String displayName;
    private String nodename;
    private Integer operatorId;
    private DB2PlanOperatorType operatorType;
    private Double totalCost;
    // Starts at -1 so the Math.max in setEstimatedCardinality() always accepts
    // the first real value pushed by a child node.
    private Double estimatedCardinality = -1d;

    // ------------
    // Constructors
    // ------------

    /**
     * Reads one EXPLAIN_OPERATOR row from dbResult and immediately loads the
     * operator's arguments and predicates via the same session.
     *
     * @throws SQLException if reading the row or loading children fails
     */
    public DB2PlanOperator(JDBCSession session, JDBCResultSet dbResult, DB2PlanStatement db2Statement,
        String planTableSchema) throws SQLException
    {
        this.db2Statement = db2Statement;
        this.planTableSchema = planTableSchema;

        this.operatorId = JDBCUtils.safeGetInteger(dbResult, "OPERATOR_ID");
        this.operatorType = CommonUtils.valueOf(DB2PlanOperatorType.class, JDBCUtils.safeGetString(dbResult, "OPERATOR_TYPE"));
        this.totalCost = JDBCUtils.safeGetDouble(dbResult, "TOTAL_COST");

        this.nodename = buildName(operatorId);
        this.displayName = nodename + " - " + operatorType;

        loadChildren(session);
    }

    @Override
    public void setEstimatedCardinality(Double estimatedCardinality)
    {
        // DF: not sure if this rule is correct. Seems to be OK
        // (keeps the largest cardinality reported by any caller)
        this.estimatedCardinality = Math.max(this.estimatedCardinality, estimatedCardinality);
    }

    @Override
    public String toString()
    {
        return displayName;
    }

    @Override
    public String getNodeName()
    {
        return nodename;
    }

    @Override
    public String getNodeType()
    {
        return CommonUtils.toString(operatorType);
    }

    @Override
    public String getNodeDescription()
    {
        return null;
    }

    // --------
    // Helpers
    // --------

    /** Node name is simply the numeric operator id rendered as a string. */
    public static String buildName(Integer operatorId)
    {
        return String.valueOf(operatorId);
    }

    // ----------------
    // Properties
    // ----------------

    @Property(viewable = true, order = 1)
    public DB2PlanOperatorType getOperatorType()
    {
        return operatorType;
    }

    @Property(viewable = true, order = 2)
    public Integer getOperatorId()
    {
        return operatorId;
    }

    @Property(viewable = true, order = 3)
    public String getDisplayName()
    {
        return ""; // Looks better without a name...
    }

    @Property(viewable = true, order = 4, format = DB2Constants.PLAN_COST_FORMAT)
    public Double getTotalCost()
    {
        return totalCost;
    }

    @Property(viewable = true, order = 5, format = DB2Constants.PLAN_COST_FORMAT)
    public Double getEstimatedCardinality()
    {
        return estimatedCardinality;
    }

    @Property(viewable = false, order = 6)
    public List<DB2PlanOperatorArgument> getArguments()
    {
        return listArguments;
    }

    @Property(viewable = false, order = 7)
    public List<DB2PlanOperatorPredicate> getPredicates()
    {
        return listPredicates;
    }

    // -------------
    // Load children
    // -------------

    /**
     * Loads the operator's arguments (EXPLAIN_ARGUMENT) and predicates
     * (EXPLAIN_PREDICATE) with two parameterized queries built from
     * SEL_BASE_SELECT. Statements and result sets are closed by
     * try-with-resources.
     */
    private void loadChildren(JDBCSession session) throws SQLException
    {

        listArguments = new ArrayList<>();
        try (JDBCPreparedStatement sqlStmt = session.prepareStatement(
            String.format(SEL_BASE_SELECT, planTableSchema, "EXPLAIN_ARGUMENT", "ARGUMENT_TYPE"))) {
            setQueryParameters(sqlStmt);
            try (JDBCResultSet res = sqlStmt.executeQuery()) {
                while (res.next()) {
                    listArguments.add(new DB2PlanOperatorArgument(res));
                }
            }
        }

        listPredicates = new ArrayList<>();
        try (JDBCPreparedStatement sqlStmt = session.prepareStatement(
            String.format(SEL_BASE_SELECT, planTableSchema, "EXPLAIN_PREDICATE", "PREDICATE_ID"))) {
            setQueryParameters(sqlStmt);
            try (JDBCResultSet res = sqlStmt.executeQuery()) {
                while (res.next()) {
                    listPredicates.add(new DB2PlanOperatorPredicate(res, this));
                }
            }
        }
    }

    /**
     * Binds the nine WHERE-clause parameters of SEL_BASE_SELECT, in the exact
     * order of the numbered comments in the static initializer.
     */
    private void setQueryParameters(JDBCPreparedStatement sqlStmt) throws SQLException
    {
        sqlStmt.setString(1, db2Statement.getExplainRequester());
        sqlStmt.setTimestamp(2, db2Statement.getExplainTime());
        sqlStmt.setString(3, db2Statement.getSourceName());
        sqlStmt.setString(4, db2Statement.getSourceSchema());
        sqlStmt.setString(5, db2Statement.getSourceVersion());
        sqlStmt.setString(6, db2Statement.getExplainLevel());
        sqlStmt.setInt(7, db2Statement.getStmtNo());
        sqlStmt.setInt(8, db2Statement.getSectNo());
        sqlStmt.setInt(9, operatorId);
    }

    @Override
    public Number getNodeCost() {
        return totalCost;
    }

    @Override
    public Number getNodePercent() {
        return null;
    }

    @Override
    public Number getNodeDuration() {
        return null;
    }

    @Override
    public Number getNodeRowCount() {
        return null;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.assertions.gui;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;

import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JToggleButton;
import javax.swing.ListSelectionModel;

import org.apache.jmeter.assertions.ResponseAssertion;
import org.apache.jmeter.gui.GUIMenuSortOrder;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.util.HeaderAsPropertyRenderer;
import org.apache.jmeter.gui.util.JSyntaxTextArea;
import org.apache.jmeter.gui.util.JTextScrollPane;
import org.apache.jmeter.gui.util.PowerTableModel;
import org.apache.jmeter.gui.util.TextAreaCellRenderer;
import org.apache.jmeter.gui.util.TextAreaTableCellEditor;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.GuiUtils;

/**
 * GUI interface for a {@link ResponseAssertion}.
 */
@GUIMenuSortOrder(1)
public class AssertionGui extends AbstractAssertionGui {
    private static final long serialVersionUID = 240L;

    /** The name of the table column in the list of patterns. */
    private static final String COL_RESOURCE_NAME = "assertion_patterns_to_test"; //$NON-NLS-1$

    /** Radio button indicating that the text response should be tested. */
    private JRadioButton responseStringButton;

    /** Radio button indicating that the text of a document should be tested. */
    private JRadioButton responseAsDocumentButton;

    /** Radio button indicating that the URL should be tested. */
    private JRadioButton urlButton;

    /** Radio button indicating that the responseMessage should be tested. */
    private JRadioButton responseMessageButton;

    /** Radio button indicating that the responseCode should be tested. */
    private JRadioButton responseCodeButton;

    /** Radio button indicating that the headers should be tested. */
    private JRadioButton responseHeadersButton;

    /** Radio button indicating that the request headers should be tested. */
    private JRadioButton requestHeadersButton;

    /** Radio button indicating that the request data should be tested. */
    private JRadioButton requestDataButton;

    /**
     * Checkbox to indicate whether the response should be forced successful
     * before testing. This is intended for use when checking the status code or
     * status message.
     */
    private JCheckBox assumeSuccess;

    /**
     * Radio button indicating to test if the field contains one of the
     * patterns.
     */
    private JRadioButton containsBox;

    /**
     * Radio button indicating to test if the field matches one of the patterns.
     */
    private JRadioButton matchesBox;

    /**
     * Radio button indicating if the field equals the string.
     */
    private JRadioButton equalsBox;

    /**
     * Radio button indicating if the field contains the string.
     */
    private JRadioButton substringBox;

    /**
     * Checkbox indicating to test that the field does NOT contain/match the
     * patterns.
     */
    private JCheckBox notBox;

    /**
     * Add new OR checkbox.
     */
    private JCheckBox orBox;

    /** A table of patterns to test against. */
    private JTable stringTable;

    /** Button to delete a pattern. */
    private JButton deletePattern;

    /** Table model for the pattern table. */
    private PowerTableModel tableModel;

    private JSyntaxTextArea alternativeFailureMessage;

    /**
     * Create a new AssertionGui panel.
     */
    public AssertionGui() {
        init();
    }

    @Override
    public String getLabelResource() {
        return "assertion_title"; // $NON-NLS-1$
    }

    /* Implements JMeterGUIComponent.createTestElement() */
    @Override
    public TestElement createTestElement() {
        ResponseAssertion el = new ResponseAssertion();
        modifyTestElement(el);
        return el;
    }

    /* Implements JMeterGUIComponent.modifyTestElement(TestElement) */
    @Override
    public void modifyTestElement(TestElement el) {
        GuiUtils.stopTableEditing(stringTable);
        configureTestElement(el);
        if (el instanceof ResponseAssertion) {
            ResponseAssertion ra = (ResponseAssertion) el;

            ra.setCustomFailureMessage(alternativeFailureMessage.getText());

            saveScopeSettings(ra);

            ra.clearTestStrings();
            String[] testStrings = tableModel.getData().getColumn(COL_RESOURCE_NAME);
            for (String testString : testStrings) {
                ra.addTestString(testString);
            }

            // Which response field is tested (mutually exclusive radio group)
            if (responseStringButton.isSelected()) {
                ra.setTestFieldResponseData();
            } else if (responseAsDocumentButton.isSelected()) {
                ra.setTestFieldResponseDataAsDocument();
            } else if (responseCodeButton.isSelected()) {
                ra.setTestFieldResponseCode();
            } else if (responseMessageButton.isSelected()) {
                ra.setTestFieldResponseMessage();
            } else if (requestHeadersButton.isSelected()) {
                ra.setTestFieldRequestHeaders();
            } else if (requestDataButton.isSelected()) {
                ra.setTestFieldRequestData();
            } else if (responseHeadersButton.isSelected()) {
                ra.setTestFieldResponseHeaders();
            } else { // Assume URL
                ra.setTestFieldURL();
            }

            ra.setAssumeSuccess(assumeSuccess.isSelected());

            // Comparison type (mutually exclusive radio group)
            if (containsBox.isSelected()) {
                ra.setToContainsType();
            } else if (equalsBox.isSelected()) {
                ra.setToEqualsType();
            } else if (substringBox.isSelected()) {
                ra.setToSubstringType();
            } else {
                ra.setToMatchType();
            }

            if (notBox.isSelected()) {
                ra.setToNotType();
            } else {
                ra.unsetNotType();
            }

            if (orBox.isSelected()) {
                ra.setToOrType();
            } else {
                ra.unsetOrType();
            }
        }
    }

    /**
     * Implements JMeterGUIComponent.clearGui
     */
    @Override
    public void clearGui() {
        super.clearGui();
        GuiUtils.stopTableEditing(stringTable);
        tableModel.clearData();

        responseStringButton.setSelected(true);
        urlButton.setSelected(false);
        responseCodeButton.setSelected(false);
        responseMessageButton.setSelected(false);
        requestHeadersButton.setSelected(false);
        requestDataButton.setSelected(false);
        responseHeadersButton.setSelected(false);
        assumeSuccess.setSelected(false);

        substringBox.setSelected(true);
        notBox.setSelected(false);
        orBox.setSelected(false);
        alternativeFailureMessage.setText(""); //$NON-NLS-1$
        // The table is now empty, so the delete button must be disabled
        // (previously it could stay enabled after a clear).
        checkButtonsStatus();
    }

    /**
     * A newly created component can be initialized with the contents of a Test
     * Element object by calling this method. The component is responsible for
     * querying the Test Element object for the relevant information to display
     * in its GUI.
     *
     * @param el
     *            the TestElement to configure
     */
    @Override
    public void configure(TestElement el) {
        super.configure(el);
        ResponseAssertion model = (ResponseAssertion) el;

        showScopeSettings(model, true);

        if (model.getCustomFailureMessage() != null) {
            alternativeFailureMessage.setText(model.getCustomFailureMessage());
        }

        if (model.isContainsType()) {
            containsBox.setSelected(true);
        } else if (model.isEqualsType()) {
            equalsBox.setSelected(true);
        } else if (model.isSubstringType()) {
            substringBox.setSelected(true);
        } else {
            matchesBox.setSelected(true);
        }

        notBox.setSelected(model.isNotType());
        orBox.setSelected(model.isOrType());

        if (model.isTestFieldResponseData()) {
            responseStringButton.setSelected(true);
        } else if (model.isTestFieldResponseDataAsDocument()) {
            responseAsDocumentButton.setSelected(true);
        } else if (model.isTestFieldResponseCode()) {
            responseCodeButton.setSelected(true);
        } else if (model.isTestFieldResponseMessage()) {
            responseMessageButton.setSelected(true);
        } else if (model.isTestFieldRequestHeaders()) {
            requestHeadersButton.setSelected(true);
        } else if (model.isTestFieldRequestData()) {
            requestDataButton.setSelected(true);
        } else if (model.isTestFieldResponseHeaders()) {
            responseHeadersButton.setSelected(true);
        } else // Assume it is the URL
        {
            urlButton.setSelected(true);
        }

        assumeSuccess.setSelected(model.getAssumeSuccess());

        tableModel.clearData();
        for (JMeterProperty jMeterProperty : model.getTestStrings()) {
            tableModel.addRow(new Object[] { jMeterProperty.getStringValue() });
        }

        // Reuse the shared enable/disable logic instead of duplicating it here
        // (the table row count now equals model.getTestStrings().size()).
        checkButtonsStatus();

        tableModel.fireTableDataChanged();
    }

    /**
     * Initialize the GUI components and layout.
     */
    void init() { // WARNING: called from ctor so must not be overridden (i.e. must be private or final)
        setLayout(new BorderLayout());
        Box box = Box.createVerticalBox();
        setBorder(makeBorder());

        box.add(makeTitlePanel());
        box.add(createScopePanel(true));
        box.add(createFieldPanel());
        box.add(createTypePanel());
        add(box, BorderLayout.NORTH);
        add(createStringPanel(), BorderLayout.CENTER);
        add(createCustomAssertionMessagePanel(), BorderLayout.SOUTH);
    }

    /**
     * Create a panel allowing the user to choose which response field should be
     * tested.
     *
     * @return a new panel for selecting the response field
     */
    private JPanel createFieldPanel() {
        responseStringButton = new JRadioButton(JMeterUtils.getResString("assertion_text_resp")); //$NON-NLS-1$
        responseAsDocumentButton = new JRadioButton(JMeterUtils.getResString("assertion_text_document")); //$NON-NLS-1$
        urlButton = new JRadioButton(JMeterUtils.getResString("assertion_url_samp")); //$NON-NLS-1$
        responseCodeButton = new JRadioButton(JMeterUtils.getResString("assertion_code_resp")); //$NON-NLS-1$
        responseMessageButton = new JRadioButton(JMeterUtils.getResString("assertion_message_resp")); //$NON-NLS-1$
        responseHeadersButton = new JRadioButton(JMeterUtils.getResString("assertion_headers")); //$NON-NLS-1$
        requestHeadersButton = new JRadioButton(JMeterUtils.getResString("assertion_req_headers")); //$NON-NLS-1$
        requestDataButton = new JRadioButton(JMeterUtils.getResString("assertion_req_data")); //$NON-NLS-1$

        ButtonGroup group = new ButtonGroup();
        group.add(responseStringButton);
        group.add(responseAsDocumentButton);
        group.add(urlButton);
        group.add(responseCodeButton);
        group.add(responseMessageButton);
        group.add(requestHeadersButton);
        group.add(responseHeadersButton);
        group.add(requestDataButton);
        responseStringButton.setSelected(true);

        assumeSuccess = new JCheckBox(JMeterUtils.getResString("assertion_assume_success")); //$NON-NLS-1$

        GridBagLayout gridBagLayout = new GridBagLayout();
        GridBagConstraints gbc = new GridBagConstraints();
        initConstraints(gbc);

        JPanel panel = new JPanel(gridBagLayout);
        panel.setBorder(BorderFactory.createTitledBorder(JMeterUtils.getResString("assertion_resp_field"))); //$NON-NLS-1$

        addField(panel, responseStringButton, gbc);
        addField(panel, responseCodeButton, gbc);
        addField(panel, responseMessageButton, gbc);
        addField(panel, responseHeadersButton, gbc);
        resetConstraints(gbc);
        addField(panel, requestHeadersButton, gbc);
        addField(panel, urlButton, gbc);
        addField(panel, responseAsDocumentButton, gbc);
        addField(panel, assumeSuccess, gbc);
        resetConstraints(gbc);
        addField(panel, requestDataButton, gbc);

        return panel;
    }

    /** Add one toggle to the grid and advance to the next column. */
    private void addField(JPanel panel, JToggleButton button, GridBagConstraints gbc) {
        panel.add(button, gbc.clone());
        gbc.gridx++;
        gbc.fill = GridBagConstraints.HORIZONTAL;
    }

    /** Move the constraints to the start of the next grid row. */
    // Next line
    private void resetConstraints(GridBagConstraints gbc) {
        gbc.gridx = 0;
        gbc.gridy++;
        gbc.fill = GridBagConstraints.NONE;
    }

    /** Set the initial constraint values for the field-selection grid. */
    private void initConstraints(GridBagConstraints gbc) {
        gbc.anchor = GridBagConstraints.NORTHWEST;
        gbc.fill = GridBagConstraints.NONE;
        gbc.gridheight = 1;
        gbc.gridwidth = 1;
        gbc.gridx = 0;
        gbc.gridy = 0;
        gbc.weightx = 1;
        gbc.weighty = 1;
    }

    /**
     * Create a panel allowing the user to choose what type of test should be
     * performed.
     *
     * @return a new panel for selecting the type of assertion test
     */
    private JPanel createTypePanel() {
        JPanel panel = new JPanel();
        panel.setBorder(BorderFactory.createTitledBorder(JMeterUtils.getResString("assertion_pattern_match_rules"))); //$NON-NLS-1$

        ButtonGroup group = new ButtonGroup();

        containsBox = new JRadioButton(JMeterUtils.getResString("assertion_contains")); //$NON-NLS-1$
        group.add(containsBox);
        containsBox.setSelected(true);
        panel.add(containsBox);

        matchesBox = new JRadioButton(JMeterUtils.getResString("assertion_matches")); //$NON-NLS-1$
        group.add(matchesBox);
        panel.add(matchesBox);

        equalsBox = new JRadioButton(JMeterUtils.getResString("assertion_equals")); //$NON-NLS-1$
        group.add(equalsBox);
        panel.add(equalsBox);

        substringBox = new JRadioButton(JMeterUtils.getResString("assertion_substring")); //$NON-NLS-1$
        group.add(substringBox);
        panel.add(substringBox);

        notBox = new JCheckBox(JMeterUtils.getResString("assertion_not")); //$NON-NLS-1$
        panel.add(notBox);

        orBox = new JCheckBox(JMeterUtils.getResString("assertion_or")); //$NON-NLS-1$
        panel.add(orBox);

        return panel;
    }

    /**
     * Create a panel allowing the user to supply a list of string patterns to
     * test against.
     *
     * @return a new panel for adding string patterns
     */
    private JPanel createStringPanel() {
        tableModel = new PowerTableModel(new String[] { COL_RESOURCE_NAME }, new Class[] { String.class });
        stringTable = new JTable(tableModel);
        stringTable.getTableHeader().setDefaultRenderer(new HeaderAsPropertyRenderer());
        stringTable.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
        JMeterUtils.applyHiDPI(stringTable);

        TextAreaCellRenderer renderer = new TextAreaCellRenderer();
        stringTable.setRowHeight(renderer.getPreferredHeight());
        stringTable.setDefaultRenderer(String.class, renderer);
        stringTable.setDefaultEditor(String.class, new TextAreaTableCellEditor());
        stringTable.setPreferredScrollableViewportSize(new Dimension(100, 70));

        JPanel panel = new JPanel();
        panel.setLayout(new BorderLayout());
        panel.setBorder(BorderFactory.createTitledBorder(JMeterUtils.getResString("assertion_patterns_to_test"))); //$NON-NLS-1$

        panel.add(new JScrollPane(stringTable), BorderLayout.CENTER);
        panel.add(createButtonPanel(), BorderLayout.SOUTH);

        return panel;
    }

    private JPanel createCustomAssertionMessagePanel() {
        JPanel panel = new JPanel();
        panel.setBorder(BorderFactory.createTitledBorder(JMeterUtils.getResString("assertion_custom_message"))); //$NON-NLS-1$
        alternativeFailureMessage = JSyntaxTextArea.getInstance(3, 80);
        panel.add(JTextScrollPane.getInstance(alternativeFailureMessage));
        return panel;
    }

    /**
     * Create a panel with buttons to add and delete string patterns.
     *
     * @return the new panel with add and delete buttons
     */
    private JPanel createButtonPanel() {
        JButton addPattern = new JButton(JMeterUtils.getResString("add")); //$NON-NLS-1$
        addPattern.addActionListener(new AddPatternListener());

        JButton addFromClipboardPattern = new JButton(JMeterUtils.getResString("add_from_clipboard")); //$NON-NLS-1$
        addFromClipboardPattern.addActionListener(new AddFromClipboardListener());

        deletePattern = new JButton(JMeterUtils.getResString("delete")); //$NON-NLS-1$
        deletePattern.addActionListener(new ClearPatternsListener());
        deletePattern.setEnabled(false);

        JPanel buttonPanel = new JPanel();
        buttonPanel.add(addPattern);
        buttonPanel.add(addFromClipboardPattern);
        buttonPanel.add(deletePattern);
        return buttonPanel;
    }

    /**
     * An ActionListener for deleting a pattern.
     */
    private class ClearPatternsListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            GuiUtils.cancelEditing(stringTable);

            int[] rowsSelected = stringTable.getSelectedRows();
            stringTable.clearSelection();
            if (rowsSelected.length > 0) {
                // Remove bottom-up so earlier removals don't shift indices
                for (int i = rowsSelected.length - 1; i >= 0; i--) {
                    tableModel.removeRow(rowsSelected[i]);
                }
                tableModel.fireTableDataChanged();
            } else {
                // No selection: remove the first row, if any
                if (tableModel.getRowCount() > 0) {
                    tableModel.removeRow(0);
                    tableModel.fireTableDataChanged();
                }
            }

            if (stringTable.getModel().getRowCount() == 0) {
                deletePattern.setEnabled(false);
            }
        }
    }

    /**
     * An ActionListener for adding a pattern.
     */
    private class AddPatternListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            GuiUtils.stopTableEditing(stringTable);
            tableModel.addNewRow();
            checkButtonsStatus();
            tableModel.fireTableDataChanged();
        }
    }

    /**
     * An ActionListener for pasting from clipboard
     */
    private class AddFromClipboardListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            deletePattern.setEnabled(true);
            GuiUtils.stopTableEditing(stringTable);
            int rowCount = stringTable.getRowCount();
            try {
                String clipboardContent = GuiUtils.getPastedText();
                if (clipboardContent == null) {
                    return;
                }
                // One pattern per clipboard line, trimmed
                String[] clipboardLines = clipboardContent.split("\n");
                for (String clipboardLine : clipboardLines) {
                    tableModel.addRow(new Object[] { clipboardLine.trim() });
                }
                if (stringTable.getRowCount() > rowCount) {
                    checkButtonsStatus();

                    // Highlight (select) and scroll to the appropriate rows.
                    int rowToSelect = tableModel.getRowCount() - 1;
                    stringTable.setRowSelectionInterval(rowCount, rowToSelect);
                    stringTable.scrollRectToVisible(stringTable.getCellRect(rowCount, 0, true));
                }
            } catch (IOException ioe) {
                JOptionPane.showMessageDialog(GuiPackage.getInstance().getMainFrame(),
                        "Could not add data from clipboard:\n" + ioe.getLocalizedMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
            } catch (UnsupportedFlavorException ufe) {
                // Message fixed: was the garbled "Could not add retrieve ... from
                // clipboard<message>" with no separator before the exception text.
                JOptionPane.showMessageDialog(GuiPackage.getInstance().getMainFrame(),
                        "Could not retrieve " + DataFlavor.stringFlavor.getHumanPresentableName()
                                + " from clipboard:\n" + ufe.getLocalizedMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
            }
            tableModel.fireTableDataChanged();
        }
    }

    /** Enable the delete button iff the pattern table has at least one row. */
    protected void checkButtonsStatus() {
        // Disable DELETE if there are no rows in the table to delete.
        if (tableModel.getRowCount() == 0) {
            deletePattern.setEnabled(false);
        } else {
            deletePattern.setEnabled(true);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.serde2; import java.util.Properties; import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol; import org.apache.thrift.protocol.TField; import org.apache.thrift.protocol.TList; import org.apache.thrift.protocol.TMap; import org.apache.thrift.protocol.TStruct; import org.apache.thrift.transport.TMemoryBuffer; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; /** * TestTCTLSeparatedProtocol. 
 *
 */
public class TestTCTLSeparatedProtocol extends TestCase {

  public TestTCTLSeparatedProtocol() throws Exception {
  }

  /**
   * Writes raw bytes with ctrl-A/ctrl-B/ctrl-C separators directly into a
   * memory transport, then reads them back as struct fields and a map,
   * checking empty-field and end-of-row (null) behavior.
   */
  public void testReads() throws Exception {
    TMemoryBuffer trans = new TMemoryBuffer(1024);
    String foo = "Hello";
    String bar = "World!";
    String key = "22";
    String value = "TheValue";
    String key2 = "24";
    String value2 = "TheValueAgain";

    // Default TCTLSeparatedProtocol separators: ^A between columns,
    // ^B between collection elements, ^C between map key and value.
    byte columnSeparator[] = {1};
    byte elementSeparator[] = {2};
    byte kvSeparator[] = {3};

    trans.write(foo.getBytes(), 0, foo.getBytes().length);
    trans.write(columnSeparator, 0, 1);
    // Two consecutive column separators encode an empty middle column.
    trans.write(columnSeparator, 0, 1);
    trans.write(bar.getBytes(), 0, bar.getBytes().length);
    trans.write(columnSeparator, 0, 1);
    trans.write(key.getBytes(), 0, key.getBytes().length);
    trans.write(kvSeparator, 0, 1);
    trans.write(value.getBytes(), 0, value.getBytes().length);
    trans.write(elementSeparator, 0, 1);
    trans.write(key2.getBytes(), 0, key2.getBytes().length);
    trans.write(kvSeparator, 0, 1);
    trans.write(value2.getBytes(), 0, value2.getBytes().length);
    trans.flush();

    // use 3 as the row buffer size to force lots of re-buffering.
    // NOTE(review): the comment above says 3 but the code passes 1024 —
    // confirm which was intended (testNulls below does use a tiny buffer).
    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 1024);
    prot.initialize(new Configuration(), new Properties());

    prot.readStructBegin();

    prot.readFieldBegin();
    String hello = prot.readString();
    prot.readFieldEnd();
    assertTrue(hello.equals(foo));

    // The empty column between the two ^A separators reads as "".
    prot.readFieldBegin();
    assertTrue(prot.readString().equals(""));
    prot.readFieldEnd();

    prot.readFieldBegin();
    assertTrue(prot.readString().equals(bar));
    prot.readFieldEnd();

    prot.readFieldBegin();
    TMap mapHeader = prot.readMapBegin();
    assertTrue(mapHeader.size == 2);
    assertTrue(prot.readI32() == 22);
    assertTrue(prot.readString().equals(value));
    assertTrue(prot.readI32() == 24);
    assertTrue(prot.readString().equals(value2));
    prot.readMapEnd();
    prot.readFieldEnd();

    // Reading past the last column returns null.
    prot.readFieldBegin();
    hello = prot.readString();
    prot.readFieldEnd();
    assertNull(hello);

    prot.readStructEnd();
  }

  /**
   * Writes a struct (int, double list, string, map, string list, string)
   * through the protocol, verifies the exact separated byte layout, then
   * reads everything back with a small (10-byte) buffer to exercise
   * re-buffering.
   */
  public void testWrites() throws Exception {
    TMemoryBuffer trans = new TMemoryBuffer(1024);
    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 1024);

    prot.writeStructBegin(new TStruct());
    prot.writeFieldBegin(new TField());
    prot.writeI32(100);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeListBegin(new TList());
    prot.writeDouble(348.55);
    prot.writeDouble(234.22);
    prot.writeListEnd();
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeString("hello world!");
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeMapBegin(new TMap());
    prot.writeString("key1");
    prot.writeString("val1");
    prot.writeString("key2");
    prot.writeString("val2");
    prot.writeString("key3");
    prot.writeString("val3");
    prot.writeMapEnd();
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeListBegin(new TList());
    prot.writeString("elem1");
    prot.writeString("elem2");
    prot.writeListEnd();
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeString("bye!");
    prot.writeFieldEnd();

    prot.writeStructEnd();
    trans.flush();

    byte[] b = new byte[3 * 1024];
    int len = trans.read(b, 0, b.length);
    String test = new String(b, 0, len);

    // Expected layout: the reference string below contains the literal
    // ^A/^B/^C control separators between the visible tokens.
    String testRef = "100348.55234.22hello world!key1val1key2val2key3val3elem1elem2bye!";
    assertTrue(test.equals(testRef));

    trans = new TMemoryBuffer(1023);
    trans.write(b, 0, len);

    //
    // read back!
    //

    // deliberately tiny buffer size to force re-buffering on read
    prot = new TCTLSeparatedProtocol(trans, 10);
    prot.initialize(new Configuration(), new Properties());

    // 100 is the start
    prot.readStructBegin();
    prot.readFieldBegin();
    assertTrue(prot.readI32() == 100);
    prot.readFieldEnd();

    // let's see if doubles work ok
    prot.readFieldBegin();
    TList l = prot.readListBegin();
    assertTrue(l.size == 2);
    assertTrue(prot.readDouble() == 348.55);
    assertTrue(prot.readDouble() == 234.22);
    prot.readListEnd();
    prot.readFieldEnd();

    // nice message
    prot.readFieldBegin();
    assertTrue(prot.readString().equals("hello world!"));
    prot.readFieldEnd();

    // 3 element map
    prot.readFieldBegin();
    TMap m = prot.readMapBegin();
    assertTrue(m.size == 3);
    assertTrue(prot.readString().equals("key1"));
    assertTrue(prot.readString().equals("val1"));
    assertTrue(prot.readString().equals("key2"));
    assertTrue(prot.readString().equals("val2"));
    assertTrue(prot.readString().equals("key3"));
    assertTrue(prot.readString().equals("val3"));
    prot.readMapEnd();
    prot.readFieldEnd();

    // the 2 element list
    prot.readFieldBegin();
    l = prot.readListBegin();
    assertTrue(l.size == 2);
    assertTrue(prot.readString().equals("elem1"));
    assertTrue(prot.readString().equals("elem2"));
    prot.readListEnd();
    prot.readFieldEnd();

    // final string
    prot.readFieldBegin();
    assertTrue(prot.readString().equals("bye!"));
    prot.readFieldEnd();

    // should return nulls at end
    prot.readFieldBegin();
    assertNull(prot.readString());
    prot.readFieldEnd();

    // should return nulls at end
    prot.readFieldBegin();
    assertNull(prot.readString());
    prot.readFieldEnd();

    prot.readStructEnd();
  }

  /**
   * Verifies that a quoted field ("hello, world!" containing the field
   * delimiter ',') survives a write/read round trip; the reader strips the
   * quote characters.
   */
  public void testQuotedWrites() throws Exception {
    TMemoryBuffer trans = new TMemoryBuffer(4096);
    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 4096);
    Properties schema = new Properties();
    schema.setProperty(serdeConstants.QUOTE_CHAR, "\"");
    schema.setProperty(serdeConstants.FIELD_DELIM, ",");
    prot.initialize(new Configuration(), schema);

    String testStr = "\"hello, world!\"";

    prot.writeStructBegin(new TStruct());

    prot.writeFieldBegin(new TField());
    prot.writeString(testStr);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeListBegin(new TList());
    prot.writeString("elem1");
    prot.writeString("elem2");
    prot.writeListEnd();
    prot.writeFieldEnd();

    prot.writeStructEnd();
    prot.writeString("\n");

    trans.flush();

    byte b[] = new byte[4096];
    int len = trans.read(b, 0, b.length);

    trans = new TMemoryBuffer(4096);
    trans.write(b, 0, len);
    prot = new TCTLSeparatedProtocol(trans, 1024);
    prot.initialize(new Configuration(), schema);

    prot.readStructBegin();
    prot.readFieldBegin();
    final String firstRead = prot.readString();
    prot.readFieldEnd();

    // The reader strips the surrounding quote characters.
    testStr = testStr.replace("\"", "");

    assertEquals(testStr, firstRead);

    // the 2 element list
    prot.readFieldBegin();
    TList l = prot.readListBegin();
    assertTrue(l.size == 2);
    assertTrue(prot.readString().equals("elem1"));
    assertTrue(prot.readString().equals("elem2"));
    prot.readListEnd();
    prot.readFieldEnd();

    // should return nulls at end
    prot.readFieldBegin();
    assertNull(prot.readString());
    prot.readFieldEnd();

    // should return nulls at end
    prot.readFieldBegin();
    assertNull(prot.readString());
    prot.readFieldEnd();

    prot.readStructEnd();
  }

  /**
   * Tests a sample apache log format. This is actually better done in general
   * with a more TRegexLike protocol, but for this case, TCTLSeparatedProtocol
   * can do it.
   */
  public void test1ApacheLogFormat() throws Exception {
    final String sample = "127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326";

    TMemoryBuffer trans = new TMemoryBuffer(4096);
    trans.write(sample.getBytes(), 0, sample.getBytes().length);
    trans.flush();

    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 4096);
    Properties schema = new Properties();

    // this is a hacky way of doing the quotes since it will match any 2 of
    // these, so
    // "[ hello this is something to split [" would be considered to be quoted.
    schema.setProperty(serdeConstants.QUOTE_CHAR, "(\"|\\[|\\])");

    schema.setProperty(serdeConstants.FIELD_DELIM, " ");
    // "-" in the log means "no value" and must read back as null.
    schema.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "-");
    prot.initialize(new Configuration(), schema);

    prot.readStructBegin();

    // ip address
    prot.readFieldBegin();
    final String ip = prot.readString();
    prot.readFieldEnd();
    assertEquals("127.0.0.1", ip);

    // identd
    prot.readFieldBegin();
    final String identd = prot.readString();
    prot.readFieldEnd();
    assertNull(identd);

    // user
    prot.readFieldBegin();
    final String user = prot.readString();
    prot.readFieldEnd();
    assertEquals("frank", user);

    // finishTime
    prot.readFieldBegin();
    final String finishTime = prot.readString();
    prot.readFieldEnd();
    assertEquals("10/Oct/2000:13:55:36 -0700", finishTime);

    // requestLine
    prot.readFieldBegin();
    final String requestLine = prot.readString();
    prot.readFieldEnd();
    assertEquals("GET /apache_pb.gif HTTP/1.0", requestLine);

    // returncode
    prot.readFieldBegin();
    final int returnCode = prot.readI32();
    prot.readFieldEnd();
    assertEquals(200, returnCode);

    // return size
    prot.readFieldBegin();
    final int returnSize = prot.readI32();
    prot.readFieldEnd();
    assertEquals(2326, returnSize);

    prot.readStructEnd();
  }

  /**
   * Null columns and null map keys/values must be written as the "\N"
   * sentinel, and read back as nulls (or 0 for a null i32). Uses a 3-byte
   * read buffer to force heavy re-buffering.
   */
  public void testNulls() throws Exception {
    TMemoryBuffer trans = new TMemoryBuffer(1024);
    TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 10);
    prot.initialize(new Configuration(), new Properties());

    prot.writeStructBegin(new TStruct());

    prot.writeFieldBegin(new TField());
    prot.writeString(null);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeString(null);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeI32(100);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeString(null);
    prot.writeFieldEnd();

    prot.writeFieldBegin(new TField());
    prot.writeMapBegin(new TMap());
    prot.writeString(null);
    prot.writeString(null);
    prot.writeString("key2");
    prot.writeString(null);
    prot.writeString(null);
    prot.writeString("val3");
    prot.writeMapEnd();
    prot.writeFieldEnd();

    prot.writeStructEnd();

    byte b[] = new byte[3 * 1024];
    int len = trans.read(b, 0, b.length);
    String written = new String(b, 0, len);

    // Nulls serialize as the literal two characters backslash-N, with the
    // usual ^A/^B/^C separators between the visible tokens.
    String testRef = "\\N\\N100\\N\\N\\Nkey2\\N\\Nval3";

    assertTrue(testRef.equals(written));

    trans = new TMemoryBuffer(1023);
    trans.write(b, 0, len);

    prot = new TCTLSeparatedProtocol(trans, 3);
    prot.initialize(new Configuration(), new Properties());

    prot.readStructBegin();

    prot.readFieldBegin();
    String ret = prot.readString();
    prot.readFieldEnd();
    assertNull(ret);

    prot.readFieldBegin();
    ret = prot.readString();
    prot.readFieldEnd();
    assertNull(ret);

    prot.readFieldBegin();
    int ret1 = prot.readI32();
    prot.readFieldEnd();
    assertTrue(ret1 == 100);

    // A null column read as i32 comes back as 0 (asserted after the map).
    prot.readFieldBegin();
    ret1 = prot.readI32();
    prot.readFieldEnd();

    prot.readFieldBegin();
    TMap map = prot.readMapBegin();
    assertTrue(map.size == 3);

    assertNull(prot.readString());
    assertNull(prot.readString());

    assertTrue(prot.readString().equals("key2"));
    assertNull(prot.readString());

    assertNull(prot.readString());
    assertTrue(prot.readString().equals("val3"));

    prot.readMapEnd();
    prot.readFieldEnd();
    assertTrue(ret1 == 0);
  }

  /**
   * A transport whose read() always throws must surface the underlying
   * TTransportException as the cause of a runtime exception when the
   * tokenizer is first initialized in readStructBegin().
   */
  public void testShouldThrowRunTimeExceptionIfUnableToInitializeTokenizer() throws Exception {
    // Stub transport: every read fails, writes are silently accepted.
    TCTLSeparatedProtocol separatedProtocol = new TCTLSeparatedProtocol(new TTransport() {
      @Override
      public void close() {
      }

      @Override
      public boolean isOpen() {
        return false;
      }

      @Override
      public void open() throws TTransportException {
      }

      @Override
      public int read(byte[] buf, int off, int len) throws TTransportException {
        throw new TTransportException();
      }

      @Override
      public void write(byte[] buf, int off, int len) throws TTransportException {
      }
    });
    separatedProtocol.initialize(null, new Properties());
    try {
      separatedProtocol.readStructBegin();
      fail("Runtime Exception is expected if the intialization of tokenizer failed.");
    } catch (Exception e) {
      assertTrue(e.getCause() instanceof TTransportException);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.spring.ws.filter.impl;

import java.util.Map;

import javax.xml.XMLConstants;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.spring.ws.filter.MessageFilter;
import org.apache.camel.component.xslt.XsltUriResolver;
import org.apache.camel.component.xslt.saxon.XsltSaxonComponent;
import org.apache.camel.spi.ClassResolver;
import org.apache.camel.support.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ws.WebServiceMessage;
import org.springframework.ws.soap.SoapMessage;

/**
 * Message filter that transforms the header of a soap message by applying a
 * configured XSLT stylesheet to it in place, passing the Camel message
 * headers to the stylesheet as transformer parameters.
 */
public class HeaderTransformationMessageFilter implements MessageFilter {
    private static final String SAXON_TRANSFORMER_FACTORY_CLASS_NAME = "net.sf.saxon.TransformerFactoryImpl";
    private static final String SOAP_HEADER_TRANSFORMATION_PROBLEM = "Soap header transformation problem";
    private static final Logger LOG = LoggerFactory.getLogger(HeaderTransformationMessageFilter.class);
    // URI of the XSLT stylesheet, resolved through XsltUriResolver at use time.
    private String xslt;
    // When true, use the Saxon transformer factory instead of the JAXP default.
    private boolean saxon;

    /**
     * @param xslt URI of the XSLT stylesheet applied to the soap header
     */
    public HeaderTransformationMessageFilter(String xslt) {
        this.xslt = xslt;
    }

    @Override
    public void filterProducer(Exchange exchange, WebServiceMessage webServiceMessage) {
        if (exchange != null) {
            // Producer side: transform using the incoming message's headers.
            processHeader(exchange.getContext(), exchange.getIn(), webServiceMessage);
        }
    }

    @Override
    public void filterConsumer(Exchange exchange, WebServiceMessage webServiceMessage) {
        if (exchange != null) {
            // Consumer side: transform using the response message's headers.
            Message responseMessage = exchange.getMessage();
            processHeader(exchange.getContext(), responseMessage, webServiceMessage);
        }
    }

    /**
     * Transform the header of a {@link SoapMessage} in place. Messages that
     * are not soap messages are left untouched.
     *
     * @param context the Camel context used to resolve the stylesheet and factory class
     * @param inOrOut the Camel message whose headers become XSLT parameters
     * @param webServiceMessage the message whose soap header is transformed
     */
    private void processHeader(CamelContext context, Message inOrOut, WebServiceMessage webServiceMessage) {
        if (webServiceMessage instanceof SoapMessage) {
            SoapMessage soapMessage = (SoapMessage) webServiceMessage;
            try {
                XsltUriResolver resolver = new XsltUriResolver(context, xslt);
                Source stylesheetResource = resolver.resolve(xslt, null);

                TransformerFactory transformerFactory = getTransformerFactory(context);
                Transformer transformer = transformerFactory.newTransformer(stylesheetResource);

                addParameters(inOrOut, transformer);

                // Source and result are both the soap header: in-place rewrite.
                transformer.transform(soapMessage.getSoapHeader().getSource(), soapMessage.getSoapHeader().getResult());
            } catch (TransformerException e) {
                throw new RuntimeException("Cannot transform the header of the soap message", e);
            }
        }
    }

    /**
     * Adding the headers of the message as parameter to the transformer
     *
     * @param inOrOut the Camel message supplying the headers
     * @param transformer the transformer receiving the parameters
     */
    private void addParameters(Message inOrOut, Transformer transformer) {
        Map<String, Object> headers = inOrOut.getHeaders();
        for (Map.Entry<String, Object> headerEntry : headers.entrySet()) {
            String key = headerEntry.getKey();

            // Key's with '$' are not allowed in XSLT
            if (key != null && !key.startsWith("$")) {
                transformer.setParameter(key, String.valueOf(headerEntry.getValue()));
            }
        }
    }

    /**
     * Getting a {@link TransformerFactory} with logging
     *
     * Secure processing is enabled best-effort, and warnings/errors from the
     * transformation are routed to this class's logger.
     *
     * @return {@link TransformerFactory}
     */
    private TransformerFactory getTransformerFactory(CamelContext context) {
        TransformerFactory transformerFactory = null;
        if (saxon) {
            transformerFactory = getSaxonTransformerFactory(context);
        } else {
            transformerFactory = TransformerFactory.newInstance();
        }

        if (transformerFactory == null) {
            throw new IllegalStateException("Cannot resolve a transformer factory");
        }

        try {
            transformerFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, Boolean.TRUE);
        } catch (TransformerConfigurationException ex) {
            // ignore: not every factory implementation supports this feature
        }
        transformerFactory.setErrorListener(new ErrorListener() {

            @Override
            public void warning(TransformerException exception) throws TransformerException {
                LOG.warn(SOAP_HEADER_TRANSFORMATION_PROBLEM, exception);
            }

            @Override
            public void fatalError(TransformerException exception) throws TransformerException {
                LOG.error(SOAP_HEADER_TRANSFORMATION_PROBLEM, exception);
            }

            @Override
            public void error(TransformerException exception) throws TransformerException {
                LOG.error(SOAP_HEADER_TRANSFORMATION_PROBLEM, exception);
            }
        });
        return transformerFactory;
    }

    /**
     * Loading the saxon transformer class
     *
     * @param context Camel context whose class resolver loads the Saxon factory
     * @return the Saxon {@link TransformerFactory}, or null if instantiation returned nothing
     */
    private TransformerFactory getSaxonTransformerFactory(CamelContext context) {
        final ClassResolver resolver = context.getClassResolver();
        try {
            Class<TransformerFactory> factoryClass = resolver.resolveMandatoryClass(
                    SAXON_TRANSFORMER_FACTORY_CLASS_NAME, TransformerFactory.class,
                    XsltSaxonComponent.class.getClassLoader());

            if (factoryClass != null) {
                return ObjectHelper.newInstance(factoryClass);
            }
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Cannot load the saxon transformer class", e);
        }

        return null;
    }

    public String getXslt() {
        return xslt;
    }

    public void setXslt(String xslt) {
        this.xslt = xslt;
    }

    public boolean isSaxon() {
        return saxon;
    }

    public void setSaxon(boolean saxon) {
        this.saxon = saxon;
    }
}
package com.game.core; import java.util.Random; import android.util.SparseIntArray; public class Grid { private int capacity; private int column; private int row; private int max; private int randomTileIndex; private int targetTileVal; private SparseIntArray occupied_index; private SparseIntArray should_move_index; private SparseIntArray should_move_val; public static final int CELL_EMPTY = 0; public static final int INVALID_RANDOM_INDEX=-1; public static final int MERGE_DIRECTION_LEFT = 0; public static final int MERGE_DIRECTION_RIGHT = 1; public static final int MERGE_DIRECTION_TOP = 2; public static final int MERGE_DIRECTION_BOTTOM = 3; public static final int MERGE_INVALID_MAX = -2; public static final int MERGE_FAIL = -3; public static final int MERGE_NO_OP = -1; public static final int GAME_WIN = 1; public static final int GAME_ON = 0; public static final int GAME_OVER = -4; public static final int TARGET_TILE_VAL = 2048; //probability of generating a 2 or a 4 public static int RANDOM_MIN_TILE_PROBABILITY = 60; public Grid (int size){ if (size<0) throw new Error("Grid size cannot be negative"); this.capacity = size*size; this.column = size; this.row = size; this.max = 0; this.targetTileVal = Grid.TARGET_TILE_VAL; this.randomTileIndex = Grid.INVALID_RANDOM_INDEX; this.occupied_index = new SparseIntArray(this.capacity){ public int get(int key){ return this.get(key,CELL_EMPTY); } }; this.should_move_index = new SparseIntArray(this.capacity){ public int get(int key){ return this.get(key,CELL_EMPTY); } }; this.should_move_val = new SparseIntArray(this.capacity){ public int get(int key){ return this.get(key,CELL_EMPTY); } }; } public Grid(int row, int column){ if (row<0 || column<0) throw new Error("Grid size cannot be negative"); this.capacity = row*column; this.column = row; this.row = column; this.max = 0; this.targetTileVal = Grid.TARGET_TILE_VAL; this.randomTileIndex = Grid.INVALID_RANDOM_INDEX; this.occupied_index = new SparseIntArray(this.capacity){ public 
int get(int key){ return this.get(key,CELL_EMPTY); } }; this.should_move_index = new SparseIntArray(this.capacity){ public int get(int key){ return this.get(key,CELL_EMPTY); } }; this.should_move_val = new SparseIntArray(this.capacity){ public int get(int key){ return this.get(key,CELL_EMPTY); } }; } protected void fillCell(int row,int column,int val){ if (this.column<1 && this.row<1){ throw new Error("Initialize error"); } else if (row<0 || row>this.row || column<0 || column>this.column){ throw new Error("Function argument error"); } else if (val<0){ throw new Error("Function argument error"); } else { fillCell((row-1)*this.column+column-1,val); } } protected void fillCell(int index,int value){ if (index>this.capacity)throw new Error("Function argument error"); else { this.occupied_index.put(index,value); } } protected boolean eraseCell(int row,int column,int val){ if (this.column<1 && this.row<1){ throw new Error("Initialize error"); } else if (row<0 || row>this.row || column<0 || column>this.column){ throw new Error("Function argument error"); } else if (val<0){ throw new Error("Function argument error"); } else { return eraseCell((row-1)*this.column+column-1); } } protected boolean eraseCell(int index){ if (index>this.capacity)return false; else { this.occupied_index.delete(index); return true; } } protected void replaceCell(int fromCell,int toCell,int val){ should_move_val.put(fromCell, occupied_index.get(fromCell)); fillCell(toCell,val); eraseCell(fromCell); should_move_index.put(fromCell,toCell); } protected void empty(){ this.occupied_index.clear(); this.should_move_index.clear(); this.should_move_val.clear(); this.randomTileIndex = Grid.INVALID_RANDOM_INDEX; this.max = 0; } protected boolean randomAvailCell(int val){ if (this.occupied_index.size()>this.capacity){ return false; } Random r = new Random(); int index = r.nextInt(capacity); if (!isOccupied(index)){ this.occupied_index.put(index, val); this.randomTileIndex = index; if (val>this.max)this.max = 
val; return true; } for (int i=1;i<capacity;i++){ if (index-i>=0){ if (!isOccupied(index-i)){ this.occupied_index.put(index-i, val); this.randomTileIndex = index-i; if (val>this.max)this.max = val; return true; } } if (index+i<capacity){ if (!isOccupied(index+i)){ this.occupied_index.put(index+i, val); this.randomTileIndex = index+i; if (val>this.max)this.max = val; return true; } } } return false; } public boolean isOccupied(int index){ int key_val = this.occupied_index.get(index); if (key_val!= CELL_EMPTY){ return true; } return false; } public int getCellVal(int row, int column){ if (this.column<1 && this.row<1){ throw new Error("Initialize Error"); } else if (row<0 || row>this.row || column<0 || column>this.column){ throw new Error("Function argument error"); } int index = (row-1)*this.column+column-1; return getCellVal(index); } public int getCellVal(int index){ if (index >= this.capacity)return CELL_EMPTY; int ret = occupied_index.get(index,CELL_EMPTY); if (ret == CELL_EMPTY)return CELL_EMPTY; return ret; } public boolean isFull(){ return this.occupied_index.size()>=this.capacity?true:false; } public int merge(int direction){ this.randomTileIndex = Grid.INVALID_RANDOM_INDEX; this.should_move_index.clear(); this.should_move_val.clear(); if (max >= this.targetTileVal)return MERGE_NO_OP; int mergedScore = 0; if (this.occupied_index.size()==0) return MERGE_NO_OP; boolean moved = false; switch (direction){ case MERGE_DIRECTION_LEFT: for (int i=0;i<this.row;i++){ if (this.column == 1)break; int j = 0; int offset = i*this.column; int curr_index = offset; while (j<this.column){ if (isOccupied(offset+j)){ if (curr_index < offset+j){ if (isOccupied(curr_index)){ int curr_index_val = getCellVal(curr_index); int j_index_val = getCellVal(offset+j); if (curr_index_val == j_index_val){ replaceCell(offset+j,curr_index,curr_index_val*2); moved = true; mergedScore+=curr_index_val*2; if (curr_index_val*2>max)max = curr_index_val*2; } else { if (curr_index+1 != offset+j){ 
// NOTE(review): this excerpt begins mid-way through the grid's merge/move
// routine of a 2048-style game board (the method signature and the first,
// LEFT-direction, case start before this chunk). Only the tail of that case
// is visible; the code then handles the RIGHT, BOTTOM and TOP directions,
// spawns one random tile when anything moved, and is followed by the
// remaining public accessors of the class.
                            // Tail of the in-progress direction case: finish a
                            // non-merging shift, then track the running maximum.
                            replaceCell(offset+j,curr_index+1,j_index_val); moved = true;
                        }
                        if (j_index_val>max)max = j_index_val;
                        if (curr_index_val>max)max = curr_index_val;
                    }
                    curr_index++;
                } else {
                    // Destination is empty: slide the tile at offset+j onto curr_index.
                    int curr_index_val = getCellVal(offset+j);
                    if (curr_index_val>max) { max = curr_index_val; }
                    replaceCell(offset+j,curr_index,curr_index_val);
                    moved = true;
                }
            }
        }
        j++;
    }
}
break;
// RIGHT: for each row, scan cells from the right edge leftwards; curr_index is
// the right-most slot still available to receive a tile in that row.
case MERGE_DIRECTION_RIGHT:
for (int i=0;i<this.row;i++){
    if (this.column == 1)break; // a single column cannot move horizontally
    int j = this.column - 1;
    int offset = i*this.column;            // linear index of this row's first cell
    int curr_index = offset + this.column - 1;
    while (j>=0){
        if (isOccupied(offset+j)){
            if (curr_index > offset+j){
                if (isOccupied(curr_index)){
                    int curr_index_val = getCellVal(curr_index);
                    int j_index_val = getCellVal(offset+j);
                    if (curr_index_val == j_index_val){
                        // Equal tiles merge into curr_index; score the merge.
                        replaceCell(offset+j,curr_index,curr_index_val*2);
                        moved = true;
                        mergedScore+=curr_index_val*2;
                        if (curr_index_val*2>max)max = curr_index_val*2;
                    } else {
                        // Different values: pack the tile next to curr_index.
                        if (curr_index-1 != offset+j){
                            replaceCell(offset+j,curr_index-1,j_index_val); moved = true;
                        }
                        if (j_index_val>max)max = j_index_val;
                        if (curr_index_val>max)max = curr_index_val;
                    }
                    curr_index--;
                } else {
                    // Destination slot empty: slide the tile into it.
                    int curr_index_val = getCellVal(offset+j);
                    if (curr_index_val>max) { max = curr_index_val; }
                    replaceCell(offset+j,curr_index,curr_index_val);
                    moved = true;
                }
            }
        }
        j--;
    }
}
break;
// BOTTOM: for each column, scan cells from the bottom row upwards; vertical
// neighbours are this.column apart in the linear cell index.
case MERGE_DIRECTION_BOTTOM:
for (int i=0;i<this.column;i++){
    if (this.row == 1)break; // a single row cannot move vertically
    int j = this.column * (this.row-1)+i;  // bottom cell of column i
    int curr_index = j;
    while (j>=i){
        if (isOccupied(j)){
            if (curr_index > j){
                if (isOccupied(curr_index)){
                    int curr_index_val = getCellVal(curr_index);
                    int j_index_val = getCellVal(j);
                    if (curr_index_val == j_index_val){
                        replaceCell(j,curr_index,curr_index_val*2);
                        moved = true;
                        mergedScore+=curr_index_val*2;
                        if (curr_index_val*2>max)max = curr_index_val*2;
                    } else {
                        if (curr_index-this.column != j){
                            replaceCell(j,curr_index-this.column,j_index_val); moved = true;
                        }
                        if (j_index_val>max)max = j_index_val;
                        if (curr_index_val>max)max = curr_index_val;
                    }
                    curr_index-=this.column;
                } else {
                    int curr_index_val = getCellVal(j);
                    if (curr_index_val>max) { max = curr_index_val; }
                    replaceCell(j,curr_index,curr_index_val);
                    moved = true;
                }
            }
        }
        j-=this.column;
    }
}
break;
// TOP: mirror of BOTTOM — scan each column from the top row downwards.
case MERGE_DIRECTION_TOP:
for (int i=0;i<this.column;i++){
    if (this.row == 1)break;
    int j = i;                              // top cell of column i
    int curr_index = j;
    while (j<=this.column * (this.row-1)+i){
        if (isOccupied(j)){
            if (curr_index < j){
                if (isOccupied(curr_index)){
                    int curr_index_val = getCellVal(curr_index);
                    int j_index_val = getCellVal(j);
                    if (curr_index_val == j_index_val){
                        replaceCell(j,curr_index,curr_index_val*2);
                        moved = true;
                        mergedScore+=curr_index_val*2;
                        if (curr_index_val*2>max)max = curr_index_val*2;
                    } else {
                        if (curr_index+this.column != j){
                            replaceCell(j,curr_index+this.column,j_index_val); moved = true;
                        }
                        if (j_index_val>max)max = j_index_val;
                        if (curr_index_val>max)max = curr_index_val;
                    }
                    curr_index+=this.column;
                } else {
                    int curr_index_val = getCellVal(j);
                    if (curr_index_val>max) { max = curr_index_val; }
                    replaceCell(j,curr_index,curr_index_val);
                    moved = true;
                }
            }
        }
        j+=this.column;
    }
}
break;
default:return MERGE_FAIL; // unknown direction code
}
if (max==MERGE_INVALID_MAX)return MERGE_INVALID_MAX;
if (moved){
    // A tile moved or merged: spawn one new random tile (value 2 with
    // probability RANDOM_MIN_TILE_PROBABILITY percent, otherwise 4).
    // NOTE(review): a new Random is created on every call — presumably fine
    // here, but a shared instance would be the usual pattern; verify intent.
    Random rand = new Random();
    int randVal = rand.nextInt(100);
    boolean success = false;
    if (randVal<Grid.RANDOM_MIN_TILE_PROBABILITY){
        success = randomAvailCell(2);
    } else {
        success = randomAvailCell(4);
    }
    if (!success){
        // Should be impossible: a move always frees at least one cell.
        throw new Error("Generate random tile error");
    }
    return mergedScore; // total points earned by merges in this move
}
return Grid.MERGE_NO_OP; // nothing moved
}

// Reports the game status: GAME_WIN once the target tile value is reached,
// GAME_ON while any cell is free or any equal neighbours (right/down) remain,
// GAME_OVER otherwise.
public int checkGameState(){
    if (max>=targetTileVal)return Grid.GAME_WIN;
    if (this.occupied_index.size()<capacity)return Grid.GAME_ON;
    for (int i=0;i<capacity;i++){
        int currTileVal = occupied_index.get(i);
        // Right neighbour; treated as empty at the right edge of a row.
        int nextTileVal = ((i+1)%this.column==0)? CELL_EMPTY:occupied_index.get(i+1);
        // NOTE(review): i+this.column exceeds the board for the last row —
        // assumes occupied_index.get returns a harmless default (e.g.
        // SparseIntArray returns 0) rather than throwing; verify.
        int downTileVal = occupied_index.get(i+this.column);
        if (nextTileVal == currTileVal || downTileVal == currTileVal)
            return Grid.GAME_ON;
    }
    return Grid.GAME_OVER;
}

// --- simple accessors -------------------------------------------------

public int getMax(){ return this.max; }

public int getTargetVal(){ return this.targetTileVal; }

public void setTargetVal(int val){ this.targetTileVal = val; }

public SparseIntArray getShouldMoveIndex(){ return this.should_move_index; }

public SparseIntArray getShouldMoveVal(){ return this.should_move_val; }

public int getCurrentRandomTileIndex(){ return this.randomTileIndex; }

public int columnCount(){ return this.column; }

public int rowCount(){ return this.row; }
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package ch.boye.httpclientandroidlib.message; import java.util.List; import java.util.ArrayList; import ch.boye.httpclientandroidlib.HeaderElement; import ch.boye.httpclientandroidlib.NameValuePair; import ch.boye.httpclientandroidlib.ParseException; import ch.boye.httpclientandroidlib.protocol.HTTP; import ch.boye.httpclientandroidlib.util.CharArrayBuffer; /** * Basic implementation for parsing header values into elements. * Instances of this class are stateless and thread-safe. * Derived classes are expected to maintain these properties. * * @since 4.0 */ public class BasicHeaderValueParser implements HeaderValueParser { /** * A default instance of this class, for use as default or fallback. * Note that {@link BasicHeaderValueParser} is not a singleton, there * can be many instances of the class itself and of derived classes. 
* The instance here provides non-customized, default behavior. */ public final static BasicHeaderValueParser DEFAULT = new BasicHeaderValueParser(); private final static char PARAM_DELIMITER = ';'; private final static char ELEM_DELIMITER = ','; private final static char[] ALL_DELIMITERS = new char[] { PARAM_DELIMITER, ELEM_DELIMITER }; // public default constructor /** * Parses elements with the given parser. * * @param value the header value to parse * @param parser the parser to use, or <code>null</code> for default * * @return array holding the header elements, never <code>null</code> */ public final static HeaderElement[] parseElements(final String value, HeaderValueParser parser) throws ParseException { if (value == null) { throw new IllegalArgumentException ("Value to parse may not be null"); } if (parser == null) parser = BasicHeaderValueParser.DEFAULT; CharArrayBuffer buffer = new CharArrayBuffer(value.length()); buffer.append(value); ParserCursor cursor = new ParserCursor(0, value.length()); return parser.parseElements(buffer, cursor); } // non-javadoc, see interface HeaderValueParser public HeaderElement[] parseElements(final CharArrayBuffer buffer, final ParserCursor cursor) { if (buffer == null) { throw new IllegalArgumentException("Char array buffer may not be null"); } if (cursor == null) { throw new IllegalArgumentException("Parser cursor may not be null"); } List elements = new ArrayList(); while (!cursor.atEnd()) { HeaderElement element = parseHeaderElement(buffer, cursor); if (!(element.getName().length() == 0 && element.getValue() == null)) { elements.add(element); } } return (HeaderElement[]) elements.toArray(new HeaderElement[elements.size()]); } /** * Parses an element with the given parser. 
* * @param value the header element to parse * @param parser the parser to use, or <code>null</code> for default * * @return the parsed header element */ public final static HeaderElement parseHeaderElement(final String value, HeaderValueParser parser) throws ParseException { if (value == null) { throw new IllegalArgumentException ("Value to parse may not be null"); } if (parser == null) parser = BasicHeaderValueParser.DEFAULT; CharArrayBuffer buffer = new CharArrayBuffer(value.length()); buffer.append(value); ParserCursor cursor = new ParserCursor(0, value.length()); return parser.parseHeaderElement(buffer, cursor); } // non-javadoc, see interface HeaderValueParser public HeaderElement parseHeaderElement(final CharArrayBuffer buffer, final ParserCursor cursor) { if (buffer == null) { throw new IllegalArgumentException("Char array buffer may not be null"); } if (cursor == null) { throw new IllegalArgumentException("Parser cursor may not be null"); } NameValuePair nvp = parseNameValuePair(buffer, cursor); NameValuePair[] params = null; if (!cursor.atEnd()) { char ch = buffer.charAt(cursor.getPos() - 1); if (ch != ELEM_DELIMITER) { params = parseParameters(buffer, cursor); } } return createHeaderElement(nvp.getName(), nvp.getValue(), params); } /** * Creates a header element. * Called from {@link #parseHeaderElement}. * * @return a header element representing the argument */ protected HeaderElement createHeaderElement( final String name, final String value, final NameValuePair[] params) { return new BasicHeaderElement(name, value, params); } /** * Parses parameters with the given parser. 
* * @param value the parameter list to parse * @param parser the parser to use, or <code>null</code> for default * * @return array holding the parameters, never <code>null</code> */ public final static NameValuePair[] parseParameters(final String value, HeaderValueParser parser) throws ParseException { if (value == null) { throw new IllegalArgumentException ("Value to parse may not be null"); } if (parser == null) parser = BasicHeaderValueParser.DEFAULT; CharArrayBuffer buffer = new CharArrayBuffer(value.length()); buffer.append(value); ParserCursor cursor = new ParserCursor(0, value.length()); return parser.parseParameters(buffer, cursor); } // non-javadoc, see interface HeaderValueParser public NameValuePair[] parseParameters(final CharArrayBuffer buffer, final ParserCursor cursor) { if (buffer == null) { throw new IllegalArgumentException("Char array buffer may not be null"); } if (cursor == null) { throw new IllegalArgumentException("Parser cursor may not be null"); } int pos = cursor.getPos(); int indexTo = cursor.getUpperBound(); while (pos < indexTo) { char ch = buffer.charAt(pos); if (HTTP.isWhitespace(ch)) { pos++; } else { break; } } cursor.updatePos(pos); if (cursor.atEnd()) { return new NameValuePair[] {}; } List params = new ArrayList(); while (!cursor.atEnd()) { NameValuePair param = parseNameValuePair(buffer, cursor); params.add(param); char ch = buffer.charAt(cursor.getPos() - 1); if (ch == ELEM_DELIMITER) { break; } } return (NameValuePair[]) params.toArray(new NameValuePair[params.size()]); } /** * Parses a name-value-pair with the given parser. 
* * @param value the NVP to parse * @param parser the parser to use, or <code>null</code> for default * * @return the parsed name-value pair */ public final static NameValuePair parseNameValuePair(final String value, HeaderValueParser parser) throws ParseException { if (value == null) { throw new IllegalArgumentException ("Value to parse may not be null"); } if (parser == null) parser = BasicHeaderValueParser.DEFAULT; CharArrayBuffer buffer = new CharArrayBuffer(value.length()); buffer.append(value); ParserCursor cursor = new ParserCursor(0, value.length()); return parser.parseNameValuePair(buffer, cursor); } // non-javadoc, see interface HeaderValueParser public NameValuePair parseNameValuePair(final CharArrayBuffer buffer, final ParserCursor cursor) { return parseNameValuePair(buffer, cursor, ALL_DELIMITERS); } private static boolean isOneOf(final char ch, final char[] chs) { if (chs != null) { for (int i = 0; i < chs.length; i++) { if (ch == chs[i]) { return true; } } } return false; } public NameValuePair parseNameValuePair(final CharArrayBuffer buffer, final ParserCursor cursor, final char[] delimiters) { if (buffer == null) { throw new IllegalArgumentException("Char array buffer may not be null"); } if (cursor == null) { throw new IllegalArgumentException("Parser cursor may not be null"); } boolean terminated = false; int pos = cursor.getPos(); int indexFrom = cursor.getPos(); int indexTo = cursor.getUpperBound(); // Find name String name = null; while (pos < indexTo) { char ch = buffer.charAt(pos); if (ch == '=') { break; } if (isOneOf(ch, delimiters)) { terminated = true; break; } pos++; } if (pos == indexTo) { terminated = true; name = buffer.substringTrimmed(indexFrom, indexTo); } else { name = buffer.substringTrimmed(indexFrom, pos); pos++; } if (terminated) { cursor.updatePos(pos); return createNameValuePair(name, null); } // Find value String value = null; int i1 = pos; boolean qouted = false; boolean escaped = false; while (pos < indexTo) { char ch = 
buffer.charAt(pos); if (ch == '"' && !escaped) { qouted = !qouted; } if (!qouted && !escaped && isOneOf(ch, delimiters)) { terminated = true; break; } if (escaped) { escaped = false; } else { escaped = qouted && ch == '\\'; } pos++; } int i2 = pos; // Trim leading white spaces while (i1 < i2 && (HTTP.isWhitespace(buffer.charAt(i1)))) { i1++; } // Trim trailing white spaces while ((i2 > i1) && (HTTP.isWhitespace(buffer.charAt(i2 - 1)))) { i2--; } // Strip away quotes if necessary if (((i2 - i1) >= 2) && (buffer.charAt(i1) == '"') && (buffer.charAt(i2 - 1) == '"')) { i1++; i2--; } value = buffer.substring(i1, i2); if (terminated) { pos++; } cursor.updatePos(pos); return createNameValuePair(name, value); } /** * Creates a name-value pair. * Called from {@link #parseNameValuePair}. * * @param name the name * @param value the value, or <code>null</code> * * @return a name-value pair representing the arguments */ protected NameValuePair createNameValuePair(final String name, final String value) { return new BasicNameValuePair(name, value); } }
/* * Copyright 2010 Srikanth Reddy Lingala * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.lingala.zip4j.unzip; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.RandomAccessFile; import java.util.Arrays; import java.util.zip.CRC32; import net.lingala.zip4j.core.HeaderReader; import net.lingala.zip4j.crypto.AESDecrypter; import net.lingala.zip4j.crypto.IDecrypter; import net.lingala.zip4j.crypto.StandardDecrypter; import net.lingala.zip4j.exception.ZipException; import net.lingala.zip4j.io.InflaterInputStream; import net.lingala.zip4j.io.PartInputStream; import net.lingala.zip4j.io.ZipInputStream; import net.lingala.zip4j.model.AESExtraDataRecord; import net.lingala.zip4j.model.FileHeader; import net.lingala.zip4j.model.LocalFileHeader; import net.lingala.zip4j.model.UnzipParameters; import net.lingala.zip4j.model.ZipModel; import net.lingala.zip4j.progress.ProgressMonitor; import net.lingala.zip4j.util.InternalZipConstants; import net.lingala.zip4j.util.Raw; import net.lingala.zip4j.util.Zip4jConstants; import net.lingala.zip4j.util.Zip4jUtil; public class UnzipEngine { private ZipModel zipModel; private FileHeader fileHeader; private int currSplitFileCounter = 0; private LocalFileHeader localFileHeader; private IDecrypter decrypter; private CRC32 crc; public UnzipEngine(ZipModel zipModel, FileHeader fileHeader) throws 
ZipException { if (zipModel == null || fileHeader == null) { throw new ZipException("Invalid parameters passed to StoreUnzip. One or more of the parameters were null"); } this.zipModel = zipModel; this.fileHeader = fileHeader; this.crc = new CRC32(); } public void unzipFile(ProgressMonitor progressMonitor, String outPath, String newFileName, UnzipParameters unzipParameters) throws ZipException { if (zipModel == null || fileHeader == null || !Zip4jUtil.isStringNotNullAndNotEmpty(outPath)) { throw new ZipException("Invalid parameters passed during unzipping file. One or more of the parameters were null"); } InputStream is = null; OutputStream os = null; try { byte[] buff = new byte[InternalZipConstants.BUFF_SIZE]; int readLength = -1; is = getInputStream(); os = getOutputStream(outPath, newFileName); while ((readLength = is.read(buff)) != -1) { os.write(buff, 0, readLength); progressMonitor.updateWorkCompleted(readLength); if (progressMonitor.isCancelAllTasks()) { progressMonitor.setResult(ProgressMonitor.RESULT_CANCELLED); progressMonitor.setState(ProgressMonitor.STATE_READY); return; } } closeStreams(is, os); UnzipUtil.applyFileAttributes(fileHeader, new File(getOutputFileNameWithPath(outPath, newFileName)), unzipParameters); } catch (IOException e) { throw new ZipException(e); } catch (Exception e) { throw new ZipException(e); } finally { closeStreams(is, os); } } public ZipInputStream getInputStream() throws ZipException { if (fileHeader == null) { throw new ZipException("file header is null, cannot get inputstream"); } RandomAccessFile raf = null; try { raf = createFileHandler(InternalZipConstants.READ_MODE); String errMsg = "local header and file header do not match"; //checkSplitFile(); if (!checkLocalHeader()) throw new ZipException(errMsg); init(raf); long comprSize = localFileHeader.getCompressedSize(); long offsetStartOfData = localFileHeader.getOffsetStartOfData(); if (localFileHeader.isEncrypted()) { if (localFileHeader.getEncryptionMethod() == 
Zip4jConstants.ENC_METHOD_AES) { if (decrypter instanceof AESDecrypter) { comprSize -= (((AESDecrypter)decrypter).getSaltLength() + ((AESDecrypter)decrypter).getPasswordVerifierLength() + 10); offsetStartOfData += (((AESDecrypter)decrypter).getSaltLength() + ((AESDecrypter)decrypter).getPasswordVerifierLength()); } else { throw new ZipException("invalid decryptor when trying to calculate " + "compressed size for AES encrypted file: " + fileHeader.getFileName()); } } else if (localFileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) { comprSize -= InternalZipConstants.STD_DEC_HDR_SIZE; offsetStartOfData += InternalZipConstants.STD_DEC_HDR_SIZE; } } int compressionMethod = fileHeader.getCompressionMethod(); if (fileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) { if (fileHeader.getAesExtraDataRecord() != null) { compressionMethod = fileHeader.getAesExtraDataRecord().getCompressionMethod(); } else { throw new ZipException("AESExtraDataRecord does not exist for AES encrypted file: " + fileHeader.getFileName()); } } raf.seek(offsetStartOfData); switch (compressionMethod) { case Zip4jConstants.COMP_STORE: return new ZipInputStream(new PartInputStream(raf, offsetStartOfData, comprSize, this)); case Zip4jConstants.COMP_DEFLATE: return new ZipInputStream(new InflaterInputStream(raf, offsetStartOfData, comprSize, this)); default: throw new ZipException("compression type not supported"); } } catch (ZipException e) { if (raf != null) { try { raf.close(); } catch (IOException e1) { //ignore } } throw e; } catch (Exception e) { if (raf != null) { try { raf.close(); } catch (IOException e1) { } } throw new ZipException(e); } } private void init(RandomAccessFile raf) throws ZipException { if (localFileHeader == null) { throw new ZipException("local file header is null, cannot initialize input stream"); } try { initDecrypter(raf); } catch (ZipException e) { throw e; } catch (Exception e) { throw new ZipException(e); } } private void 
initDecrypter(RandomAccessFile raf) throws ZipException { if (localFileHeader == null) { throw new ZipException("local file header is null, cannot init decrypter"); } if (localFileHeader.isEncrypted()) { if (localFileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) { decrypter = new StandardDecrypter(fileHeader, getStandardDecrypterHeaderBytes(raf)); } else if (localFileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) { decrypter = new AESDecrypter(localFileHeader, getAESSalt(raf), getAESPasswordVerifier(raf)); } else { throw new ZipException("unsupported encryption method"); } } } private byte[] getStandardDecrypterHeaderBytes(RandomAccessFile raf) throws ZipException { try { byte[] headerBytes = new byte[InternalZipConstants.STD_DEC_HDR_SIZE]; raf.seek(localFileHeader.getOffsetStartOfData()); raf.read(headerBytes, 0, 12); return headerBytes; } catch (IOException e) { throw new ZipException(e); } catch (Exception e) { throw new ZipException(e); } } private byte[] getAESSalt(RandomAccessFile raf) throws ZipException { if (localFileHeader.getAesExtraDataRecord() == null) return null; try { AESExtraDataRecord aesExtraDataRecord = localFileHeader.getAesExtraDataRecord(); byte[] saltBytes = new byte[calculateAESSaltLength(aesExtraDataRecord)]; raf.seek(localFileHeader.getOffsetStartOfData()); raf.read(saltBytes); return saltBytes; } catch (IOException e) { throw new ZipException(e); } } private byte[] getAESPasswordVerifier(RandomAccessFile raf) throws ZipException { try { byte[] pvBytes = new byte[2]; raf.read(pvBytes); return pvBytes; } catch (IOException e) { throw new ZipException(e); } } private int calculateAESSaltLength(AESExtraDataRecord aesExtraDataRecord) throws ZipException { if (aesExtraDataRecord == null) { throw new ZipException("unable to determine salt length: AESExtraDataRecord is null"); } switch (aesExtraDataRecord.getAesStrength()) { case Zip4jConstants.AES_STRENGTH_128: return 8; case 
Zip4jConstants.AES_STRENGTH_192: return 12; case Zip4jConstants.AES_STRENGTH_256: return 16; default: throw new ZipException("unable to determine salt length: invalid aes key strength"); } } public void checkCRC() throws ZipException { if (fileHeader != null) { if (fileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) { if (decrypter != null && decrypter instanceof AESDecrypter) { byte[] tmpMacBytes = ((AESDecrypter)decrypter).getCalculatedAuthenticationBytes(); byte[] storedMac = ((AESDecrypter)decrypter).getStoredMac(); byte[] calculatedMac = new byte[InternalZipConstants.AES_AUTH_LENGTH]; if (calculatedMac == null || storedMac == null) { throw new ZipException("CRC (MAC) check failed for " + fileHeader.getFileName()); } System.arraycopy(tmpMacBytes, 0, calculatedMac, 0, InternalZipConstants.AES_AUTH_LENGTH); if (!Arrays.equals(calculatedMac, storedMac)) { throw new ZipException("invalid CRC (MAC) for file: " + fileHeader.getFileName()); } } } else { long calculatedCRC = crc.getValue() & 0xffffffffL; if (calculatedCRC != fileHeader.getCrc32()) { String errMsg = "invalid CRC for file: " + fileHeader.getFileName(); if (localFileHeader.isEncrypted() && localFileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) { errMsg += " - Wrong Password?"; } throw new ZipException(errMsg); } } } } // private void checkCRC() throws ZipException { // if (fileHeader != null) { // if (fileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) { // if (decrypter != null && decrypter instanceof AESDecrypter) { // byte[] tmpMacBytes = ((AESDecrypter)decrypter).getCalculatedAuthenticationBytes(); // byte[] actualMacBytes = ((AESDecrypter)decrypter).getStoredMac(); // if (tmpMacBytes == null || actualMacBytes == null) { // throw new ZipException("null mac value for AES encrypted file: " + fileHeader.getFileName()); // } // byte[] calcMacBytes = new byte[10]; // System.arraycopy(tmpMacBytes, 0, calcMacBytes, 0, 10); // if 
(!Arrays.equals(calcMacBytes, actualMacBytes)) { // throw new ZipException("invalid CRC(mac) for file: " + fileHeader.getFileName()); // } // } else { // throw new ZipException("invalid decryptor...cannot calculate mac value for file: " // + fileHeader.getFileName()); // } // } else if (unzipEngine != null) { // long calculatedCRC = unzipEngine.getCRC(); // long actualCRC = fileHeader.getCrc32(); // if (calculatedCRC != actualCRC) { // throw new ZipException("invalid CRC for file: " + fileHeader.getFileName()); // } // } // } // } private boolean checkLocalHeader() throws ZipException { RandomAccessFile rafForLH = null; try { rafForLH = checkSplitFile(); if (rafForLH == null) { rafForLH = new RandomAccessFile(new File(this.zipModel.getZipFile()), InternalZipConstants.READ_MODE); } HeaderReader headerReader = new HeaderReader(rafForLH); this.localFileHeader = headerReader.readLocalFileHeader(fileHeader); if (localFileHeader == null) { throw new ZipException("error reading local file header. 
Is this a valid zip file?"); } //TODO Add more comparision later if (localFileHeader.getCompressionMethod() != fileHeader.getCompressionMethod()) { return false; } return true; } catch (FileNotFoundException e) { throw new ZipException(e); } finally { if (rafForLH != null) { try { rafForLH.close(); } catch (IOException e) { // Ignore this } catch (Exception e) { //Ignore this } } } } private RandomAccessFile checkSplitFile() throws ZipException { if (zipModel.isSplitArchive()) { int diskNumberStartOfFile = fileHeader.getDiskNumberStart(); currSplitFileCounter = diskNumberStartOfFile + 1; String curZipFile = zipModel.getZipFile(); String partFile = null; if (diskNumberStartOfFile == zipModel.getEndCentralDirRecord().getNoOfThisDisk()) { partFile = zipModel.getZipFile(); } else { if (diskNumberStartOfFile >= 9) { partFile = curZipFile.substring(0, curZipFile.lastIndexOf(".")) + ".z" + (diskNumberStartOfFile+ 1); } else{ partFile = curZipFile.substring(0, curZipFile.lastIndexOf(".")) + ".z0" + (diskNumberStartOfFile+ 1); } } try { RandomAccessFile raf = new RandomAccessFile(partFile, InternalZipConstants.READ_MODE); if (currSplitFileCounter == 1) { byte[] splitSig = new byte[4]; raf.read(splitSig); if (Raw.readIntLittleEndian(splitSig, 0) != InternalZipConstants.SPLITSIG) { throw new ZipException("invalid first part split file signature"); } } return raf; } catch (FileNotFoundException e) { throw new ZipException(e); } catch (IOException e) { throw new ZipException(e); } } return null; } private RandomAccessFile createFileHandler(String mode) throws ZipException { if (this.zipModel == null || !Zip4jUtil.isStringNotNullAndNotEmpty(this.zipModel.getZipFile())) { throw new ZipException("input parameter is null in getFilePointer"); } try { RandomAccessFile raf = null; if (zipModel.isSplitArchive()) { raf = checkSplitFile(); } else { raf = new RandomAccessFile(new File(this.zipModel.getZipFile()), mode); } return raf; } catch (FileNotFoundException e) { throw new 
ZipException(e); } catch (Exception e) { throw new ZipException(e); } } private FileOutputStream getOutputStream(String outPath, String newFileName) throws ZipException { if (!Zip4jUtil.isStringNotNullAndNotEmpty(outPath)) { throw new ZipException("invalid output path"); } try { File file = new File(getOutputFileNameWithPath(outPath, newFileName)); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } if (file.exists()) { file.delete(); } FileOutputStream fileOutputStream = new FileOutputStream(file); return fileOutputStream; } catch (FileNotFoundException e) { throw new ZipException(e); } } private String getOutputFileNameWithPath(String outPath, String newFileName) throws ZipException { String fileName = null; if (Zip4jUtil.isStringNotNullAndNotEmpty(newFileName)) { fileName = newFileName; } else { fileName = fileHeader.getFileName(); } return outPath + System.getProperty("file.separator") + fileName; } public RandomAccessFile startNextSplitFile() throws IOException, FileNotFoundException { String currZipFile = zipModel.getZipFile(); String partFile = null; if (currSplitFileCounter == zipModel.getEndCentralDirRecord().getNoOfThisDisk()) { partFile = zipModel.getZipFile(); } else { if (currSplitFileCounter >= 9) { partFile = currZipFile.substring(0, currZipFile.lastIndexOf(".")) + ".z" + (currSplitFileCounter + 1); } else { partFile = currZipFile.substring(0, currZipFile.lastIndexOf(".")) + ".z0" + (currSplitFileCounter + 1); } } currSplitFileCounter++; try { if(!Zip4jUtil.checkExists(partFile)) { throw new IOException("zip split file does not exist: " + partFile); } } catch (ZipException e) { throw new IOException(e.getMessage()); } return new RandomAccessFile(partFile, InternalZipConstants.READ_MODE); } private void closeStreams(InputStream is, OutputStream os) throws ZipException { try { if (is != null) { is.close(); is = null; } } catch (IOException e) { if (e != null && Zip4jUtil.isStringNotNullAndNotEmpty(e.getMessage())) { if 
(e.getMessage().indexOf(" - Wrong Password?") >= 0) { throw new ZipException(e.getMessage()); } } } finally { try { if (os != null) { os.close(); os = null; } } catch (IOException e) { //do nothing } } } public void updateCRC(int b) { crc.update(b); } public void updateCRC(byte[] buff, int offset, int len) { if (buff != null) { crc.update(buff, offset, len); } } public FileHeader getFileHeader() { return fileHeader; } public IDecrypter getDecrypter() { return decrypter; } public ZipModel getZipModel() { return zipModel; } public LocalFileHeader getLocalFileHeader() { return localFileHeader; } }
/* * Copyright (c) 2006, 2013, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package java.util.zip; import java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; /** * Implements an output stream filter for uncompressing data stored in the * "deflate" compression format. * * @since 1.6 * @author David R Tribble (david@tribble.com) * * @see InflaterInputStream * @see DeflaterInputStream * @see DeflaterOutputStream */ public class InflaterOutputStream extends FilterOutputStream { /** Decompressor for this stream. */ protected final Inflater inf; /** Output buffer for writing uncompressed data. */ protected final byte[] buf; /** Temporary write buffer. */ private final byte[] wbuf = new byte[1]; /** Default decompressor is used. */ private boolean usesDefaultInflater = false; /** true iff {@link #close()} has been called. */ private boolean closed = false; /** * Checks to make sure that this stream has not been closed. */ private void ensureOpen() throws IOException { if (closed) { throw new IOException("Stream closed"); } } /** * Creates a new output stream with a default decompressor and buffer * size. * * @param out output stream to write the uncompressed data to * @throws NullPointerException if {@code out} is null */ public InflaterOutputStream(OutputStream out) { this(out, new Inflater()); usesDefaultInflater = true; } /** * Creates a new output stream with the specified decompressor and a * default buffer size. * * @param out output stream to write the uncompressed data to * @param infl decompressor ("inflater") for this stream * @throws NullPointerException if {@code out} or {@code infl} is null */ public InflaterOutputStream(OutputStream out, Inflater infl) { this(out, infl, 512); } /** * Creates a new output stream with the specified decompressor and * buffer size. 
* * @param out output stream to write the uncompressed data to * @param infl decompressor ("inflater") for this stream * @param bufLen decompression buffer size * @throws IllegalArgumentException if {@code bufLen <= 0} * @throws NullPointerException if {@code out} or {@code infl} is null */ public InflaterOutputStream(OutputStream out, Inflater infl, int bufLen) { super(out); // Sanity checks if (out == null) throw new NullPointerException("Null output"); if (infl == null) throw new NullPointerException("Null inflater"); if (bufLen <= 0) throw new IllegalArgumentException("Buffer size < 1"); // Initialize inf = infl; buf = new byte[bufLen]; } /** * Writes any remaining uncompressed data to the output stream and closes * the underlying output stream. * * @throws IOException if an I/O error occurs */ public void close() throws IOException { if (!closed) { // Complete the uncompressed output try { finish(); } finally { out.close(); closed = true; } } } /** * Flushes this output stream, forcing any pending buffered output bytes to be * written. * * @throws IOException if an I/O error occurs or this stream is already * closed */ public void flush() throws IOException { ensureOpen(); // Finish decompressing and writing pending output data if (!inf.finished()) { try { while (!inf.finished() && !inf.needsInput()) { int n; // Decompress pending output data n = inf.inflate(buf, 0, buf.length); if (n < 1) { break; } // Write the uncompressed output data block out.write(buf, 0, n); } super.flush(); } catch (DataFormatException ex) { // Improperly formatted compressed (ZIP) data String msg = ex.getMessage(); if (msg == null) { msg = "Invalid ZLIB data format"; } throw new ZipException(msg); } } } /** * Finishes writing uncompressed data to the output stream without closing * the underlying stream. Use this method when applying multiple filters in * succession to the same output stream. 
* * @throws IOException if an I/O error occurs or this stream is already * closed */ public void finish() throws IOException { ensureOpen(); // Finish decompressing and writing pending output data flush(); if (usesDefaultInflater) { inf.end(); } } /** * Writes a byte to the uncompressed output stream. * * @param b a single byte of compressed data to decompress and write to * the output stream * @throws IOException if an I/O error occurs or this stream is already * closed * @throws ZipException if a compression (ZIP) format error occurs */ public void write(int b) throws IOException { // Write a single byte of data wbuf[0] = (byte) b; write(wbuf, 0, 1); } /** * Writes an array of bytes to the uncompressed output stream. * * @param b buffer containing compressed data to decompress and write to * the output stream * @param off starting offset of the compressed data within {@code b} * @param len number of bytes to decompress from {@code b} * @throws IndexOutOfBoundsException if {@code off < 0}, or if * {@code len < 0}, or if {@code len > b.length - off} * @throws IOException if an I/O error occurs or this stream is already * closed * @throws NullPointerException if {@code b} is null * @throws ZipException if a compression (ZIP) format error occurs */ public void write(byte[] b, int off, int len) throws IOException { // Sanity checks ensureOpen(); if (b == null) { throw new NullPointerException("Null buffer for read"); } else if (off < 0 || len < 0 || len > b.length - off) { throw new IndexOutOfBoundsException(); } else if (len == 0) { return; } // Write uncompressed data to the output stream try { for (;;) { int n; // Fill the decompressor buffer with output data if (inf.needsInput()) { int part; if (len < 1) { break; } part = (len < 512 ? 
len : 512); inf.setInput(b, off, part); off += part; len -= part; } // Decompress and write blocks of output data do { n = inf.inflate(buf, 0, buf.length); if (n > 0) { out.write(buf, 0, n); } } while (n > 0); // Check the decompressor if (inf.finished()) { break; } if (inf.needsDictionary()) { throw new ZipException("ZLIB dictionary missing"); } } } catch (DataFormatException ex) { // Improperly formatted compressed (ZIP) data String msg = ex.getMessage(); if (msg == null) { msg = "Invalid ZLIB data format"; } throw new ZipException(msg); } } }
package e4m.net.tn3270;

import e4m.net.RequestProperties;
import e4m.net.telnet.TelnetConnection;
import e4m.net.telnet.TelnetOptions;
import e4m.net.telnet.TelnetProtocol;
import static e4m.ref.STD_0008.*;
import static e4m.ref.RFC_2355.*;
import java.io.IOException;
import java.net.ProtocolException;
import java.net.URL;
import java.net.URLConnection;

/*
 tn3270 : // userid @ hostname : port / session ? MODEL=IBM3279-2-E & LUNAME=DEV001 & LUPOOL=GRP02 & CODEPAGE=Cp037,Cp500

 MODEL    -> "Device-Type"
 LUNAME   -> "Device-Name"
 LUPOOL   -> "Resource-Name"
 CODEPAGE -> "Content-Encoding"
*/

/**
 * URL protocol handler for {@code tn3270:} URLs. Extends the plain telnet
 * handler with TN3270E (RFC 2355) negotiation: TERMINAL-TYPE, DEVICE-TYPE
 * and FUNCTIONS sub-options.
 */
public class Tn3270Protocol extends TelnetProtocol {

    /**
     * Opens a TN3270 connection for the given URL, seeding default request
     * properties and then mapping URL query parameters onto them.
     *
     * @param u the tn3270 URL to connect to
     * @return the new, not-yet-connected connection
     * @throws IOException if the connection cannot be created
     */
    @Override
    protected URLConnection openConnection(URL u) throws IOException {
        Tn3270Connection c = new Tn3270Connection(u, this);
        // Defaults; may be overridden by the query parameters mapped below.
        RequestProperties.set(c,
            "Content-Type", "tn3270/datastream",
            "Content-Encoding", "Cp037",
            "Device-Type", "IBM-3278-2"
        );
        RequestProperties.mapQuery(c,
            "Device-Type", "MODEL",
            "Device-Name", "LUNAME",
            "Resource-Name", "LUPOOL",
            "Content-Encoding", "CODEPAGE"
        );
        TelnetOptions.set(c,
            ECHO,
            TRANSMIT_BINARY,
            TERMINAL_TYPE,
            END_OF_RECORD,
            TN3270E
        );
        return c;
    }

    /**
     * Handles telnet commands. EOR (end of record) marks the input stream
     * to temporarily block it; everything else is delegated to the telnet
     * superclass.
     */
    @Override
    protected int InterpretAsCommand(TelnetConnection c, int cmd) throws IOException {
        switch (cmd) {
        default:
            return super.InterpretAsCommand(c, cmd);
        case EOR:
            c.getInputStream().mark(0); // temporarily block the input stream
            return IAC;
        }
    }

    /**
     * Dispatches telnet sub-option negotiations to the TN3270-specific
     * handlers; unrecognized options go to the superclass.
     */
    @Override
    protected void TelnetSubOption(TelnetConnection c, int opt) throws IOException {
        switch (opt) {
        default:
            super.TelnetSubOption(c, opt);
            // FIX: 'break' was missing here, so every option handled by the
            // superclass also fell through into the TN3270E case below.
            break;
        case TN3270E:
            Tn3270E((Tn3270Connection) c);
            break;
        case TERMINAL_TYPE:
            TerminalType((Tn3270Connection) c);
            break;
        }
    }

    // server -> IAC SB TERMINAL-TYPE SEND IAC SE
    // client -> IAC SB TERMINAL-TYPE IS ... IAC SE

    /** Handles a TERMINAL-TYPE sub-negotiation from the server. */
    void TerminalType(Tn3270Connection c) throws IOException {
        int b = c.readByte();
        switch (b) {
        default:
            // FIX: message typo "unkown" -> "unknown".
            throw new IllegalArgumentException(
                "unknown TERMINAL-TYPE sub option: " + toHex(b));
        case TERMINAL_SEND:
            TerminalType_Send(c);
            break;
        }
    }

    /** Replies IAC SB TERMINAL-TYPE IS &lt;terminal-type&gt; IAC SE. */
    void TerminalType_Send(Tn3270Connection c) throws IOException {
        c.writeBytes(
            new byte[] { (byte) IAC, (byte) SB, (byte) TERMINAL_TYPE, (byte) TERMINAL_IS },
            c.getHeaderField("Terminal-Type").getBytes(),
            new byte[] { (byte) IAC, (byte) SE }
        );
    }

    // server-> IAC SB TN3270E ... IAC SE
    // server-> IAC SB TN3270E SEND DEVICE-TYPE IAC SE
    // client-> IAC SB TN3270E DEVICE-TYPE REQUEST <device-type> [ [CONNECT <resource-name>] | [ASSOCIATE <device-name>] ] IAC SE
    // server-> IAC SB TN3270E DEVICE-TYPE IS <device-type> CONNECT <device-name> IAC SE
    // server-> IAC SB TN3270E DEVICE-TYPE REJECT REASON <reason-code> IAC SE

    /** Dispatches a TN3270E sub-negotiation on its first option byte. */
    void Tn3270E(Tn3270Connection c) throws IOException {
        int b = c.readByte();
        switch (b) {
        default:
            throw new ProtocolException(
                "unknown TN3270E option: " + toHex(b));
        case SEND:
            Tn3270E_Send(c);
            break;
        case DEVICE_TYPE:
            Tn3270E_DeviceType(c);
            break;
        case FUNCTIONS:
            Tn3270E_Functions(c);
            break;
        }
    }

    /** Handles DEVICE-TYPE IS / REJECT replies from the server. */
    void Tn3270E_DeviceType(Tn3270Connection c) throws IOException {
        int b = c.readByte();
        switch (b) {
        default:
            throw new ProtocolException(
                "unknown TN3270E DEVICE-TYPE verb: " + toHex(b));
        case IS:
            Tn3270E_DeviceType_Is(c);
            // Device type accepted: move on to FUNCTIONS negotiation.
            Server_Functions_Request(c);
            break;
        case REJECT:
            Tn3270E_DeviceType_Reject(c);
            break;
        }
    }

    /** Handles a server SEND request (currently only SEND DEVICE-TYPE). */
    void Tn3270E_Send(Tn3270Connection c) throws IOException {
        int b = c.readByte();
        switch (b) {
        default:
            throw new ProtocolException(
                "unknown TN3270E SEND option: " + toHex(b));
        case DEVICE_TYPE:
            Tn3270E_DeviceType_Request(c);
            break;
        }
        SubOption_End(c);
    }

    /**
     * Sends DEVICE-TYPE REQUEST with the configured device type, optionally
     * qualified by ASSOCIATE &lt;device-name&gt; or CONNECT
     * &lt;resource-name&gt; (device name takes precedence).
     */
    void Tn3270E_DeviceType_Request(Tn3270Connection c) throws IOException {
        String resourceName = c.getHeaderField("Resource-Name");
        String deviceName = c.getHeaderField("Device-Name");
        String deviceType = c.getHeaderField("Device-Type");
        byte[] prefix = { (byte) IAC, (byte) SB, (byte) TN3270E, (byte) DEVICE_TYPE, (byte) REQUEST };
        byte[] device = deviceType.getBytes();
        byte[] suffix = { (byte) IAC, (byte) SE };
        if (deviceName != null && deviceName.length() > 0) {
            c.writeBytes(prefix, device, new byte[] { (byte) ASSOCIATE }, deviceName.getBytes(), suffix);
        } else if (resourceName != null && resourceName.length() > 0) {
            c.writeBytes(prefix, device, new byte[] { (byte) CONNECT }, resourceName.getBytes(), suffix);
        } else {
            c.writeBytes(prefix, device, suffix);
        }
    }

    /**
     * Parses DEVICE-TYPE IS &lt;device-type&gt; CONNECT &lt;device-name&gt;
     * and records both values as header fields.
     */
    void Tn3270E_DeviceType_Is(Tn3270Connection c) throws IOException {
        StringBuilder dev = readSubOptionText(c);
        int pos = indexOf(dev, CONNECT);
        if (pos < 0) {
            throw new ProtocolException(
                "incomplete TN3270E DEVICE-TYPE description: " + dev);
        }
        c.setHeaderField("Device-Type", dev.substring(0, pos));
        c.setHeaderField("Device-Name", dev.substring(pos + 1));
    }

    /** Turns a DEVICE-TYPE REJECT into a descriptive ProtocolException. */
    void Tn3270E_DeviceType_Reject(Tn3270Connection c) throws IOException {
        int r = c.readByte();
        switch (r) {
        default:
            throw new ProtocolException(
                "Tn3270E DEVICE-TYPE " + c.getHeaderField("Device-Type")
                + " rejected: " + toHex(r));
        case REASON:
            throw new ProtocolException(
                "TN3270E DEVICE-TYPE " + c.getHeaderField("Device-Type")
                + " rejected: reason " + REASON(c.readByte()));
        }
        // SubOption_End(c);
    }

    // both-> IAC SB TN3270E FUNCTIONS REQUEST <function-list> IAC SE
    // both-> IAC SB TN3270E FUNCTIONS IS <function-list> IAC SE

    /** Handles FUNCTIONS REQUEST / IS from the server. */
    void Tn3270E_Functions(Tn3270Connection c) throws IOException {
        int b = c.readByte();
        switch (b) {
        default:
            throw new ProtocolException(
                "unknown TN3270E FUNCTIONS verb: " + toHex(b));
        case REQUEST:
            Tn3270E_Functions_Request(c);
            // Server_Functions_Request(c);
            break;
        case IS:
            Tn3270E_Functions_Is(c);
            break;
        }
    }

    /** Replies FUNCTIONS IS with the function set this client supports. */
    void Tn3270E_Functions_Request(Tn3270Connection c) throws IOException {
        c.writeBytes(
            IAC, SB, TN3270E, FUNCTIONS, IS,
            BIND_IMAGE, DATA_STREAM_CTL, RESPONSES, // SCS-CTL-CODES,
            SYSREQ,
            IAC, SE
        );
    }

    /** Sends our own FUNCTIONS REQUEST once the device type is agreed. */
    void Server_Functions_Request(Tn3270Connection c) throws IOException {
        c.writeBytes(
            IAC, SB, TN3270E, FUNCTIONS, REQUEST,
            BIND_IMAGE, RESPONSES, SYSREQ,
            IAC, SE
        );
    }

    /** Records the server-confirmed function list on the connection. */
    void Tn3270E_Functions_Is(Tn3270Connection c) throws IOException {
        c.serverFunction(-1); // reset marker before recording the new list
        StringBuilder dev = readSubOptionText(c);
        for (int i = 0; i < dev.length(); i++)
            c.serverFunction(dev.charAt(i));
    }

    /**
     * Returns the index of byte value {@code ch} in {@code seq}, or -1 if
     * it does not occur.
     */
    int indexOf(CharSequence seq, int ch) {
        for (int i = 0; i < seq.length(); i++) {
            if (seq.charAt(i) == (char) ch) return i;
        }
        return -1;
    }

    /**
     * Reads sub-option payload bytes up to the terminating IAC SE,
     * un-escaping doubled IAC bytes.
     *
     * @throws ProtocolException on end-of-stream or an unexpected IAC
     *     sequence inside the payload
     */
    StringBuilder readSubOptionText(Tn3270Connection c) throws IOException {
        int b = 0;
        StringBuilder buf = new StringBuilder();
        for (;;) {
            b = c.nextByte();
            if (b == IAC) {
                b = c.nextByte();
                if (b == SE) break;
                if (b != IAC) throw new ProtocolException("unexpected IAC/" + toHex(b) + " sequence");
            }
            if (b < 0) throw new ProtocolException("broken IAC/SB sequence");
            buf.append((char) b);
        }
        return buf;
    }

    /** Maps a DEVICE-TYPE REJECT reason code to its RFC 2355 name. */
    static String REASON(int b) {
        switch (b) {
        default: return toHex(b);
        case 0: return "CONN-PARTNER";
        case 1: return "DEVICE-IN-USE";
        case 2: return "INV-ASSOCIATE";
        case 3: return "INV-NAME";
        case 4: return "INV-DEVICE-TYPE";
        case 5: return "TYPE-NAME-ERROR";
        case 6: return "UNKNOWN-ERROR";
        case 7: return "UNSUPPORTED-REQ";
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.waveprotocol.box.server.frontend;

import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import org.waveprotocol.box.common.DeltaSequence;
import org.waveprotocol.wave.model.id.IdFilter;
import org.waveprotocol.wave.model.id.WaveId;
import org.waveprotocol.wave.model.id.WaveletId;
import org.waveprotocol.wave.model.id.WaveletName;
import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta;
import org.waveprotocol.wave.model.version.HashedVersion;
import org.waveprotocol.wave.util.logging.Log;

import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A client's subscription to a wave view.
 *
 * @author anorth@google.com (Alex North)
 */
final class WaveViewSubscription {

  /**
   * State of a wavelet endpoint.
   */
  private static final class WaveletChannelState {
    /**
     * Resulting versions of deltas submitted on this wavelet for which
     * the outbound delta has not yet been seen.
     */
    public final Collection<Long> submittedEndVersions = Sets.newHashSet();
    /**
     * Resulting version of the most recent outbound delta.
     */
    public HashedVersion lastVersion = null;
    /**
     * Whether a submit request is awaiting a response.
     */
    public boolean hasOutstandingSubmit = false;
    /**
     * Outbound deltas held back while a submit is in-flight.
     */
    public List<TransformedWaveletDelta> heldBackDeltas = Lists.newLinkedList();
  }

  private static final Log LOG = Log.get(WaveViewSubscription.class);

  private final WaveId waveId;
  private final IdFilter waveletIdFilter;
  private final ClientFrontend.OpenListener openListener;
  private final String channelId;
  private final LoadingCache<WaveletId, WaveletChannelState> channels =
      CacheBuilder.newBuilder().build(new CacheLoader<WaveletId, WaveletChannelState>() {
        @Override
        public WaveletChannelState load(WaveletId id) {
          return new WaveletChannelState();
        }
      });

  public WaveViewSubscription(WaveId waveId, IdFilter waveletIdFilter, String channelId,
      ClientFrontend.OpenListener openListener) {
    Preconditions.checkNotNull(waveId, "null wave id");
    Preconditions.checkNotNull(waveletIdFilter, "null filter");
    Preconditions.checkNotNull(openListener, "null listener");
    Preconditions.checkNotNull(channelId, "null channel id");
    this.waveId = waveId;
    this.waveletIdFilter = waveletIdFilter;
    this.channelId = channelId;
    this.openListener = openListener;
  }

  public WaveId getWaveId() {
    return waveId;
  }

  public ClientFrontend.OpenListener getOpenListener() {
    return openListener;
  }

  public String getChannelId() {
    return channelId;
  }

  /**
   * Checks whether the subscription includes a wavelet.
   */
  public boolean includes(WaveletId waveletId) {
    return IdFilter.accepts(waveletIdFilter, waveletId);
  }

  /** This client sent a submit request */
  public synchronized void submitRequest(WaveletName waveletName) {
    // A given client can only have one outstanding submit per wavelet.
    WaveletChannelState state = channelState(waveletName.waveletId);
    Preconditions.checkState(!state.hasOutstandingSubmit,
        "Received overlapping submit requests to subscription %s", this);
    // FIX: log-message typo "oustandinding" -> "outstanding".
    LOG.info("Submit outstanding on channel " + channelId);
    state.hasOutstandingSubmit = true;
  }

  /**
   * A submit response for the given wavelet and version has been sent to this
   * client.
   */
  public synchronized void submitResponse(WaveletName waveletName, HashedVersion version) {
    Preconditions.checkNotNull(version, "Null delta application version");
    WaveletChannelState state = channelState(waveletName.waveletId);
    Preconditions.checkState(state.hasOutstandingSubmit);
    state.submittedEndVersions.add(version.getVersion());
    state.hasOutstandingSubmit = false;
    LOG.info("Submit resolved on channel " + channelId);

    // Forward any queued deltas.
    List<TransformedWaveletDelta> filteredDeltas = filterOwnDeltas(state.heldBackDeltas, state);
    if (!filteredDeltas.isEmpty()) {
      sendUpdate(waveletName, filteredDeltas, null);
    }
    state.heldBackDeltas.clear();
  }

  /**
   * Sends deltas for this subscription (if appropriate).
   *
   * If the update contains a delta for a wavelet where the delta is actually
   * from this client, the delta is dropped. If there's an outstanding submit
   * request the delta is queued until the submit finishes.
   */
  public synchronized void onUpdate(WaveletName waveletName, DeltaSequence deltas) {
    Preconditions.checkArgument(!deltas.isEmpty());
    WaveletChannelState state = channelState(waveletName.waveletId);
    checkUpdateVersion(waveletName, deltas, state);
    state.lastVersion = deltas.getEndVersion();
    if (state.hasOutstandingSubmit) {
      state.heldBackDeltas.addAll(deltas);
    } else {
      List<TransformedWaveletDelta> filteredDeltas = filterOwnDeltas(deltas, state);
      if (!filteredDeltas.isEmpty()) {
        sendUpdate(waveletName, filteredDeltas, null);
      }
    }
  }

  /**
   * Fetches (creating on demand) the channel state for a wavelet.
   *
   * The cache loader never fails, so an {@link ExecutionException} here is a
   * programming error and is rethrown unchecked.
   */
  private WaveletChannelState channelState(WaveletId waveletId) {
    try {
      return channels.get(waveletId);
    } catch (ExecutionException ex) {
      throw new RuntimeException(ex);
    }
  }

  /**
   * Filters any deltas sent by this client from a list of received deltas.
   *
   * @param deltas received deltas
   * @param state channel state
   * @return deltas, if none are from this client, or a copy with own client's
   *         deltas removed
   */
  private List<TransformedWaveletDelta> filterOwnDeltas(List<TransformedWaveletDelta> deltas,
      WaveletChannelState state) {
    List<TransformedWaveletDelta> filteredDeltas = deltas;
    if (!state.submittedEndVersions.isEmpty()) {
      filteredDeltas = Lists.newArrayList();
      for (TransformedWaveletDelta delta : deltas) {
        long deltaEndVersion = delta.getResultingVersion().getVersion();
        if (!state.submittedEndVersions.remove(deltaEndVersion)) {
          filteredDeltas.add(delta);
        }
      }
    }
    return filteredDeltas;
  }

  /**
   * Sends a commit notice for this subscription.
   */
  public synchronized void onCommit(WaveletName waveletName, HashedVersion committedVersion) {
    sendUpdate(waveletName, ImmutableList.<TransformedWaveletDelta>of(), committedVersion);
  }

  /**
   * Sends an update to the client.
   */
  private void sendUpdate(WaveletName waveletName, List<TransformedWaveletDelta> deltas,
      HashedVersion committedVersion) {
    // Channel id needs to be sent with every message until views can be
    // closed, see bug 128.
    openListener.onUpdate(waveletName, null, deltas, committedVersion, null, channelId);
  }

  /**
   * Checks the update targets the next expected version.
   */
  private void checkUpdateVersion(WaveletName waveletName, DeltaSequence deltas,
      WaveletChannelState state) {
    if (state.lastVersion != null) {
      long expectedVersion = state.lastVersion.getVersion();
      long targetVersion = deltas.getStartVersion();
      Preconditions.checkState(targetVersion == expectedVersion,
          "Subscription expected delta for %s targeting %s, was %s", waveletName, expectedVersion,
          targetVersion);
    }
  }

  @Override
  public String toString() {
    return "[WaveViewSubscription wave: " + waveId + ", channel: " + channelId + "]";
  }
}
/* * Copyright 2009-2010 WSO2, Inc. (http://wso2.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.eclipse.gmf.esb.persistence; import java.util.HashMap; import java.util.Map; import org.wso2.developerstudio.eclipse.gmf.esb.APIResource; import org.wso2.developerstudio.eclipse.gmf.esb.AddressingEndpoint; import org.wso2.developerstudio.eclipse.gmf.esb.BAMMediator; import org.wso2.developerstudio.eclipse.gmf.esb.BeanMediator; import org.wso2.developerstudio.eclipse.gmf.esb.BuilderMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CallMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CloudConnectorOperation; import org.wso2.developerstudio.eclipse.gmf.esb.ConditionalRouterMediator; import org.wso2.developerstudio.eclipse.gmf.esb.DataMapperMediator; import org.wso2.developerstudio.eclipse.gmf.esb.EJBMediator; import org.wso2.developerstudio.eclipse.gmf.esb.FastXSLTMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ForEachMediator; import org.wso2.developerstudio.eclipse.gmf.esb.HTTPEndpoint; import org.wso2.developerstudio.eclipse.gmf.esb.LoopBackMediator; import org.wso2.developerstudio.eclipse.gmf.esb.NamedEndpoint; import org.wso2.developerstudio.eclipse.gmf.esb.PublishEventMediator; import org.wso2.developerstudio.eclipse.gmf.esb.RecipientListEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.RespondMediator; import org.wso2.developerstudio.eclipse.gmf.esb.RouterMediator; import 
org.wso2.developerstudio.eclipse.gmf.esb.AddressEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.AggregateMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CacheMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CallTemplateMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CalloutMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ClassMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CloneMediator; import org.wso2.developerstudio.eclipse.gmf.esb.CommandMediator; import org.wso2.developerstudio.eclipse.gmf.esb.DBLookupMediator; import org.wso2.developerstudio.eclipse.gmf.esb.DBReportMediator; import org.wso2.developerstudio.eclipse.gmf.esb.DefaultEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.DropMediator; import org.wso2.developerstudio.eclipse.gmf.esb.EnqueueMediator; import org.wso2.developerstudio.eclipse.gmf.esb.EnrichMediator; import org.wso2.developerstudio.eclipse.gmf.esb.EntitlementMediator; import org.wso2.developerstudio.eclipse.gmf.esb.EsbNode; import org.wso2.developerstudio.eclipse.gmf.esb.EventMediator; import org.wso2.developerstudio.eclipse.gmf.esb.FailoverEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.FaultMediator; import org.wso2.developerstudio.eclipse.gmf.esb.FilterMediator; import org.wso2.developerstudio.eclipse.gmf.esb.HeaderMediator; import org.wso2.developerstudio.eclipse.gmf.esb.IterateMediator; import org.wso2.developerstudio.eclipse.gmf.esb.LoadBalanceEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.LogMediator; import org.wso2.developerstudio.eclipse.gmf.esb.MessageMediator; import org.wso2.developerstudio.eclipse.gmf.esb.OAuthMediator; import org.wso2.developerstudio.eclipse.gmf.esb.PayloadFactoryMediator; import org.wso2.developerstudio.eclipse.gmf.esb.PropertyMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ProxyService; import org.wso2.developerstudio.eclipse.gmf.esb.RMSequenceMediator; import 
org.wso2.developerstudio.eclipse.gmf.esb.RuleMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ScriptMediator; import org.wso2.developerstudio.eclipse.gmf.esb.SendMediator; import org.wso2.developerstudio.eclipse.gmf.esb.Sequence; import org.wso2.developerstudio.eclipse.gmf.esb.Sequences; import org.wso2.developerstudio.eclipse.gmf.esb.SmooksMediator; import org.wso2.developerstudio.eclipse.gmf.esb.SpringMediator; import org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator; import org.wso2.developerstudio.eclipse.gmf.esb.SwitchMediator; import org.wso2.developerstudio.eclipse.gmf.esb.Template; import org.wso2.developerstudio.eclipse.gmf.esb.TemplateEndpoint; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediator; import org.wso2.developerstudio.eclipse.gmf.esb.TransactionMediator; import org.wso2.developerstudio.eclipse.gmf.esb.URLRewriteMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ValidateMediator; import org.wso2.developerstudio.eclipse.gmf.esb.WSDLEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.XQueryMediator; import org.wso2.developerstudio.eclipse.gmf.esb.XSLTMediator; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.APIResourceTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.AddresingEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.AddressEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.AggregateMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.BAMMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.BeanMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.BuilderMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CacheMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CallMediatorTransformer; 
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CallTemplateMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CalloutMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ClassMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CloneMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CloudConnectorOperationTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.CommandMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ConditionalRouterMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.DBLookupMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.DBReportMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.DataMapperMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.DefaultEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.DropMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.EJBMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.EnqueueMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.EnrichMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.EntitlementMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.EventMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.FailoverEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.FastXSLTMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.FaultMediatorTransformer; import 
org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.FilterMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ForEachMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.HTTPEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.HeaderMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.IterateMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.LoadBalanceEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.LogMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.LoopBackMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.MessageMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.NamedEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.OAuthMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.PayloadFactoryMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.PropertyMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ProxyServiceTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.PublishEventMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.RecipientListEndPointTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.RespondMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.RouterMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.RuleMediatorTransformer; import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ScriptMediatorTransformer; import 
org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SendMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SequenceMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SequenceTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SmooksMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SpringMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.StoreMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.SwitchMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.TemplateEndPointTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.TemplateTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ThrottleMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.TransactionMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.URLReWriterMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.ValidateMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.WSDLEndPointTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.XQueryMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.XSLTMediatorTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.custom.MediatorSerializerRegister;

/**
 * A registry of visual model object transformers.
 */
public class EsbTransformerRegistry {

    /**
     * Singleton instance. Access only through {@link #getInstance()}.
     */
    private static EsbTransformerRegistry singleton;

    /**
     * Visual model type to transformers map. Populated once in the
     * constructor, so it can be final.
     */
    private final Map<Class<?>, EsbNodeTransformer> transformersMap;

    /**
     * Creates a new transformer registry, registering one transformer per
     * visual model type.
     */
    private EsbTransformerRegistry() {
        MediatorSerializerRegister.registerSerializers(); /* Register Custom serializers */
        transformersMap = new HashMap<Class<?>, EsbNodeTransformer>();
        addTransformer(ProxyService.class, new ProxyServiceTransformer());
        addTransformer(DefaultEndPoint.class, new DefaultEndPointTransformer());
        addTransformer(AddressEndPoint.class, new AddressEndPointTransformer());
        addTransformer(DropMediator.class, new DropMediatorTransformer());
        addTransformer(FilterMediator.class, new FilterMediatorTransformer());
        addTransformer(LogMediator.class, new LogMediatorTransformer());
        addTransformer(PropertyMediator.class, new PropertyMediatorTransformer());
        addTransformer(EnrichMediator.class, new EnrichMediatorTransformer());
        addTransformer(XSLTMediator.class, new XSLTMediatorTransformer());
        addTransformer(FastXSLTMediator.class, new FastXSLTMediatorTransformer());
        addTransformer(SwitchMediator.class, new SwitchMediatorTransformer());
        addTransformer(MessageMediator.class, new MessageMediatorTransformer());
        addTransformer(ClassMediator.class, new ClassMediatorTransformer());
        addTransformer(FaultMediator.class, new FaultMediatorTransformer());
        addTransformer(EventMediator.class, new EventMediatorTransformer());
        addTransformer(FailoverEndPoint.class, new FailoverEndPointTransformer());
        addTransformer(WSDLEndPoint.class, new WSDLEndPointTransformer());
        addTransformer(LoadBalanceEndPoint.class, new LoadBalanceEndPointTransformer());
        addTransformer(XQueryMediator.class, new XQueryMediatorTransformer());
        addTransformer(Sequence.class, new SequenceMediatorTransformer());
        addTransformer(DBLookupMediator.class, new DBLookupMediatorTransformer());
        addTransformer(DBReportMediator.class, new DBReportMediatorTransformer());
        addTransformer(HeaderMediator.class, new HeaderMediatorTransformer());
        addTransformer(CacheMediator.class, new CacheMediatorTransformer());
        addTransformer(AggregateMediator.class, new AggregateMediatorTransformer());
        addTransformer(CalloutMediator.class, new CalloutMediatorTransformer());
        addTransformer(TransactionMediator.class, new TransactionMediatorTransformer());
        addTransformer(IterateMediator.class, new IterateMediatorTransformer());
        addTransformer(CloneMediator.class, new CloneMediatorTransformer());
        addTransformer(ThrottleMediator.class, new ThrottleMediatorTransformer());
        addTransformer(OAuthMediator.class, new OAuthMediatorTransformer());
        addTransformer(RuleMediator.class, new RuleMediatorTransformer());
        addTransformer(SendMediator.class, new SendMediatorTransformer());
        addTransformer(SpringMediator.class, new SpringMediatorTransformer());
        addTransformer(ScriptMediator.class, new ScriptMediatorTransformer());
        addTransformer(SmooksMediator.class, new SmooksMediatorTransformer());
        addTransformer(EntitlementMediator.class, new EntitlementMediatorTransformer());
        addTransformer(CommandMediator.class, new CommandMediatorTransformer());
        addTransformer(StoreMediator.class, new StoreMediatorTransformer());
        addTransformer(EnqueueMediator.class, new EnqueueMediatorTransformer());
        addTransformer(PayloadFactoryMediator.class, new PayloadFactoryMediatorTransformer());
        addTransformer(CallTemplateMediator.class, new CallTemplateMediatorTransformer());
        addTransformer(RouterMediator.class, new RouterMediatorTransformer());
        addTransformer(ConditionalRouterMediator.class, new ConditionalRouterMediatorTransformer());
        addTransformer(ValidateMediator.class, new ValidateMediatorTransformer());
        addTransformer(URLRewriteMediator.class, new URLReWriterMediatorTransformer());
        addTransformer(BuilderMediator.class, new BuilderMediatorTransformer());
        addTransformer(NamedEndpoint.class, new NamedEndPointTransformer());
        addTransformer(APIResource.class, new APIResourceTransformer());
        addTransformer(Template.class, new TemplateTransformer());
        addTransformer(BAMMediator.class, new BAMMediatorTransformer());
        addTransformer(EJBMediator.class, new EJBMediatorTransformer());
        addTransformer(BeanMediator.class, new BeanMediatorTransformer());
        addTransformer(Sequences.class, new SequenceTransformer());
        addTransformer(AddressingEndpoint.class, new AddresingEndPointTransformer());
        addTransformer(RecipientListEndPoint.class, new RecipientListEndPointTransformer());
        addTransformer(HTTPEndpoint.class, new HTTPEndPointTransformer());
        addTransformer(TemplateEndpoint.class, new TemplateEndPointTransformer());
        addTransformer(CloudConnectorOperation.class, new CloudConnectorOperationTransformer());
        addTransformer(LoopBackMediator.class, new LoopBackMediatorTransformer());
        addTransformer(RespondMediator.class, new RespondMediatorTransformer());
        addTransformer(CallMediator.class, new CallMediatorTransformer());
        addTransformer(DataMapperMediator.class, new DataMapperMediatorTransformer());
        addTransformer(ForEachMediator.class, new ForEachMediatorTransformer());
        addTransformer(PublishEventMediator.class, new PublishEventMediatorTransformer());
    }

    /**
     * @return singleton instance.
     */
    // FIX: lazy initialization was not thread-safe; synchronize so two
    // threads racing here cannot each construct a registry.
    public static synchronized EsbTransformerRegistry getInstance() {
        if (null == singleton) {
            singleton = new EsbTransformerRegistry();
        }
        return singleton;
    }

    /**
     * Adds a new transformer into this registry.
     *
     * @param <K> visual model type
     * @param visualModelClass visual model class to register against
     * @param transformer transformer handling that class
     */
    public <K extends EsbNode> void addTransformer(Class<K> visualModelClass,
            EsbNodeTransformer transformer) {
        transformersMap.put(visualModelClass, transformer);
    }

    /**
     * Attempts to locate a transformer corresponding to the specified visual
     * model object.
     *
     * @param <K> visual model type
     * @param esbNode node whose transformer is wanted
     * @return the registered transformer, or null if none is registered for
     *         the node's instance class
     */
    public <K extends EsbNode> EsbNodeTransformer getTransformer(K esbNode) {
        return transformersMap.get(esbNode.eClass().getInstanceClass());
    }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.search.Filter; import org.apache.lucene.search.NumericRangeFilter; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericFloatAnalyzer; import org.elasticsearch.index.cache.field.data.FieldDataCache; import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider; import org.elasticsearch.index.field.data.FieldDataType; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.query.QueryParseContext; import 
org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
import static org.elasticsearch.index.mapper.MapperBuilders.floatField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;

/**
 * Field mapper for the {@code "float"} mapping type: indexes single-precision
 * float values as Lucene numeric (prefix-coded trie) terms and builds the
 * corresponding numeric range queries/filters.
 */
public class FloatFieldMapper extends NumberFieldMapper<Float> {

    public static final String CONTENT_TYPE = "float";

    /** Default field settings for float fields, derived from the generic numeric defaults. */
    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final FieldType FLOAT_FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.NUMBER_FIELD_TYPE);

        static {
            // Frozen so the shared default FieldType cannot be mutated by accident.
            FLOAT_FIELD_TYPE.freeze();
        }

        public static final Float NULL_VALUE = null;
    }

    /** Builder for {@link FloatFieldMapper}; adds the float-typed {@code null_value} option. */
    public static class Builder extends NumberFieldMapper.Builder<Builder, FloatFieldMapper> {

        protected Float nullValue = Defaults.NULL_VALUE;

        public Builder(String name) {
            // Copy the default field type so per-field customization does not touch the frozen default.
            super(name, new FieldType(Defaults.FLOAT_FIELD_TYPE));
            builder = this;
        }

        /** Sets the value substituted when a document supplies an explicit null. */
        public Builder nullValue(float nullValue) {
            this.nullValue = nullValue;
            return this;
        }

        @Override
        public FloatFieldMapper build(BuilderContext context) {
            // Norms carry only the boost for numeric fields; drop them when boost is the default.
            fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
            FloatFieldMapper fieldMapper = new FloatFieldMapper(buildNames(context), precisionStep, fuzzyFactor, boost, fieldType,
                    nullValue, ignoreMalformed(context), provider, similarity);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }
    }

    /** Parses the {@code "float"} mapping definition from the (already deserialized) JSON node map. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            FloatFieldMapper.Builder builder = floatField(name);
            // Shared numeric options (precision_step, fuzzy_factor, ...) are handled generically.
            parseNumberField(builder, name, node, parserContext);
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    builder.nullValue(nodeFloatValue(propNode));
                }
            }
            return builder;
        }
    }

    // Substitute for explicit nulls / empty strings; null means "skip the field entirely".
    private Float nullValue;
    // Cached string form of nullValue, fed into the _all field when configured.
    private String nullValueAsString;

    protected FloatFieldMapper(Names names, int precisionStep, String fuzzyFactor, float boost, FieldType fieldType,
                               Float nullValue, Explicit<Boolean> ignoreMalformed, PostingsFormatProvider provider,
                               SimilarityProvider similarity) {
        super(names, precisionStep, fuzzyFactor, boost, fieldType, ignoreMalformed,
                new NamedAnalyzer("_float/" + precisionStep, new NumericFloatAnalyzer(precisionStep)),
                new NamedAnalyzer("_float/max", new NumericFloatAnalyzer(Integer.MAX_VALUE)),
                provider, similarity);
        this.nullValue = nullValue;
        this.nullValueAsString = nullValue == null ? null : nullValue.toString();
    }

    @Override
    protected int maxPrecisionStep() {
        // Floats are encoded as sortable 32-bit ints, so 32 disables extra trie levels.
        return 32;
    }

    @Override
    public Float value(Field field) {
        BytesRef value = field.binaryValue();
        if (value == null) {
            return null;
        }
        // NOTE(review): decodes from value.bytes without honoring value.offset —
        // correct only if the stored BytesRef always starts at offset 0; confirm.
        return Numbers.bytesToFloat(value.bytes);
    }

    @Override
    public Float valueFromString(String value) {
        return Float.parseFloat(value);
    }

    @Override
    public String indexedValue(String value) {
        // Convert to the sortable-int representation and prefix-code it, the same
        // encoding the numeric analyzer produces at index time.
        int intValue = NumericUtils.floatToSortableInt(Float.parseFloat(value));
        BytesRef bytesRef = new BytesRef();
        NumericUtils.intToPrefixCoded(intValue, precisionStep(), bytesRef);
        return bytesRef.utf8ToString();
    }

    @Override
    public Query fuzzyQuery(String value, String minSim, int prefixLength, int maxExpansions, boolean transpositions) {
        // Numeric "fuzziness" is interpreted as a +/- range around the value.
        float iValue = Float.parseFloat(value);
        float iSim = Float.parseFloat(minSim);
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }

    @Override
    public Query fuzzyQuery(String value, double minSim, int prefixLength, int maxExpansions, boolean transpositions) {
        float iValue = Float.parseFloat(value);
        // Scale the similarity by the configured fuzzy factor before widening the range.
        float iSim = (float) (minSim * dFuzzyFactor);
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }

    @Override
    public Query fieldQuery(String value, @Nullable QueryParseContext context) {
        // Exact match expressed as a degenerate inclusive range [v, v].
        float fValue = Float.parseFloat(value);
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, fValue, fValue, true, true);
    }

    @Override
    public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        // Null bounds mean open-ended on that side.
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : Float.parseFloat(lowerTerm),
                upperTerm == null ? null : Float.parseFloat(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
        float fValue = Float.parseFloat(value);
        return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep, fValue, fValue, true, true);
    }

    @Override
    public Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : Float.parseFloat(lowerTerm),
                upperTerm == null ? null : Float.parseFloat(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter rangeFilter(FieldDataCache fieldDataCache, String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        // Field-data-backed variant: filters on loaded values instead of the terms index.
        return NumericRangeFieldDataFilter.newFloatRange(fieldDataCache, names.indexName(),
                lowerTerm == null ? null : Float.parseFloat(lowerTerm),
                upperTerm == null ? null : Float.parseFloat(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter nullValueFilter() {
        if (nullValue == null) {
            return null;
        }
        // Documents with an explicit null were indexed with nullValue, so match exactly that.
        return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
                nullValue,
                nullValue,
                true, true);
    }

    @Override
    protected boolean customBoost() {
        // Boost is applied on the produced field (see innerParseCreateField), not generically.
        return true;
    }

    @Override
    protected Field innerParseCreateField(ParseContext context) throws IOException {
        float value;
        float boost = this.boost;
        if (context.externalValueSet()) {
            // Value was supplied programmatically rather than read from the source parser.
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                if (nullValue == null) {
                    return null;
                }
                value = nullValue;
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    // Empty string is treated like null.
                    if (nullValue == null) {
                        return null;
                    }
                    value = nullValue;
                } else {
                    value = Float.parseFloat(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).floatValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(names.fullName(), Float.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL
                    || (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                // Explicit null or empty string in the document.
                if (nullValue == null) {
                    return null;
                }
                value = nullValue;
                if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(names.fullName(), nullValueAsString, boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // Object form: { "value": <float>, "boost": <float> } (with _-prefixed aliases).
                XContentParser.Token token;
                String currentFieldName = null;
                Float objValue = nullValue;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = parser.floatValue();
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return null;
                }
                value = objValue;
            } else {
                // Plain scalar value.
                value = parser.floatValue();
                if (context.includeInAll(includeInAll, this)) {
                    context.allEntries().addText(names.fullName(), parser.text(), boost);
                }
            }
        }
        CustomFloatNumericField field = new CustomFloatNumericField(this, value, fieldType);
        field.setBoost(boost);
        return field;
    }

    @Override
    public FieldDataType fieldDataType() {
        return FieldDataType.DefaultTypes.FLOAT;
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        super.merge(mergeWith, mergeContext);
        if (!this.getClass().equals(mergeWith.getClass())) {
            // Type conflict is reported by super.merge; nothing float-specific to do.
            return;
        }
        if (!mergeContext.mergeFlags().simulate()) {
            // Only null_value / its cached string form are float-specific mergeable settings.
            this.nullValue = ((FloatFieldMapper) mergeWith).nullValue;
            this.nullValueAsString = ((FloatFieldMapper) mergeWith).nullValueAsString;
        }
    }

    @Override
    protected void doXContentBody(XContentBuilder builder) throws IOException {
        super.doXContentBody(builder);
        // Emit only settings that differ from the defaults, to keep mappings minimal.
        if (indexed() != Defaults.FLOAT_FIELD_TYPE.indexed() ||
                analyzed() != Defaults.FLOAT_FIELD_TYPE.tokenized()) {
            builder.field("index", indexTokenizeOptionToString(indexed(), analyzed()));
        }
        if (stored() != Defaults.FLOAT_FIELD_TYPE.stored()) {
            builder.field("store", stored());
        }
        if (storeTermVectors() != Defaults.FLOAT_FIELD_TYPE.storeTermVectors()) {
            builder.field("store_term_vector", storeTermVectors());
        }
        if (storeTermVectorOffsets() != Defaults.FLOAT_FIELD_TYPE.storeTermVectorOffsets()) {
            builder.field("store_term_vector_offsets", storeTermVectorOffsets());
        }
        if (storeTermVectorPositions() != Defaults.FLOAT_FIELD_TYPE.storeTermVectorPositions()) {
            builder.field("store_term_vector_positions", storeTermVectorPositions());
        }
        if (storeTermVectorPayloads() != Defaults.FLOAT_FIELD_TYPE.storeTermVectorPayloads()) {
            builder.field("store_term_vector_payloads", storeTermVectorPayloads());
        }
        if (omitNorms() != Defaults.FLOAT_FIELD_TYPE.omitNorms()) {
            builder.field("omit_norms", omitNorms());
        }
        if (indexOptions() != Defaults.FLOAT_FIELD_TYPE.indexOptions()) {
            builder.field("index_options", indexOptionToString(indexOptions()));
        }
        if (precisionStep != Defaults.PRECISION_STEP) {
            builder.field("precision_step", precisionStep);
        }
        // NOTE(review): if fuzzyFactor is a String (the ctor takes String), this !=
        // is a reference comparison and may emit/skip the field unexpectedly when a
        // value-equal but distinct string was configured — confirm field type upstream.
        if (fuzzyFactor != Defaults.FUZZY_FACTOR) {
            builder.field("fuzzy_factor", fuzzyFactor);
        }
        if (similarity() != null) {
            builder.field("similarity", similarity().name());
        }
        if (nullValue != null) {
            builder.field("null_value", nullValue);
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        }
    }

    /**
     * Lucene field wrapper that stores the float as bytes (when stored) and emits
     * the numeric token stream for indexing.
     */
    public static class CustomFloatNumericField extends CustomNumericField {

        private final float number;

        private final NumberFieldMapper mapper;

        public CustomFloatNumericField(NumberFieldMapper mapper, float number, FieldType fieldType) {
            super(mapper, mapper.stored() ? Numbers.floatToBytes(number) : null, fieldType);
            this.mapper = mapper;
            this.number = number;
        }

        @Override
        public TokenStream tokenStream(Analyzer analyzer) throws IOException {
            if (fieldType().indexed()) {
                // Reuse the mapper's cached numeric token stream instead of allocating per field.
                return mapper.popCachedStream().setFloatValue(number);
            }
            return null;
        }

        @Override
        public String numericAsString() {
            return Float.toString(number);
        }
    }
}
/**
 * JBoss, Home of Professional Open Source
 * Copyright Red Hat, Inc., and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.aerogear.unifiedpush.message;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.exc.UnrecognizedPropertyException;
import org.jboss.aerogear.unifiedpush.message.windows.TileType;
import org.jboss.aerogear.unifiedpush.message.windows.Type;
import org.junit.Test;

import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests JSON (de)serialization of {@code UnifiedPushMessage}: each test builds
 * a raw map payload, runs it through {@code parsePushMessage} (helper defined
 * later in this class), and asserts the parsed message/criteria/config fields.
 */
public class UnifiedPushMessageTest {

    @Test
    public void shouldSerializeMessage() throws IOException {
        //when
        UnifiedPushMessage unifiedPushMessage = new UnifiedPushMessage();
        Message message = new Message();
        message.setAlert("HELLO!");
        message.getApns().setActionCategory("some value");
        message.setSound("default");
        message.setBadge(2);
        message.getWindows().setPage("/MainPage.xaml");
        message.getWindows().setType(Type.tile);
        message.getWindows().setTileType(TileType.TileWideBlockAndText01);
        message.getApns().setContentAvailable(true);
        final HashMap<String, Object> data = new HashMap<String, Object>();
        data.put("key", "value");
        data.put("key2", "other value");
        message.setUserData(data);
        message.setSimplePush("version=123");
        unifiedPushMessage.setMessage(message);
        final Criteria criteria = new Criteria();
        criteria.setAliases(Arrays.asList("someUsername"));
        criteria.setDeviceTypes(Arrays.asList("someDevice"));
        criteria.setCategories(Arrays.asList("someCategories"));
        criteria.setVariants(Arrays.asList("someVariantIDs"));
        unifiedPushMessage.setCriteria(criteria);
        final Config config = new Config();
        config.setTimeToLive(3360);
        unifiedPushMessage.setConfig(config);
        //then: serialized tree must match the reference fixture on the classpath
        final ObjectMapper mapper = new ObjectMapper();
        final JsonNode value = mapper.valueToTree(unifiedPushMessage);
        JsonNode format = mapper.reader().readTree(getClass().getResourceAsStream("/message-format.json"));
        assertEquals(format, value);
    }

    @Test
    public void createBroadcastMessage() throws IOException {
        // Payload with no criteria at all -> broadcast.
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals("default", unifiedPushMessage.getMessage().getSound());
        assertEquals(2, unifiedPushMessage.getMessage().getBadge());
        assertEquals("someValue", unifiedPushMessage.getMessage().getUserData().get("someKey"));
        // no TTL:
        assertEquals(-1, unifiedPushMessage.getConfig().getTimeToLive());
        // multiple access?
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals("someValue", unifiedPushMessage.getMessage().getUserData().get("someKey"));
        assertNull(unifiedPushMessage.getCriteria().getAliases());
        assertNull(unifiedPushMessage.getCriteria().getDeviceTypes());
        assertNull(unifiedPushMessage.getCriteria().getCategories());
        assertNull(unifiedPushMessage.getCriteria().getVariants());
        assertNull(unifiedPushMessage.getMessage().getSimplePush());
    }

    @Test
    public void createBroadcastMessageWithSimplePush() throws IOException {
        // Same broadcast payload, but with the hyphenated "simple-push" key set.
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        messageObject.put("simple-push", "version=123");
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals("default", unifiedPushMessage.getMessage().getSound());
        assertEquals(2, unifiedPushMessage.getMessage().getBadge());
        assertEquals("someValue", unifiedPushMessage.getMessage().getUserData().get("someKey"));
        // multiple access?
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals("someValue", unifiedPushMessage.getMessage().getUserData().get("someKey"));
        assertNull(unifiedPushMessage.getCriteria().getAliases());
        assertNull(unifiedPushMessage.getCriteria().getDeviceTypes());
        assertNull(unifiedPushMessage.getCriteria().getCategories());
        assertNull(unifiedPushMessage.getCriteria().getVariants());
        assertEquals("version=123", unifiedPushMessage.getMessage().getSimplePush());
    }

    @Test(expected = UnrecognizedPropertyException.class)
    public void createBroadcastMessageWithIncorrectSimplePush() throws IOException {
        // Camel-cased "simplePush" is not a recognized property and must fail parsing.
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        messageObject.put("simplePush", "version=123");
        // parse it:
        parsePushMessage(container);
    }

    @Test
    public void noBadgePayload() throws IOException {
        // Omitting "badge" must yield the -1 sentinel.
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals(-1, unifiedPushMessage.getMessage().getBadge());
    }

    @Test
    public void contentAvailable() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        apnsObject.put("content-available", true);
        messageObject.put("apns", apnsObject);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals(-1, unifiedPushMessage.getMessage().getBadge());
        assertTrue(unifiedPushMessage.getMessage().getApns().isContentAvailable());
    }

    @Test
    public void noContentAvailable() throws IOException {
        // Without the apns block, content-available must default to false.
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("Howdy", unifiedPushMessage.getMessage().getAlert());
        assertEquals(-1, unifiedPushMessage.getMessage().getBadge());
        assertFalse(unifiedPushMessage.getMessage().getApns().isContentAvailable());
    }

    @Test
    public void testAliasCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        messageObject.put("simple-push", "version=123");
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("alias", Arrays.asList("foo@bar.org"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getAliases());
        assertEquals(1, unifiedPushMessage.getCriteria().getAliases().size());
        assertEquals("foo@bar.org", unifiedPushMessage.getCriteria().getAliases().get(0));
    }

    @Test
    public void testAction() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        apnsObject.put("action", "View");
        messageObject.put("alert", "howdy");
        messageObject.put("apns", apnsObject);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("View", unifiedPushMessage.getMessage().getApns().getAction());
    }

    @Test
    public void testUrlArgs() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        final String[] urlArgs = { "Arg1", "Arg2" };
        apnsObject.put("title", "I'm a Title");
        apnsObject.put("url-args", urlArgs);
        messageObject.put("apns", apnsObject);
        messageObject.put("alert", "howdy");
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("[Arg1, Arg2]", Arrays.toString(unifiedPushMessage.getMessage().getApns().getUrlArgs()));
    }

    @Test
    public void testActionCategory() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        apnsObject.put("action-category", "POSTS");
        messageObject.put("alert", "Howdy");
        messageObject.put("apns", apnsObject);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("POSTS", unifiedPushMessage.getMessage().getApns().getActionCategory());
    }

    @Test
    public void testLocalizedTitleKey() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        apnsObject.put("localized-title-key", "myLocalizedTitle");
        messageObject.put("alert", "Howdy");
        messageObject.put("apns", apnsObject);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("myLocalizedTitle", unifiedPushMessage.getMessage().getApns().getLocalizedTitleKey());
    }

    @Test
    public void testLocalizedTitleArguments() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>();
        String[] arguments = {"Jambon", "ham"};
        apnsObject.put("localized-title-arguments", arguments);
        messageObject.put("alert", "Howdy");
        messageObject.put("apns", apnsObject);
        container.put("message", messageObject);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertEquals("[Jambon, ham]", Arrays.toString(unifiedPushMessage.getMessage().getApns().getLocalizedTitleArguments()));
    }

    @Test
    public void testMultipleAliasCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        messageObject.put("simple-push", "version=123");
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("alias", Arrays.asList("foo@bar.org", "bar@foo.com"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getAliases());
        assertEquals(2, unifiedPushMessage.getCriteria().getAliases().size());
        assertTrue(unifiedPushMessage.getCriteria().getAliases().contains("foo@bar.org"));
        assertTrue(unifiedPushMessage.getCriteria().getAliases().contains("bar@foo.com"));
    }

    @Test
    public void testDeviceTypeCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        Map<String, String> data = new HashMap<String, String>();
        data.put("someKey", "someValue");
        messageObject.put("user-data", data);
        container.put("message", messageObject);
        messageObject.put("simple-push", "version=123");
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("deviceType", Arrays.asList("iPad"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getDeviceTypes());
        assertEquals(1, unifiedPushMessage.getCriteria().getDeviceTypes().size());
        assertEquals("iPad", unifiedPushMessage.getCriteria().getDeviceTypes().get(0));
    }

    @Test
    public void testDeviceTypesCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        container.put("message", messageObject);
        messageObject.put("simple-push", "version=123");
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("deviceType", Arrays.asList("iPad", "Android"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getDeviceTypes());
        assertEquals(2, unifiedPushMessage.getCriteria().getDeviceTypes().size());
        assertTrue(unifiedPushMessage.getCriteria().getDeviceTypes().contains("iPad"));
        assertTrue(unifiedPushMessage.getCriteria().getDeviceTypes().contains("Android"));
    }

    @Test
    public void testCategoriesCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        container.put("message", messageObject);
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("categories", Arrays.asList("football"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getCategories());
        assertEquals(1, unifiedPushMessage.getCriteria().getCategories().size());
        assertEquals("football", unifiedPushMessage.getCriteria().getCategories().get(0));
    }

    @Test
    public void testMultipleCategoriesCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        container.put("message", messageObject);
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("categories", Arrays.asList("soccer", "olympics"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getCategories());
        assertEquals(2, unifiedPushMessage.getCriteria().getCategories().size());
        assertTrue(unifiedPushMessage.getCriteria().getCategories().contains("olympics"));
        assertTrue(unifiedPushMessage.getCriteria().getCategories().contains("soccer"));
    }

    @Test
    public void testVariantsCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        container.put("message", messageObject);
        // criteria:
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("variants", Arrays.asList("abc-123-def-456"));
        container.put("criteria", criteria);
        // parse it:
        final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container);
        assertNotNull(unifiedPushMessage.getCriteria().getVariants());
        assertEquals(1, unifiedPushMessage.getCriteria().getVariants().size());
        assertEquals("abc-123-def-456", unifiedPushMessage.getCriteria().getVariants().get(0));
    }

    @Test
    public void testMultipleVariantsCriteria() throws IOException {
        final Map<String, Object> container = new LinkedHashMap<String, Object>();
        final Map<String, Object> messageObject = new LinkedHashMap<String, Object>();
        messageObject.put("alert", "Howdy");
        messageObject.put("sound", "default");
        messageObject.put("badge", 2);
        container.put("message", messageObject); //
criteria: Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("variants", Arrays.asList("abc-123-def-456", "456-abc-123-def-bar")); container.put("criteria", criteria); // parse it: final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container); assertNotNull(unifiedPushMessage.getCriteria().getVariants()); assertEquals(2, unifiedPushMessage.getCriteria().getVariants().size()); assertTrue(unifiedPushMessage.getCriteria().getVariants().contains("abc-123-def-456")); assertTrue(unifiedPushMessage.getCriteria().getVariants().contains("456-abc-123-def-bar")); } @Test public void testAllCriteria() throws IOException { final Map<String, Object> container = new LinkedHashMap<String, Object>(); final Map<String, Object> messageObject = new LinkedHashMap<String, Object>(); messageObject.put("alert", "Howdy"); messageObject.put("sound", "default"); messageObject.put("badge", 2); Map<String, String> data = new HashMap<String, String>(); data.put("someKey", "someValue"); messageObject.put("user-data", data); container.put("message", messageObject); messageObject.put("simple-push", "version=123"); // criteria: Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("variants", Arrays.asList("abc-123-def-456", "456-abc-123-def-bar")); criteria.put("categories", Arrays.asList("soccer", "olympics")); criteria.put("deviceType", Arrays.asList("iPad", "Android")); criteria.put("alias", Arrays.asList("foo@bar.org", "bar@foo.com")); container.put("criteria", criteria); // parse it: final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container); assertEquals(2, unifiedPushMessage.getCriteria().getAliases().size()); assertTrue(unifiedPushMessage.getCriteria().getAliases().contains("foo@bar.org")); assertFalse(unifiedPushMessage.getCriteria().getAliases().contains("mrx@bar.org")); assertEquals(2, unifiedPushMessage.getCriteria().getDeviceTypes().size()); 
assertTrue(unifiedPushMessage.getCriteria().getDeviceTypes().contains("Android")); assertFalse(unifiedPushMessage.getCriteria().getDeviceTypes().contains("iPhone")); assertEquals(2, unifiedPushMessage.getCriteria().getCategories().size()); assertTrue(unifiedPushMessage.getCriteria().getCategories().contains("olympics")); assertFalse(unifiedPushMessage.getCriteria().getCategories().contains("Bundesliga")); assertEquals(2, unifiedPushMessage.getCriteria().getVariants().size()); assertTrue(unifiedPushMessage.getCriteria().getVariants().contains("abc-123-def-456")); assertFalse(unifiedPushMessage.getCriteria().getVariants().contains("0815")); assertEquals("version=123", unifiedPushMessage.getMessage().getSimplePush()); } @Test(expected = JsonMappingException.class) public void testVariantCriteriaParseError() throws IOException { final Map<String, Object> container = new LinkedHashMap<String, Object>(); Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("variants", "abc-123-def-456"); container.put("criteria", criteria); parsePushMessage(container); } @Test public void testMessageToJson() throws IOException, URISyntaxException { //given final Map<String, Object> container = new LinkedHashMap<String, Object>(); final Map<String, Object> messageObject = new LinkedHashMap<String, Object>(); final Map<String, Object> apnsObject = new LinkedHashMap<String, Object>(); messageObject.put("alert", "HELLO!"); messageObject.put("sound", "default"); messageObject.put("badge", 2); Map<String, Object> data = new HashMap<String, Object>(); data.put("key", "value"); data.put("key2", "value"); messageObject.put("user-data", data); apnsObject.put("action-category", "category"); apnsObject.put("content-available", "true"); messageObject.put("simple-push", "version=123"); Map<String, Object> windows = new HashMap<String, Object>(); windows.put("type", "tile"); windows.put("tileType", "TileWideBlockAndText01"); windows.put("page", "cordova"); 
messageObject.put("windows", windows); messageObject.put("apns",apnsObject); container.put("message", messageObject); //when final UnifiedPushMessage unifiedPushMessage = parsePushMessage(container); String json = unifiedPushMessage.toJsonString(); //then Path path = Paths.get(getClass().getResource("/message-tojson.json").toURI()); String expectedJson = new String(Files.readAllBytes(path), Charset.defaultCharset()); //poor mans equals ignore white space assertEquals(expectedJson.replaceAll("\\s", ""), json); } private UnifiedPushMessage parsePushMessage(Map<String, Object> container) throws IOException { ObjectMapper mapper = new ObjectMapper(); final String valueAsString = mapper.writeValueAsString(container); return mapper.readValue(valueAsString, UnifiedPushMessage.class); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.api.connector.source.lib; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.connector.source.Boundedness; import org.apache.flink.api.connector.source.Source; import org.apache.flink.api.connector.source.SourceReader; import org.apache.flink.api.connector.source.SourceReaderContext; import org.apache.flink.api.connector.source.SplitEnumerator; import org.apache.flink.api.connector.source.SplitEnumeratorContext; import org.apache.flink.api.connector.source.lib.util.IteratorSourceEnumerator; import org.apache.flink.api.connector.source.lib.util.IteratorSourceReader; import org.apache.flink.api.connector.source.lib.util.IteratorSourceSplit; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.core.memory.DataInputDeserializer; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputSerializer; import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.util.NumberSequenceIterator; import java.io.IOException; import java.util.ArrayList; 
import java.util.Collection;

import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * A data source that produces a sequence of numbers (longs).
 * This source is useful for testing and for cases that just need a stream of N events of any kind.
 *
 * <p>The source splits the sequence into as many parallel sub-sequences as there are parallel
 * source readers. Each sub-sequence will be produced in order.
 * Consequently, if the parallelism is limited to one, this will produce one sequence in order.
 *
 * <p>This source is always bounded. For very long sequences (for example over the entire domain
 * of long integer values), user may want to consider executing the application in a streaming manner,
 * because, despite the fact that the produced stream is bounded, the end bound is pretty far away.
 */
public class NumberSequenceSource
        implements Source<Long, NumberSequenceSource.NumberSequenceSplit, Collection<NumberSequenceSource.NumberSequenceSplit>>,
                ResultTypeQueryable<Long> {

    private static final long serialVersionUID = 1L;

    /** The starting number in the sequence, inclusive. */
    private final long from;

    /** The end number in the sequence, inclusive. */
    private final long to;

    /**
     * Creates a new {@code NumberSequenceSource} that produces parallel sequences covering the range
     * {@code from} to {@code to} (both boundaries are inclusive).
     */
    public NumberSequenceSource(long from, long to) {
        checkArgument(from <= to, "'from' must be <= 'to'");
        this.from = from;
        this.to = to;
    }

    @Override
    public TypeInformation<Long> getProducedType() {
        return Types.LONG;
    }

    @Override
    public Boundedness getBoundedness() {
        return Boundedness.BOUNDED;
    }

    @Override
    public SourceReader<Long, NumberSequenceSplit> createReader(SourceReaderContext readerContext) {
        return new IteratorSourceReader<>(readerContext);
    }

    @Override
    public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> createEnumerator(
            final SplitEnumeratorContext<NumberSequenceSplit> enumContext) {

        // Partition [from, to] into one contiguous sub-range per parallel reader.
        final NumberSequenceIterator[] subSequences =
                new NumberSequenceIterator(from, to).split(enumContext.currentParallelism());
        final ArrayList<NumberSequenceSplit> splits = new ArrayList<>(subSequences.length);

        // Split IDs are 1-based, assigned in partition order.
        int splitId = 1;
        for (NumberSequenceIterator seq : subSequences) {
            splits.add(new NumberSequenceSplit(String.valueOf(splitId++), seq.getCurrent(), seq.getTo()));
        }

        return new IteratorSourceEnumerator<>(enumContext, splits);
    }

    @Override
    public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> restoreEnumerator(
            final SplitEnumeratorContext<NumberSequenceSplit> enumContext,
            Collection<NumberSequenceSplit> checkpoint) {
        // Restoring simply hands the checkpointed (still unassigned) splits back
        // to a fresh enumerator.
        return new IteratorSourceEnumerator<>(enumContext, checkpoint);
    }

    @Override
    public SimpleVersionedSerializer<NumberSequenceSplit> getSplitSerializer() {
        return new SplitSerializer();
    }

    @Override
    public SimpleVersionedSerializer<Collection<NumberSequenceSplit>> getEnumeratorCheckpointSerializer() {
        return new CheckpointSerializer();
    }

    // ------------------------------------------------------------------------
    //  splits & checkpoint
    // ------------------------------------------------------------------------

    /**
     * A split of the source, representing a number sub-sequence.
     */
    public static class NumberSequenceSplit
            implements IteratorSourceSplit<Long, NumberSequenceIterator> {

        private final String splitId;
        private final long from;
        private final long to;

        public NumberSequenceSplit(String splitId, long from, long to) {
            checkArgument(from <= to, "'from' must be <= 'to'");
            this.splitId = checkNotNull(splitId);
            this.from = from;
            this.to = to;
        }

        @Override
        public String splitId() {
            return splitId;
        }

        public long from() {
            return from;
        }

        public long to() {
            return to;
        }

        @Override
        public NumberSequenceIterator getIterator() {
            return new NumberSequenceIterator(from, to);
        }

        @Override
        public IteratorSourceSplit<Long, NumberSequenceIterator> getUpdatedSplitForIterator(
                final NumberSequenceIterator iterator) {
            // Snapshot of a partially-consumed split: keep the id, narrow the range
            // to what the iterator has not yet emitted.
            return new NumberSequenceSplit(splitId, iterator.getCurrent(), iterator.getTo());
        }

        @Override
        public String toString() {
            return String.format("NumberSequenceSplit [%d, %d] (%s)", from, to, splitId);
        }
    }

    private static final class SplitSerializer implements SimpleVersionedSerializer<NumberSequenceSplit> {

        private static final int CURRENT_VERSION = 1;

        @Override
        public int getVersion() {
            return CURRENT_VERSION;
        }

        @Override
        public byte[] serialize(NumberSequenceSplit split) throws IOException {
            checkArgument(split.getClass() == NumberSequenceSplit.class, "cannot serialize subclasses");

            // We will serialize 2 longs (16 bytes) plus the UTF representation of the string (2 + length)
            final DataOutputSerializer out = new DataOutputSerializer(split.splitId().length() + 18);
            serializeV1(out, split);
            return out.getCopyOfBuffer();
        }

        @Override
        public NumberSequenceSplit deserialize(int version, byte[] serialized) throws IOException {
            if (version != CURRENT_VERSION) {
                throw new IOException("Unrecognized version: " + version);
            }
            final DataInputDeserializer in = new DataInputDeserializer(serialized);
            return deserializeV1(in);
        }

        // V1 layout: UTF split id, then 'from' and 'to' as big-endian longs.
        static void serializeV1(DataOutputView out, NumberSequenceSplit split) throws IOException {
            out.writeUTF(split.splitId());
            out.writeLong(split.from());
            out.writeLong(split.to());
        }

        static NumberSequenceSplit deserializeV1(DataInputView in) throws IOException {
            return new NumberSequenceSplit(in.readUTF(), in.readLong(), in.readLong());
        }
    }

    private static final class CheckpointSerializer
            implements SimpleVersionedSerializer<Collection<NumberSequenceSplit>> {

        private static final int CURRENT_VERSION = 1;

        @Override
        public int getVersion() {
            return CURRENT_VERSION;
        }

        @Override
        public byte[] serialize(Collection<NumberSequenceSplit> checkpoint) throws IOException {
            // Each split needs 2 longs (16 bytes) plus the UTF representation of the string (2 + length)
            // Assuming at most 4 digit split IDs, 22 bytes per split avoids any intermediate array resizing.
            // plus four bytes for the length field
            final DataOutputSerializer out = new DataOutputSerializer(checkpoint.size() * 22 + 4);
            out.writeInt(checkpoint.size());
            for (NumberSequenceSplit split : checkpoint) {
                SplitSerializer.serializeV1(out, split);
            }
            return out.getCopyOfBuffer();
        }

        @Override
        public Collection<NumberSequenceSplit> deserialize(int version, byte[] serialized) throws IOException {
            if (version != CURRENT_VERSION) {
                throw new IOException("Unrecognized version: " + version);
            }
            final DataInputDeserializer in = new DataInputDeserializer(serialized);
            final int num = in.readInt();
            final ArrayList<NumberSequenceSplit> result = new ArrayList<>(num);
            for (int remaining = num; remaining > 0; remaining--) {
                result.add(SplitSerializer.deserializeV1(in));
            }
            return result;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.util; /** * Timers schedule one-shot or recurring {@link TimerTask tasks} for execution. * Prefer {@link java.util.concurrent.ScheduledThreadPoolExecutor * ScheduledThreadPoolExecutor} for new code. * * <p>Each timer has one thread on which tasks are executed sequentially. When * this thread is busy running a task, runnable tasks may be subject to delays. * * <p>One-shot are scheduled to run at an absolute time or after a relative * delay. * * <p>Recurring tasks are scheduled with either a fixed period or a fixed rate: * <ul> * <li>With the default <strong>fixed-period execution</strong>, each * successive run of a task is scheduled relative to the start time of * the previous run, so two runs are never fired closer together in time * than the specified {@code period}. * <li>With <strong>fixed-rate execution</strong>, the start time of each * successive run of a task is scheduled without regard for when the * previous run took place. This may result in a series of bunched-up runs * (one launched immediately after another) if delays prevent the timer * from starting tasks on time. 
* </ul>
 *
 * <p>When a timer is no longer needed, users should call {@link #cancel}, which
 * releases the timer's thread and other resources. Timers not explicitly
 * cancelled may hold resources indefinitely.
 *
 * <p>This class does not offer guarantees about the real-time nature of task
 * scheduling. Multiple threads can share a single timer without
 * synchronization.
 */
public class Timer {

    private static final class TimerImpl extends Thread {

        /**
         * Binary min-heap of scheduled tasks, ordered by the {@code when}
         * field. The earliest task is always at index 0.
         */
        private static final class TimerHeap {
            private int DEFAULT_HEAP_SIZE = 256;

            private TimerTask[] timers = new TimerTask[DEFAULT_HEAP_SIZE];

            private int size = 0;

            private int deletedCancelledNumber = 0;

            public TimerTask minimum() {
                return timers[0];
            }

            public boolean isEmpty() {
                return size == 0;
            }

            public void insert(TimerTask task) {
                // grow the backing array (doubling) when full
                if (timers.length == size) {
                    TimerTask[] appendedTimers = new TimerTask[size * 2];
                    System.arraycopy(timers, 0, appendedTimers, 0, size);
                    timers = appendedTimers;
                }
                timers[size++] = task;
                upHeap();
            }

            public void delete(int pos) {
                // possible to delete any position of the heap:
                // move the last element into the hole, then re-sift down
                if (pos >= 0 && pos < size) {
                    timers[pos] = timers[--size];
                    timers[size] = null;
                    downHeap(pos);
                }
            }

            // Sift the last-inserted element up until the heap property holds.
            private void upHeap() {
                int current = size - 1;
                int parent = (current - 1) / 2;

                while (timers[current].when < timers[parent].when) {
                    // swap the two
                    TimerTask tmp = timers[current];
                    timers[current] = timers[parent];
                    timers[parent] = tmp;

                    // update pos and current
                    current = parent;
                    parent = (current - 1) / 2;
                }
            }

            // Sift the element at pos down until the heap property holds.
            private void downHeap(int pos) {
                int current = pos;
                int child = 2 * current + 1;

                while (child < size && size > 0) {
                    // compare the children if they exist
                    if (child + 1 < size
                            && timers[child + 1].when < timers[child].when) {
                        child++;
                    }

                    // compare selected child with parent
                    if (timers[current].when < timers[child].when) {
                        break;
                    }

                    // swap the two
                    TimerTask tmp = timers[current];
                    timers[current] = timers[child];
                    timers[child] = tmp;

                    // update pos and current
                    current = child;
                    child = 2 * current + 1;
                }
            }

            public void reset() {
                timers = new TimerTask[DEFAULT_HEAP_SIZE];
                size = 0;
            }

            public void adjustMinimum() {
                downHeap(0);
            }

            // Removes every cancelled task, counting removals in
            // deletedCancelledNumber (read back by purge()).
            public void deleteIfCancelled() {
                for (int i = 0; i < size; i++) {
                    if (timers[i].cancelled) {
                        deletedCancelledNumber++;
                        delete(i);
                        // re-try this point: delete() moved another task here
                        i--;
                    }
                }
            }

            // Linear scan for the task's index; -1 if absent.
            private int getTask(TimerTask task) {
                for (int i = 0; i < timers.length; i++) {
                    if (timers[i] == task) {
                        return i;
                    }
                }
                return -1;
            }
        }

        /**
         * True if the method cancel() of the Timer was called or the !!!stop()
         * method was invoked
         */
        private boolean cancelled;

        /**
         * True if the Timer has become garbage
         */
        private boolean finished;

        /**
         * Contains scheduled events, sorted according to
         * {@code when} field of TaskScheduled object.
         */
        private TimerHeap tasks = new TimerHeap();

        /**
         * Starts a new timer.
         *
         * @param name thread's name
         * @param isDaemon daemon thread or not
         */
        TimerImpl(String name, boolean isDaemon) {
            this.setName(name);
            this.setDaemon(isDaemon);
            this.start();
        }

        /**
         * This method will be launched on separate thread for each Timer
         * object. It loops forever: waits for the next due task, dequeues it,
         * re-enqueues it if it is periodic, then runs it outside the lock.
         */
        @Override
        public void run() {
            while (true) {
                TimerTask task;
                synchronized (this) {
                    // need to check cancelled inside the synchronized block
                    if (cancelled) {
                        return;
                    }
                    if (tasks.isEmpty()) {
                        if (finished) {
                            return;
                        }
                        // no tasks scheduled -- sleep until any task appear
                        try {
                            this.wait();
                        } catch (InterruptedException ignored) {
                        }
                        continue;
                    }

                    long currentTime = System.currentTimeMillis();

                    task = tasks.minimum();
                    long timeToSleep;

                    synchronized (task.lock) {
                        if (task.cancelled) {
                            tasks.delete(0);
                            continue;
                        }

                        // check the time to sleep for the first task scheduled
                        timeToSleep = task.when - currentTime;
                    }

                    if (timeToSleep > 0) {
                        // sleep! (woken early by insertTask()/cancel() notify)
                        try {
                            this.wait(timeToSleep);
                        } catch (InterruptedException ignored) {
                        }
                        continue;
                    }

                    // no sleep is necessary before launching the task
                    synchronized (task.lock) {
                        int pos = 0;
                        // the heap may have changed while we slept; re-locate
                        // the task if it is no longer the minimum
                        if (tasks.minimum().when != task.when) {
                            pos = tasks.getTask(task);
                        }
                        if (task.cancelled) {
                            tasks.delete(tasks.getTask(task));
                            continue;
                        }

                        // set time to schedule
                        task.setScheduledTime(task.when);

                        // remove task from queue
                        tasks.delete(pos);

                        // set when the next task should be launched
                        if (task.period >= 0) {
                            // this is a repeating task,
                            if (task.fixedRate) {
                                // task is scheduled at fixed rate
                                task.when = task.when + task.period;
                            } else {
                                // task is scheduled at fixed delay
                                task.when = System.currentTimeMillis()
                                        + task.period;
                            }

                            // insert this task into queue
                            insertTask(task);
                        } else {
                            task.when = 0;
                        }
                    }
                }

                // Run the task without holding any lock. If it throws, the
                // whole timer is cancelled (matches java.util.Timer contract:
                // an uncaught exception kills the timer thread).
                boolean taskCompletedNormally = false;
                try {
                    task.run();
                    taskCompletedNormally = true;
                } finally {
                    if (!taskCompletedNormally) {
                        synchronized (this) {
                            cancelled = true;
                        }
                    }
                }
            }
        }

        private void insertTask(TimerTask newTask) {
            // callers are synchronized
            tasks.insert(newTask);
            this.notify();
        }

        /**
         * Cancels timer.
         */
        public synchronized void cancel() {
            cancelled = true;
            tasks.reset();
            this.notify();
        }

        public int purge() {
            if (tasks.isEmpty()) {
                return 0;
            }
            // callers are synchronized
            tasks.deletedCancelledNumber = 0;
            tasks.deleteIfCancelled();
            return tasks.deletedCancelledNumber;
        }
    }

    /**
     * Separate finalizable object so the Timer itself can be collected:
     * when the Timer becomes unreachable, this helper's finalizer marks the
     * worker as finished and wakes it so the thread can exit.
     */
    private static final class FinalizerHelper {
        private final TimerImpl impl;

        FinalizerHelper(TimerImpl impl) {
            this.impl = impl;
        }

        @Override
        protected void finalize() throws Throwable {
            try {
                synchronized (impl) {
                    impl.finished = true;
                    impl.notify();
                }
            } finally {
                super.finalize();
            }
        }
    }

    private static long timerId;

    private synchronized static long nextId() {
        return timerId++;
    }

    /* This object will be used for synchronization purposes */
    private final TimerImpl impl;

    // Used to finalize thread
    @SuppressWarnings("unused")
    private final FinalizerHelper finalizer;

    /**
     * Creates a new named {@code Timer} which may be specified to be run as a
     * daemon thread.
     *
     * @throws NullPointerException if {@code name == null}
     */
    public Timer(String name, boolean isDaemon) {
        if (name == null) {
            throw new NullPointerException("name == null");
        }
        this.impl = new TimerImpl(name, isDaemon);
        this.finalizer = new FinalizerHelper(impl);
    }

    /**
     * Creates a new named {@code Timer} which does not run as a daemon thread.
     *
     * @throws NullPointerException if {@code name == null}
     */
    public Timer(String name) {
        this(name, false);
    }

    /**
     * Creates a new {@code Timer} which may be specified to be run as a daemon thread.
     *
     * @param isDaemon {@code true} if the {@code Timer}'s thread should be a daemon thread.
     */
    public Timer(boolean isDaemon) {
        this("Timer-" + Timer.nextId(), isDaemon);
    }

    /**
     * Creates a new non-daemon {@code Timer}.
     */
    public Timer() {
        this(false);
    }

    /**
     * Cancels the {@code Timer} and all scheduled tasks. If there is a
     * currently running task it is not affected. No more tasks may be scheduled
     * on this {@code Timer}. Subsequent calls do nothing.
     */
    public void cancel() {
        impl.cancel();
    }

    /**
     * Removes all canceled tasks from the task queue. If there are no
     * other references on the tasks, then after this call they are free
     * to be garbage collected.
     *
     * @return the number of canceled tasks that were removed from the task
     *         queue.
     */
    public int purge() {
        synchronized (impl) {
            return impl.purge();
        }
    }

    /**
     * Schedule a task for single execution. If {@code when} is less than the
     * current time, it will be scheduled to be executed as soon as possible.
     *
     * @param task
     *            the task to schedule.
     * @param when
     *            time of execution.
     * @throws IllegalArgumentException
     *             if {@code when.getTime() < 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void schedule(TimerTask task, Date when) {
        if (when.getTime() < 0) {
            throw new IllegalArgumentException("when < 0: " + when.getTime());
        }
        long delay = when.getTime() - System.currentTimeMillis();
        scheduleImpl(task, delay < 0 ? 0 : delay, -1, false);
    }

    /**
     * Schedule a task for single execution after a specified delay.
     *
     * @param task
     *            the task to schedule.
     * @param delay
     *            amount of time in milliseconds before execution.
     * @throws IllegalArgumentException
     *             if {@code delay < 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void schedule(TimerTask task, long delay) {
        if (delay < 0) {
            throw new IllegalArgumentException("delay < 0: " + delay);
        }
        scheduleImpl(task, delay, -1, false);
    }

    /**
     * Schedule a task for repeated fixed-delay execution after a specific delay.
     *
     * @param task
     *            the task to schedule.
     * @param delay
     *            amount of time in milliseconds before first execution.
     * @param period
     *            amount of time in milliseconds between subsequent executions.
     * @throws IllegalArgumentException
     *             if {@code delay < 0} or {@code period <= 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void schedule(TimerTask task, long delay, long period) {
        if (delay < 0 || period <= 0) {
            throw new IllegalArgumentException();
        }
        scheduleImpl(task, delay, period, false);
    }

    /**
     * Schedule a task for repeated fixed-delay execution after a specific time
     * has been reached.
     *
     * @param task
     *            the task to schedule.
     * @param when
     *            time of first execution.
     * @param period
     *            amount of time in milliseconds between subsequent executions.
     * @throws IllegalArgumentException
     *             if {@code when.getTime() < 0} or {@code period <= 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void schedule(TimerTask task, Date when, long period) {
        if (period <= 0 || when.getTime() < 0) {
            throw new IllegalArgumentException();
        }
        long delay = when.getTime() - System.currentTimeMillis();
        scheduleImpl(task, delay < 0 ? 0 : delay, period, false);
    }

    /**
     * Schedule a task for repeated fixed-rate execution after a specific delay
     * has passed.
     *
     * @param task
     *            the task to schedule.
     * @param delay
     *            amount of time in milliseconds before first execution.
     * @param period
     *            amount of time in milliseconds between subsequent executions.
     * @throws IllegalArgumentException
     *             if {@code delay < 0} or {@code period <= 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void scheduleAtFixedRate(TimerTask task, long delay, long period) {
        if (delay < 0 || period <= 0) {
            throw new IllegalArgumentException();
        }
        scheduleImpl(task, delay, period, true);
    }

    /**
     * Schedule a task for repeated fixed-rate execution after a specific time
     * has been reached.
     *
     * @param task
     *            the task to schedule.
     * @param when
     *            time of first execution.
     * @param period
     *            amount of time in milliseconds between subsequent executions.
     * @throws IllegalArgumentException
     *             if {@code when.getTime() < 0} or {@code period <= 0}.
     * @throws IllegalStateException
     *             if the {@code Timer} has been canceled, or if the task has been
     *             scheduled or canceled.
     */
    public void scheduleAtFixedRate(TimerTask task, Date when, long period) {
        if (period <= 0 || when.getTime() < 0) {
            throw new IllegalArgumentException();
        }
        long delay = when.getTime() - System.currentTimeMillis();
        scheduleImpl(task, delay, period, true);
    }

    /*
     * Schedule a task. Validates timer and task state under the appropriate
     * locks, stamps the task with its absolute fire time, and hands it to the
     * worker thread's queue.
     */
    private void scheduleImpl(TimerTask task, long delay, long period, boolean fixed) {
        synchronized (impl) {
            if (impl.cancelled) {
                throw new IllegalStateException("Timer was canceled");
            }

            long when = delay + System.currentTimeMillis();

            // negative 'when' means the delay overflowed a long
            if (when < 0) {
                throw new IllegalArgumentException("Illegal delay to start the TimerTask: " + when);
            }

            synchronized (task.lock) {
                if (task.isScheduled()) {
                    throw new IllegalStateException("TimerTask is scheduled already");
                }

                if (task.cancelled) {
                    throw new IllegalStateException("TimerTask is canceled");
                }

                task.when = when;
                task.period = period;
                task.fixedRate = fixed;
            }

            // insert the newTask into queue
            impl.insertTask(task);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.giraffa; import static org.apache.giraffa.GiraffaConstants.FileState; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import java.io.IOException; import java.net.ConnectException; import java.util.Collection; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.giraffa.hbase.INodeManager; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.regionserver.HRegionServer; import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Time;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration tests for Giraffa file-lease management: lease creation and
 * renewal, concurrent-create failure, and server- and client-driven lease
 * recovery, all against a single-node mini HBase cluster.
 */
public class TestLeaseManagement {
  private static final Log LOG = LogFactory.getLog(TestLeaseManagement.class);

  private static final HBaseTestingUtility UTIL =
      GiraffaTestUtils.getHBaseTestingUtility();
  private GiraffaFileSystem grfs;
  private GiraffaConfiguration conf;
  private Connection connection;
  private INodeManager nodeManager;

  @BeforeClass
  public static void beforeClass() throws Exception {
    System.setProperty(
        HBaseCommonTestingUtility.BASE_TEST_DIRECTORY_KEY,
        GiraffaTestUtils.BASE_TEST_DIRECTORY);
    Configuration hbaseConf = UTIL.getConfiguration();
    hbaseConf.setInt("hbase.assignment.maximum.attempts", 3);
    // Put meta on master to avoid meta server shutdown handling
    hbaseConf.set("hbase.balancer.tablesOnMaster", "hbase:meta");
    hbaseConf.setInt("hbase.master.maximum.ping.server.attempts", 3);
    hbaseConf.setInt("hbase.master.ping.server.retry.sleep.interval", 1);
    hbaseConf.setBoolean("hbase.assignment.usezk", false);
    hbaseConf.setBoolean(DFS_PERMISSIONS_ENABLED_KEY, false);
    GiraffaTestUtils.enableMultipleUsers();
    UTIL.startMiniCluster(1);
  }

  // Reformats the Giraffa filesystem before every test, so each test starts
  // from an empty namespace.
  @Before
  public void before() throws IOException {
    conf = new GiraffaConfiguration(UTIL.getConfiguration());
    conf.setBoolean("fs.grfa.impl.disable.cache", true);
    GiraffaTestUtils.setGiraffaURI(conf);
    GiraffaFileSystem.format(conf, false);
    grfs = (GiraffaFileSystem) FileSystem.get(conf);
    connection = ConnectionFactory.createConnection(conf);
    nodeManager = GiraffaTestUtils.getNodeManager(conf, connection);
  }

  @After
  public void after() throws IOException {
    IOUtils.cleanup(LOG, grfs, nodeManager, connection);
  }

  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  // Creating a file acquires a lease; renewLease refreshes it; closing the
  // stream transitions the file to CLOSED and drops the lease.
  @Test
  public
  void testLeaseCreation() throws IOException {
    String src = "/testLeaseCreation";
    Path path = new Path(src);
    long currentTime = Time.now();
    FSDataOutputStream outputStream = grfs.create(path);
    try {
      // check lease exists after creation
      checkLease(src, currentTime);
      // check lease renew
      currentTime = Time.now();
      grfs.grfaClient.getNamespaceService().renewLease(
          grfs.grfaClient.getClientName());
      checkLease(src, currentTime);
    } finally {
      IOUtils.closeStream(outputStream);
    }
    INodeFile iNode = INodeFile.valueOf(nodeManager.getINode(src));
    assertThat(iNode.getFileState(), is(FileState.CLOSED));
    FileLease lease = iNode.getLease();
    assertThat(lease, is(nullValue()));
  }

  // A second create(path, overwrite=false) while the lease is held must fail
  // with AlreadyBeingCreatedException, leaving the original lease intact.
  @Test
  public void testLeaseFailure() throws IOException {
    String src = "/testLeaseFailure";
    Path path = new Path(src);
    long currentTime = Time.now();
    FSDataOutputStream outputStream = grfs.create(path);
    try {
      checkLease(src, currentTime);
      try {
        grfs.create(path, false);
        fail("Expected AlreadyBeingCreatedException");
      } catch (AlreadyBeingCreatedException e) {}
      // keep stream open intentionally
      checkLease(src, currentTime);
    } finally {
      IOUtils.closeStream(outputStream);
    }
    INodeFile iNode = INodeFile.valueOf(nodeManager.getINode(src));
    assertThat(iNode.getFileState(), is(FileState.CLOSED));
    FileLease lease = iNode.getLease();
    assertThat(lease, is(nullValue()));
  }

  // Server-side recovery: shrink the hard limit so the lease expires quickly,
  // then poll triggerLeaseRecovery() until the file is force-closed.
  @Test
  public void testLeaseRecovery() throws IOException {
    String src = "/testLeaseRecovery";
    Path path = new Path(src);
    HRegionServer server = UTIL.getHBaseCluster().getRegionServer(0);
    LeaseManager leaseManager = LeaseManager.originateSharedLeaseManager(
        server.getRpcServer().getListenerAddress().toString());
    FSDataOutputStream outputStream = grfs.create(path);
    String clientName = grfs.grfaClient.getClientName();
    outputStream.write(1);
    outputStream.write(2);
    outputStream.hflush();
    try {
      leaseManager.setHardLimit(10L);
      INodeFile iNode = null;
      // poll up to ~10s for the recovery to close the file
      for(int i = 0; i < 100; i++) {
        leaseManager.triggerLeaseRecovery();
        try {Thread.sleep(100L);} catch
        (InterruptedException ignored) {}
        iNode = INodeFile.valueOf(nodeManager.getINode(src));
        if(iNode.getFileState() == FileState.CLOSED) break;
      }
      assertThat(iNode.getFileState(), is(FileState.CLOSED));
      // both hflushed bytes must have been persisted by recovery
      assertThat(iNode.getLen(), is(2L));
      assertThat(iNode.getLease(), is(nullValue()));
      assertThat(leaseManager.getLeases(clientName), is(nullValue()));
    } finally {
      // restore the default hard limit for subsequent tests
      leaseManager.setHardLimit(HdfsConstants.LEASE_HARDLIMIT_PERIOD);
      IOUtils.closeStream(outputStream);
    }
  }

  // Client-driven recovery via recoverLease(): the file is closed immediately
  // and the lease released, without waiting for the hard limit.
  @Test
  public void testClientLeaseRecovery() throws IOException {
    // NOTE(review): reuses the "/testLeaseRecovery" path from the previous
    // test rather than "/testClientLeaseRecovery". The filesystem is
    // reformatted in @Before, so it works, but confirm this is intentional.
    String src = "/testLeaseRecovery";
    Path path = new Path(src);
    HRegionServer server = UTIL.getHBaseCluster().getRegionServer(0);
    LeaseManager leaseManager = LeaseManager.originateSharedLeaseManager(
        server.getRpcServer().getListenerAddress().toString());
    FSDataOutputStream outputStream = grfs.create(path);
    String clientName = grfs.grfaClient.getClientName();
    outputStream.write(1);
    outputStream.write(2);
    outputStream.hflush();
    try {
      boolean recovered = grfs.grfaClient.getNamespaceService().recoverLease(
          src, grfs.grfaClient.getClientName());
      assertThat(recovered, is(true));
      INodeFile iNode = INodeFile.valueOf(nodeManager.getINode(src));
      assertThat(iNode.getFileState(), is(FileState.CLOSED));
      assertThat(iNode.getLen(), is(2L));
      assertThat(iNode.getLease(), is(nullValue()));
      assertThat(leaseManager.getLeases(clientName), is(nullValue()));
    } finally {
      IOUtils.closeStream(outputStream);
    }
  }

  /**
   * This test shows that if a Region is to "migrate", either by split
   * or by RegionServer shutdown, that an incomplete file with a lease migrates
   * with the Region and that the lease is reloaded upon open and stays valid.
*/ @Test public void testLeaseMigration() throws Exception { String src = "/testLeaseFailure"; Path path = new Path(src); FSDataOutputStream outputStream = grfs.create(path); MiniHBaseCluster cluster = UTIL.getHBaseCluster(); try { // keep stream open intentionally HRegionServer newServer = cluster.startRegionServer().getRegionServer(); newServer.waitForServerOnline(); HRegionServer dyingServer = cluster.getRegionServer(0); cluster.stopRegionServer(dyingServer.getServerName()); cluster.waitForRegionServerToStop(dyingServer.getServerName(), 10000L); INodeFile iNode = null; do { try { IOUtils.cleanup(LOG, connection); connection = ConnectionFactory.createConnection(conf); IOUtils.cleanup(LOG, nodeManager); nodeManager = GiraffaTestUtils.getNodeManager(conf, connection); iNode = INodeFile.valueOf(nodeManager.getINode(src)); } catch (ConnectException ignored) {} } while(iNode == null); FileLease rowLease = iNode.getLease(); LeaseManager leaseManager = LeaseManager.originateSharedLeaseManager( newServer.getRpcServer().getListenerAddress().toString()); Collection<FileLease> leases = leaseManager.getLeases(rowLease.getHolder()); assertThat(leases.size(), is(1)); FileLease leaseManagerLease = leases.iterator().next(); // The following asserts are here to highlight that as a result of // migrating the FileLease across RegionServers we lose expiration date // consistency between the row field and the LeaseManager. assertThat(rowLease, is(not(equalTo(leaseManagerLease)))); assertThat(rowLease.getHolder(), is(equalTo(leaseManagerLease.getHolder()))); assertThat(rowLease.getPath(), is(equalTo(leaseManagerLease.getPath()))); assertThat(rowLease.getLastUpdate(), is(not(equalTo(leaseManagerLease.getLastUpdate())))); // Renewing the lease restores the consistency. 
grfs.grfaClient.getNamespaceService().renewLease( grfs.grfaClient.getClientName()); iNode = INodeFile.valueOf(nodeManager.getINode(src)); rowLease = iNode.getLease(); leases = leaseManager.getLeases(rowLease.getHolder()); assertThat(leases.size(), is(1)); leaseManagerLease = leases.iterator().next(); assertThat(rowLease, is(equalTo(leaseManagerLease))); } finally { IOUtils.cleanup(LOG, outputStream); } INodeFile iNode = INodeFile.valueOf(nodeManager.getINode(src)); assertThat(iNode.getFileState(), is(FileState.CLOSED)); FileLease lease = iNode.getLease(); assertThat(lease, is(nullValue())); } void checkLease(String src, long currentTime) throws IOException { INodeFile iNode = INodeFile.valueOf(nodeManager.getINode(src)); FileLease lease = iNode.getLease(); assertThat(iNode.getFileState(), is(FileState.UNDER_CONSTRUCTION)); assertThat(lease, is(notNullValue())); assertThat(lease.getHolder(), is(grfs.grfaClient.getClientName())); assertThat(lease.getPath(), is(src)); assertThat(lease.getLastUpdate() >= currentTime, is(true)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.testing;

import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import com.google.common.base.Strings;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.beam.sdk.coders.Coder.Context;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.CustomCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Unit tests for {@link CoderProperties}. */
@RunWith(JUnit4.class)
public class CoderPropertiesTest {

  @Rule public ExpectedException expectedException = ExpectedException.none();

  /** A well-behaved deterministic coder passes the determinism property. */
  @Test
  public void testGoodCoderIsDeterministic() throws Exception {
    CoderProperties.coderDeterministic(StringUtf8Coder.of(), "TestData", "TestData");
  }

  /**
   * A coder that says it is not deterministic but actually is.
   *
   * <p>CustomCoder's default verifyDeterministic() throws, so this coder
   * "claims" non-determinism while delegating to the deterministic
   * StringUtf8Coder.
   */
  public static class NonDeterministicCoder extends CustomCoder<String> {
    @Override
    public void encode(String value, OutputStream outStream, Context context)
        throws CoderException, IOException {
      StringUtf8Coder.of().encode(value, outStream, context);
    }

    @Override
    public String decode(InStreamAlias inStream, Context context)
        throws CoderException, IOException {
      return StringUtf8Coder.of().decode(inStream, context);
    }
  }

  /** The determinism property must fail for a coder that claims non-determinism. */
  @Test
  public void testNonDeterministicCoder() throws Exception {
    try {
      CoderProperties.coderDeterministic(new NonDeterministicCoder(), "TestData", "TestData");
      fail("Expected AssertionError");
    } catch (AssertionError error) {
      assertThat(error.getMessage(),
          CoreMatchers.containsString("Expected that the coder is deterministic"));
    }
  }

  /** Passing two unequal values to coderDeterministic is a test-usage error. */
  @Test
  public void testPassingInNonEqualValuesWithDeterministicCoder() throws Exception {
    try {
      CoderProperties.coderDeterministic(StringUtf8Coder.of(), "AAA", "BBB");
      fail("Expected AssertionError");
    } catch (AssertionError error) {
      assertThat(error.getMessage(),
          CoreMatchers.containsString("Expected that the passed in values"));
    }
  }

  /**
   * A coder that is non-deterministic because it adds a string to the value.
   *
   * <p>Appending System.nanoTime() makes successive encodings of the same
   * value differ, while verifyDeterministic() (incorrectly) claims determinism.
   */
  private static class BadDeterministicCoder extends CustomCoder<String> {
    @Override
    public void encode(String value, OutputStream outStream, Context context)
        throws IOException, CoderException {
      StringUtf8Coder.of().encode(value + System.nanoTime(), outStream, context);
    }

    @Override
    public String decode(InputStream inStream, Context context)
        throws CoderException, IOException {
      return StringUtf8Coder.of().decode(inStream, context);
    }

    @Override
    public void verifyDeterministic() throws NonDeterministicException {}
  }

  /** The determinism property must catch the time-dependent encoding. */
  @Test
  public void testBadCoderIsNotDeterministic() throws Exception {
    try {
      CoderProperties.coderDeterministic(new BadDeterministicCoder(), "TestData", "TestData");
      fail("Expected AssertionError");
    } catch (AssertionError error) {
      // Byte values of "TestD" — the common prefix of the two encodings.
      assertThat(error.getMessage(),
          CoreMatchers.containsString("<84>, <101>, <115>, <116>, <68>"));
    }
  }

  /** A well-behaved coder round-trips a value unchanged. */
  @Test
  public void testGoodCoderEncodesEqualValues() throws Exception {
    CoderProperties.coderDecodeEncodeEqual(StringUtf8Coder.of(), "TestData");
  }

  /**
   * This coder changes state during encoding/decoding.
   *
   * <p>Each encode() mutates changedState, so a decode performed after a later
   * encode strips the wrong number of padding characters.
   */
  private static class StateChangingSerializingCoder extends CustomCoder<String> {
    private int changedState;

    public StateChangingSerializingCoder() {
      changedState = 10;
    }

    @Override
    public void encode(String value, OutputStream outStream, Context context)
        throws CoderException, IOException {
      changedState += 1;
      StringUtf8Coder.of().encode(value + Strings.repeat("A", changedState), outStream, context);
    }

    @Override
    public String decode(InputStream inStream, Context context)
        throws CoderException, IOException {
      String decodedValue = StringUtf8Coder.of().decode(inStream, context);
      return decodedValue.substring(0, decodedValue.length() - changedState);
    }
  }

  /** Stateful encode/decode must fail the decode-encode-equal property. */
  @Test
  public void testBadCoderThatDependsOnChangingState() throws Exception {
    try {
      CoderProperties.coderDecodeEncodeEqual(new StateChangingSerializingCoder(), "TestData");
      fail("Expected AssertionError");
    } catch (AssertionError error) {
      assertThat(error.getMessage(), CoreMatchers.containsString("TestData"));
    }
  }

  /**
   * This coder loses information critical to its operation.
   *
   * <p>lostState is transient, so the serialization round-trip performed by
   * the property test resets it to 0 and encode() then throws.
   */
  private static class ForgetfulSerializingCoder extends CustomCoder<String> {
    private transient int lostState;

    public ForgetfulSerializingCoder(int lostState) {
      this.lostState = lostState;
    }

    @Override
    public void encode(String value, OutputStream outStream, Context context)
        throws CoderException, IOException {
      if (lostState == 0) {
        throw new RuntimeException("I forgot something...");
      }
      StringUtf8Coder.of().encode(value, outStream, context);
    }

    @Override
    public String decode(InputStream inStream, Context context)
        throws CoderException, IOException {
      return StringUtf8Coder.of().decode(inStream, context);
    }
  }

  /** Losing transient state across serialization surfaces as a RuntimeException. */
  @Test
  public void testBadCoderThatDependsOnStateThatIsLost() throws Exception {
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("I forgot something...");
    CoderProperties.coderDecodeEncodeEqual(new ForgetfulSerializingCoder(1), "TestData");
  }

  /** A coder which closes the underlying stream during encoding and decoding. */
  public static class ClosingCoder extends CustomCoder<String> {
    @Override
    public void encode(String value, OutputStream outStream, Context context) throws IOException {
      outStream.close();
    }

    @Override
    public String decode(InputStream inStream, Context context) throws IOException {
      inStream.close();
      return null;
    }
  }

  /** Closing a stream the coder does not own must be rejected when decoding. */
  @Test
  public void testClosingCoderFailsWhenDecoding() throws Exception {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Caller does not own the underlying");
    CoderProperties.decode(new ClosingCoder(), Context.NESTED, new byte[0]);
  }

  /** Closing a stream the coder does not own must be rejected when encoding. */
  @Test
  public void testClosingCoderFailsWhenEncoding() throws Exception {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Caller does not own the underlying");
    CoderProperties.encode(new ClosingCoder(), Context.NESTED, "test-value");
  }

  /**
   * Coder that consumes more bytes while decoding than required.
   *
   * <p>Reads one extra byte after delegating to the parent decoder.
   */
  public static class BadCoderThatConsumesMoreBytes extends NonDeterministicCoder {
    @Override
    public String decode(InputStream inStream, Context context) throws IOException {
      String value = super.decode(inStream, context);
      inStream.read();
      return value;
    }
  }

  /** Over-consuming decoders must be caught by the encode/decode harness. */
  @Test
  public void testCoderWhichConsumesMoreBytesThanItProducesFail() throws IOException {
    try {
      BadCoderThatConsumesMoreBytes coder = new BadCoderThatConsumesMoreBytes();
      byte[] bytes = CoderProperties.encode(coder, Context.NESTED, "TestData");
      CoderProperties.decode(coder, Context.NESTED, bytes);
      Assert.fail("Expected Assertion Error");
    } catch (AssertionError error) {
      assertThat(error.getMessage(),
          CoreMatchers.containsString("consumed bytes equal to encoded bytes"));
    }
  }
}
package src.usi;

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import resources.src.usi.MainHelper;
import src.usi.configuration.ConfigurationManager;
import src.usi.configuration.ExperimentManager;
import src.usi.configuration.PathsManager;
import src.usi.gui.GUIParser;
import src.usi.gui.functionality.GUIFunctionality_refine;
import src.usi.gui.functionality.GUIFunctionality_search;
import src.usi.gui.functionality.GUIFunctionality_validate;
import src.usi.gui.functionality.instance.Instance_GUI_pattern;
import src.usi.gui.functionality.instance.Instance_GUI_patternWriter;
import src.usi.gui.functionality.instance.Instance_window;
import src.usi.gui.structure.Action_widget;
import src.usi.gui.structure.GUI;
import src.usi.gui.structure.Input_widget;
import src.usi.gui.structure.Selectable_widget;
import src.usi.gui.structure.Window;
import src.usi.pattern.GUIPatternParser;
import src.usi.pattern.Patterns;
import src.usi.pattern.structure.GUI_Pattern;
import src.usi.pattern.structure.Pattern_action_widget;
import src.usi.pattern.structure.Pattern_input_widget;
import src.usi.pattern.structure.Pattern_selectable_widget;
import src.usi.testcase.GUITestCaseResult;
import src.usi.xml.XMLUtil;

/**
 * Description : Functional Test Script
 *
 * <p>Experiment driver: loads a GUI model and GUI patterns, searches the GUI
 * for candidate pattern matches, refines each candidate, then validates the
 * refined instances by generating and running test cases. All stages log to
 * per-run / per-match files under a freshly created results folder.
 *
 * @author usi
 */
public class Main extends MainHelper {

	// Entry point: delegates to the instance method used by the test harness.
	public static void main(final Object[] args) throws Exception {

		final Main r = new Main();
		r.testMain(args);
	}

	/**
	 * Script Name : <b>Main</b> Generated : <b>Jul 7, 2016 7:55:06 AM</b>
	 * Description : Functional Test Script Original Host : WinNT Version 6.1
	 * Build 7601 (S)
	 *
	 * <p>Expects exactly one argument: the project root path. Runs the full
	 * search / refine / validate pipeline, reporting statement and branch
	 * coverage after the refinement and validation phases.
	 *
	 * @since 2016/07/07
	 * @author usi
	 * @throws Exception
	 */
	public void testMain(final Object[] args) throws Exception {

		final long beginTime = System.currentTimeMillis();

		// Exactly one argument (project root) is required.
		switch (args.length) {
		case 1:
			PathsManager.setProjectRoot(args[0].toString());
			break;
		default:
			System.out.println("Error: wrong number of parameters.");
			return;
		}

		ConfigurationManager.load();
		ExperimentManager.init();

		final String out_folder = ExperimentManager.createResultsFolder();
		// we set the stdout as a log file
		final PrintStream generallog = new PrintStream(new FileOutputStream(out_folder + "out.log"));
		final PrintStream generalerr = new PrintStream(new FileOutputStream(out_folder + "out.err"));
		try {
			System.setOut(generallog);
			System.setErr(generalerr);
			ExperimentManager.moveFile(ConfigurationManager.getLoadedFilePath(), out_folder);
			// GUI
			final GUI gui = GUIParser.parse(XMLUtil.read(ConfigurationManager.getGUIFile()));
			ExperimentManager.moveFile(ConfigurationManager.getGUIFile(), out_folder);
			// guipatterns: load every pattern definition listed in the
			// Patterns enum; pattern names are the file names minus ".xml".
			final GUI_Pattern[] patterns = new GUI_Pattern[Patterns.values().length];
			final String[] patterns_name = new String[Patterns.values().length];
			for (int x = 0; x < Patterns.values().length; x++) {
				patterns[x] = GUIPatternParser.parse(XMLUtil.read(PathsManager
						.getGUIPatternsFolder() + Patterns.values()[x].name));
				patterns_name[x] = Patterns.values()[x].name.replace(".xml", "");
			}

			// final Map<GUI_Pattern, List<Instance_GUI_pattern>>
			// candidate_instances = new HashMap<>();
			// gui_app is a second copy of the GUI model; matched widgets are
			// removed from it so they cannot be re-matched by later patterns.
			final GUI gui_app = GUIParser.parse(XMLUtil.read(ConfigurationManager.getGUIFile()));
			final Map<GUI_Pattern, List<Instance_GUI_pattern>> true_instances = new HashMap<>();

			for (int x = 0; x < patterns.length; x++) {
				final GUI_Pattern pattern = patterns[x];
				// search
				final GUIFunctionality_search searcher = new GUIFunctionality_search(gui_app);
				final List<Instance_GUI_pattern> new_instances = searcher.match(pattern);
				// candidate_instances.put(pattern, new_instances);
				System.out.println("FOUND " + new_instances.size() + " CANDIDATE MATCHES FOR "
						+ patterns_name[x]);

				// refine
				final List<Instance_GUI_pattern> refined_instances = new ArrayList<>();
				true_instances.put(pattern, refined_instances);

				for (int y = 0; y < new_instances.size(); y++) {
					final String match_folder = out_folder + File.separator + patterns_name[x]
							+ "_match_" + (y + 1) + File.separator;
					ExperimentManager.createFolder(match_folder);
					// we set the stdout as a log file
					final PrintStream reflog = new PrintStream(new FileOutputStream(match_folder
							+ "refinement.log"));
					System.setOut(reflog);

					final Instance_GUI_pattern inst = new_instances.get(y);
					// we need to rebuild the instance
					// NOTE(review): the candidate was matched against gui_app
					// (which gets widgets removed); the rebuild below re-maps
					// every window/widget onto the pristine "gui" model.
					final GUI newgui = new GUI();
					final Instance_GUI_pattern instance = new Instance_GUI_pattern(newgui,
							inst.getGuipattern());

					for (final Instance_window iiww : inst.getWindows()) {
						final Instance_window newiw = new Instance_window(iiww.getPattern(),
								gui.getWindow(iiww.getInstance().getId()));
						instance.addWindow(newiw);
						if (!newgui.containsWindow(iiww.getInstance().getId())) {
							newgui.addWindow(gui.getWindow(iiww.getInstance().getId()));
						}
						// Re-map action-widget bindings onto the pristine GUI.
						for (final Pattern_action_widget paw : iiww.getPattern().getActionWidgets()) {
							final List<Action_widget> mapping = new ArrayList<>();
							for (final Action_widget aw : iiww.getAWS_for_PAW(paw.getId())) {
								mapping.add((Action_widget) gui.getWindow(
										iiww.getInstance().getId()).getWidget(aw.getId()));
							}
							newiw.addAW_mapping(paw, mapping);
						}
						// Re-map input-widget bindings.
						for (final Pattern_input_widget piw : iiww.getPattern().getInputWidgets()) {
							final List<Input_widget> mapping = new ArrayList<>();
							for (final Input_widget iw : iiww.getIWS_for_PIW(piw.getId())) {
								mapping.add((Input_widget) gui
										.getWindow(iiww.getInstance().getId())
										.getWidget(iw.getId()));
							}
							newiw.addIW_mapping(piw, mapping);
						}
						// Re-map selectable-widget bindings.
						for (final Pattern_selectable_widget psw : iiww.getPattern()
								.getSelectableWidgets()) {
							final List<Selectable_widget> mapping = new ArrayList<>();
							for (final Selectable_widget sw : iiww.getSWS_for_PSW(psw.getId())) {
								mapping.add((Selectable_widget) gui.getWindow(
										iiww.getInstance().getId()).getWidget(sw.getId()));
							}
							newiw.addSW_mapping(psw, mapping);
						}
					}
					// Copy the static edges of the matched sub-GUI.
					for (final Action_widget aw : inst.getGui().getAction_widgets()) {
						for (final Window w : inst.getGui().getStaticForwardLinks(aw.getId())) {
							newgui.addStaticEdge(aw.getId(), w.getId());
						}
					}

					final GUIFunctionality_refine refiner = new GUIFunctionality_refine(instance,
							gui);
					final Instance_GUI_pattern refined_instance = refiner.refine();
					// Added even when null so list indices keep matching the
					// candidate index y (used again in the validation phase).
					refined_instances.add(refined_instance);
					if (refined_instance != null) {
						XMLUtil.save(match_folder + File.separator + "match.xml",
								Instance_GUI_patternWriter
										.writeInstanceGUIPattern(refined_instance));
						// we remove the instance elements from the gui so that
						// they
						// cannot be added to a match of another pattern
						for (final Instance_window iw : refined_instance.getWindows()) {
							final Window w = gui_app.getWindow(iw.getInstance().getId());
							if (w == null) {
								continue;
							}
							for (final Pattern_action_widget paw : iw.getPattern()
									.getActionWidgets()) {
								for (final Action_widget aw : iw.getAWS_for_PAW(paw.getId())) {
									// Drop both dynamic and static edges
									// before removing the widget itself.
									for (final Window tw : gui_app.getDynamicForwardLinks(aw
											.getId())) {
										gui_app.removeDynamicEdge(aw.getId(), tw.getId());
									}
									for (final Window tw : gui_app
											.getStaticForwardLinks(aw.getId())) {
										gui_app.removeStaticEdge(aw.getId(), tw.getId());
									}
									w.removeWidget(aw.getId());
								}
							}
							for (final Pattern_input_widget piw : iw.getPattern().getInputWidgets()) {
								for (final Input_widget iiw : iw.getIWS_for_PIW(piw.getId())) {
									w.removeWidget(iiw.getId());
								}
							}
							for (final Pattern_selectable_widget psw : iw.getPattern()
									.getSelectableWidgets()) {
								for (final Selectable_widget sw : iw.getSWS_for_PSW(psw.getId())) {
									w.removeWidget(sw.getId());
								}
							}
						}
					}
					reflog.close();
				}
				System.out.println(patterns_name[x] + ": FOUND " + refined_instances.size()
						+ " true instances.");
			}
			System.setOut(generallog);
			// refinement coverage
			final double[] cov_before = ExperimentManager.getCoverage();
			System.out.println("COVERAGE ACHIEVED DURING REFINEMENT:" + System.lineSeparator()
					+ "statement " + cov_before[0] + ", branch " + cov_before[1]);
			// reset coverage
			ExperimentManager.resetCoverage();

			// validate: generate and run test cases for every refined instance
			for (int x = 0; x < patterns.length; x++) {
				final GUI_Pattern pattern = patterns[x];
				for (int y = 0; y < true_instances.get(pattern).size(); y++) {
					// if (y != 4) {
					// continue;
					// }
					if (true_instances.get(pattern).get(y) != null) {
						final String match_folder = out_folder + File.separator + patterns_name[x]
								+ "_match_" + (y + 1) + File.separator + "testcases"
								+ File.separator;
						ExperimentManager.createFolder(match_folder);
						final PrintStream vallog = new PrintStream(new FileOutputStream(
								match_folder + "validator.log"));
						System.setOut(vallog);
						final Instance_GUI_pattern instance = true_instances.get(pattern).get(y);
						final GUIFunctionality_validate validator = new GUIFunctionality_validate(
								instance, gui);
						final List<GUITestCaseResult> results = validator.validate();
						ExperimentManager.dumpTCresults(match_folder, results, gui);
						vallog.close();
					}
				}
			}
			System.setOut(generallog);
			// validation coverage
			final double[] cov_after = ExperimentManager.getCoverage();
			System.out.println("COVERAGE ACHIEVED DURING VALIDATION:" + System.lineSeparator()
					+ "statement " + cov_after[0] + ", branch " + cov_after[1]);

			final long tottime = (System.currentTimeMillis() - beginTime) / 1000;
			System.out.println("MAIN ELAPSED TIME: " + tottime);
			generallog.close();
			ExperimentManager.moveFile(System.getProperty("user.dir") + File.separator + "aut.log",
					out_folder);
			ExperimentManager.cleanUP();
		} catch (final Exception e) {
			System.setOut(generallog);
			System.out.println("ERROR");
			e.printStackTrace();
		} finally {
			ExperimentManager.cleanUP();
		}
	}
}
/*
 * Copyright 2012 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.appengine.tools.cloudstorage;

import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Container class for holding options for creating Google Storage files.
 *
 * <p>To construct {@code GcsFileOptions}, first create a
 * {@link GcsFileOptions.Builder}. The builder is mutable and each of the
 * parameters can be set (any unset parameters will fall back to the defaults).
 * The {@code Builder} can then be used to create an immutable
 * {@code GcsFileOptions} object.
 *
 * <p>For default {@code GcsFileOptions} use {@link #getDefaultInstance}.
 * Default settings are subject to change release to release. Currently the
 * default is to leave every option unspecified. If you require specific
 * settings, explicitly create an instance of {@code GcsFileOptions} with the
 * required settings.
 *
 * @see <a href="http://code.google.com/apis/storage/">Google Storage API</a>
 */
public final class GcsFileOptions implements Serializable {

  private static final long serialVersionUID = -7350111525144535653L;

  // All of these may be null, meaning "use the Google Storage default".
  private final String mimeType;
  private final String acl;
  private final String cacheControl;
  private final String contentEncoding;
  private final String contentDisposition;
  private final ImmutableMap<String, String> userMetadata;

  // Shared instance with every option left unset.
  private static final GcsFileOptions DEFAULT_INSTANCE =
      new GcsFileOptions(new Builder());

  private GcsFileOptions(Builder b) {
    mimeType = b.mimeType;
    acl = b.acl;
    cacheControl = b.cacheControl;
    contentEncoding = b.contentEncoding;
    contentDisposition = b.contentDisposition;
    // Snapshot the builder's map so later builder mutations cannot leak in.
    userMetadata = ImmutableMap.copyOf(b.userMetadata);
  }

  /**
   * Retrieve an instance with the default parameters
   */
  public static GcsFileOptions getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  /**
   * @return The mime type for the file. (May be null)
   */
  public String getMimeType() {
    return mimeType;
  }

  /**
   * @return The acl for the file. (May be null)
   */
  public String getAcl() {
    return acl;
  }

  /**
   * @return The cache control string for the file. (May be null)
   */
  public String getCacheControl() {
    return cacheControl;
  }

  /**
   * @return The content encoding of the file. (May be null)
   */
  public String getContentEncoding() {
    return contentEncoding;
  }

  /**
   * @return The content disposition of the file. (May be null)
   */
  public String getContentDisposition() {
    return contentDisposition;
  }

  /**
   * @return Any user data associated with the file. (This map is unmodifiable)
   */
  public Map<String, String> getUserMetadata() {
    return userMetadata;
  }

  @Override
  public String toString() {
    // Builds exactly the same representation the previous concatenation form
    // produced: set options only, comma-separated, in declaration order.
    StringBuilder sb = new StringBuilder("GcsFileOptions [");
    if (mimeType != null) {
      sb.append("mimeType=").append(mimeType).append(", ");
    }
    if (acl != null) {
      sb.append("acl=").append(acl).append(", ");
    }
    if (cacheControl != null) {
      sb.append("cacheControl=").append(cacheControl).append(", ");
    }
    if (contentEncoding != null) {
      sb.append("contentEncoding=").append(contentEncoding).append(", ");
    }
    if (contentDisposition != null) {
      sb.append("contentDisposition=").append(contentDisposition).append(", ");
    }
    if (userMetadata != null) {
      sb.append("userMetadata=").append(userMetadata);
    }
    return sb.append("]").toString();
  }

  @Override
  public final boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GcsFileOptions that = (GcsFileOptions) o;
    return Objects.equal(mimeType, that.mimeType)
        && Objects.equal(acl, that.acl)
        && Objects.equal(cacheControl, that.cacheControl)
        && Objects.equal(contentEncoding, that.contentEncoding)
        && Objects.equal(contentDisposition, that.contentDisposition)
        && Objects.equal(userMetadata, that.userMetadata);
  }

  @Override
  public final int hashCode() {
    return Objects.hashCode(
        mimeType, acl, cacheControl, contentEncoding, contentDisposition, userMetadata);
  }

  /**
   * A builder of GcsFileOptions.
   */
  public static final class Builder {

    private String mimeType;
    private String acl;
    private String cacheControl;
    private String contentEncoding;
    private String contentDisposition;
    private final Map<String, String> userMetadata = new HashMap<String, String>();

    public Builder() {}

    /**
     * Sets the mime type of the object. If not set, default Google Storage mime type is used when
     * served out of Google Storage.
     * {@link "http://code.google.com/apis/storage/docs/reference-headers.html#contenttype"}
     *
     * @param mimeType of the Google Storage object.
     * @return this builder for chaining.
     */
    public Builder mimeType(String mimeType) {
      this.mimeType = checkNotEmpty(mimeType, "MIME type");
      return this;
    }

    /**
     * Sets the acl of the object. If not set, defaults to none (i.e., bucket default).
     * {@link "http://code.google.com/apis/storage/docs/accesscontrol.html"}
     *
     * @param acl to use for the Google Storage object.
     * @return this builder for chaining.
     */
    public Builder acl(String acl) {
      this.acl = checkNotEmpty(acl, "ACL");
      return this;
    }

    /**
     * Sets the cache control for the object. If not set, default value is used.
     * {@link "http://code.google.com/apis/storage/docs/reference-headers.html#cachecontrol"}
     *
     * @param cacheControl to use for the Google Storage object.
     * @return this builder for chaining.
     */
    public Builder cacheControl(String cacheControl) {
      this.cacheControl = checkNotEmpty(cacheControl, "cache control");
      return this;
    }

    /**
     * Sets the content encoding for the object. If not set, default value is used.
     * {@link "http://code.google.com/apis/storage/docs/reference-headers.html#contentencoding"}
     *
     * @param contentEncoding to use for the Google Storage object.
     * @return this builder for chaining.
     */
    public Builder contentEncoding(String contentEncoding) {
      this.contentEncoding = checkNotEmpty(contentEncoding, "content encoding");
      return this;
    }

    /**
     * Sets the content disposition for the object. If not set, default value is used.
     * {@link "http://code.google.com/apis/storage/docs/reference-headers.html#contentdisposition"}
     *
     * @param contentDisposition to use for the Google Storage object.
     * @return this builder for chaining.
     */
    public Builder contentDisposition(String contentDisposition) {
      this.contentDisposition = checkNotEmpty(contentDisposition, "content disposition");
      return this;
    }

    /**
     * Adds user specific metadata that will be added to object headers when served through Google
     * Storage: {@link "http://code.google.com/apis/storage/docs/reference-headers.html#xgoogmeta"}
     * Each entry will be prefixed with x-goog-meta- when serving out. For example, if you add
     * 'foo'->'bar' entry to userMetadata map, it will be served out as a header: 'x-goog-meta-foo:
     * bar'.
     *
     * @param key metadata/header name suffix
     * @param value metadata/header value
     * @return this builder for chaining.
     */
    public Builder addUserMetadata(String key, String value) {
      checkNotEmpty(key, "key");
      checkNotEmpty(value, "value");
      userMetadata.put(key, value);
      return this;
    }

    // Rejects null or empty strings; returns the validated value unchanged.
    private static String checkNotEmpty(String value, String what) {
      Preconditions.checkNotNull(value, "Null %s", what);
      Preconditions.checkArgument(!value.isEmpty(), "Empty %s", what);
      return value;
    }

    /**
     * Create an instance of GcsFileOptions with the parameters set in this builder
     *
     * @return a new instance of GcsFileOptions
     */
    public GcsFileOptions build() {
      return new GcsFileOptions(this);
    }
  }
}
package org.knowm.xchange.bitfinex.service; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import java.io.IOException; import java.math.BigDecimal; import java.math.MathContext; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import org.knowm.xchange.bitfinex.v1.BitfinexOrderType; import org.knowm.xchange.bitfinex.v1.BitfinexUtils; import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexAccountFeesResponse; import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexBalancesResponse; import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexDepositWithdrawalHistoryResponse; import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexTradingFeeResponse; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexDepth; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexLendLevel; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexLevel; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexSymbolDetail; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexTicker; import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexTrade; import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexAccountInfosResponse; import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexOrderFlags; import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexOrderStatusResponse; import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexTradeResponse; import org.knowm.xchange.bitfinex.v2.dto.account.Movement; import org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexPublicTrade; import 
org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexTickerFundingCurrency; import org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexTickerTraidingPair; import org.knowm.xchange.currency.Currency; import org.knowm.xchange.currency.CurrencyPair; import org.knowm.xchange.dto.Order; import org.knowm.xchange.dto.Order.OrderStatus; import org.knowm.xchange.dto.Order.OrderType; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.account.Fee; import org.knowm.xchange.dto.account.FundingRecord; import org.knowm.xchange.dto.account.Wallet; import org.knowm.xchange.dto.marketdata.OrderBook; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.marketdata.Trade; import org.knowm.xchange.dto.marketdata.Trades; import org.knowm.xchange.dto.marketdata.Trades.TradeSortType; import org.knowm.xchange.dto.meta.CurrencyMetaData; import org.knowm.xchange.dto.meta.CurrencyPairMetaData; import org.knowm.xchange.dto.meta.ExchangeMetaData; import org.knowm.xchange.dto.trade.FixedRateLoanOrder; import org.knowm.xchange.dto.trade.FloatingRateLoanOrder; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.MarketOrder; import org.knowm.xchange.dto.trade.OpenOrders; import org.knowm.xchange.dto.trade.StopOrder; import org.knowm.xchange.dto.trade.UserTrade; import org.knowm.xchange.dto.trade.UserTrades; import org.knowm.xchange.utils.DateUtils; import org.knowm.xchange.utils.jackson.CurrencyPairDeserializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public final class BitfinexAdapters { public static final Logger log = LoggerFactory.getLogger(BitfinexAdapters.class); private static final ObjectMapper mapper = new ObjectMapper(); private static final AtomicBoolean warnedStopLimit = new AtomicBoolean(); private BitfinexAdapters() {} /** * Each element in the response array contains a set of currencies that are at a given fee tier. 
* The API returns the fee per currency in each tier and does not make any promises that they are * all the same, so this adapter will use the fee per currency instead of the fee per tier. */ public static Map<CurrencyPair, Fee> adaptDynamicTradingFees( BitfinexTradingFeeResponse[] responses, List<CurrencyPair> currencyPairs) { Map<CurrencyPair, Fee> result = new HashMap<>(); for (BitfinexTradingFeeResponse response : responses) { BitfinexTradingFeeResponse.BitfinexTradingFeeResponseRow[] responseRows = response.getTradingFees(); for (BitfinexTradingFeeResponse.BitfinexTradingFeeResponseRow responseRow : responseRows) { Currency currency = Currency.getInstance(responseRow.getCurrency()); BigDecimal percentToFraction = BigDecimal.ONE.divide(BigDecimal.ONE.scaleByPowerOfTen(2)); Fee fee = new Fee( responseRow.getMakerFee().multiply(percentToFraction), responseRow.getTakerFee().multiply(percentToFraction)); for (CurrencyPair pair : currencyPairs) { // Fee to trade for a currency is the fee to trade currency pairs with this base. // Fee is typically assessed in units counter. 
if (pair.base.equals(currency)) { if (result.put(pair, fee) != null) { throw new IllegalStateException( "Fee for currency pair " + pair + " is overspecified"); } } } } } return result; } public static String adaptBitfinexCurrency(String bitfinexSymbol) { return bitfinexSymbol.toUpperCase(); } public static String adaptOrderType(OrderType type) { switch (type) { case BID: case EXIT_BID: return "buy"; case ASK: case EXIT_ASK: return "sell"; } throw new IllegalArgumentException(String.format("Unexpected type of order: %s", type)); } public static BitfinexOrderType adaptOrderFlagsToType(Set<Order.IOrderFlags> flags) { if (flags.contains(BitfinexOrderFlags.MARGIN)) { if (flags.contains(BitfinexOrderFlags.FILL_OR_KILL)) { return BitfinexOrderType.MARGIN_FILL_OR_KILL; } else if (flags.contains(BitfinexOrderFlags.TRAILING_STOP)) { return BitfinexOrderType.MARGIN_TRAILING_STOP; } else if (flags.contains(BitfinexOrderFlags.STOP)) { return BitfinexOrderType.MARGIN_STOP; } else { return BitfinexOrderType.MARGIN_LIMIT; } } else { if (flags.contains(BitfinexOrderFlags.FILL_OR_KILL)) { return BitfinexOrderType.FILL_OR_KILL; } else if (flags.contains(BitfinexOrderFlags.TRAILING_STOP)) { return BitfinexOrderType.TRAILING_STOP; } else if (flags.contains(BitfinexOrderFlags.STOP)) { return BitfinexOrderType.STOP; } else { return BitfinexOrderType.LIMIT; } } } public static CurrencyPair adaptCurrencyPair(String bitfinexSymbol) { String tradableIdentifier; String transactionCurrency; int startIndex = bitfinexSymbol.startsWith("t") && Character.isUpperCase(bitfinexSymbol.charAt(1)) ? 
1 : 0; if (bitfinexSymbol.contains(":")) { // ie 'dusk:usd' or 'btc:cnht' int idx = bitfinexSymbol.indexOf(":"); tradableIdentifier = bitfinexSymbol.substring(startIndex, idx); transactionCurrency = bitfinexSymbol.substring(idx + 1); } else { tradableIdentifier = bitfinexSymbol.substring(startIndex, startIndex + 3); transactionCurrency = bitfinexSymbol.substring(startIndex + 3); } return new CurrencyPair( adaptBitfinexCurrency(tradableIdentifier), adaptBitfinexCurrency(transactionCurrency)); } public static OrderStatus adaptOrderStatus(BitfinexOrderStatusResponse order) { if (order.isCancelled()) return OrderStatus.CANCELED; else if (order.getExecutedAmount().compareTo(BigDecimal.ZERO) == 0) return OrderStatus.NEW; else if (order.getExecutedAmount().compareTo(order.getOriginalAmount()) < 0) return OrderStatus.PARTIALLY_FILLED; else if (order.getExecutedAmount().compareTo(order.getOriginalAmount()) == 0) return OrderStatus.FILLED; else return null; } public static String adaptCurrencyPair(CurrencyPair pair) { return BitfinexUtils.toPairString(pair); } public static OrderBook adaptOrderBook(BitfinexDepth btceDepth, CurrencyPair currencyPair) { OrdersContainer asksOrdersContainer = adaptOrders(btceDepth.getAsks(), currencyPair, OrderType.ASK); OrdersContainer bidsOrdersContainer = adaptOrders(btceDepth.getBids(), currencyPair, OrderType.BID); return new OrderBook( new Date(Math.max(asksOrdersContainer.getTimestamp(), bidsOrdersContainer.getTimestamp())), asksOrdersContainer.getLimitOrders(), bidsOrdersContainer.getLimitOrders()); } public static OrdersContainer adaptOrders( BitfinexLevel[] bitfinexLevels, CurrencyPair currencyPair, OrderType orderType) { BigDecimal maxTimestamp = new BigDecimal(Long.MIN_VALUE); List<LimitOrder> limitOrders = new ArrayList<>(bitfinexLevels.length); for (BitfinexLevel bitfinexLevel : bitfinexLevels) { if (bitfinexLevel.getTimestamp().compareTo(maxTimestamp) > 0) { maxTimestamp = bitfinexLevel.getTimestamp(); } Date timestamp = 
convertBigDecimalTimestampToDate(bitfinexLevel.getTimestamp()); limitOrders.add( adaptOrder( bitfinexLevel.getAmount(), bitfinexLevel.getPrice(), currencyPair, orderType, timestamp)); } long maxTimestampInMillis = maxTimestamp.multiply(new BigDecimal(1000L)).longValue(); return new OrdersContainer(maxTimestampInMillis, limitOrders); } public static LimitOrder adaptOrder( BigDecimal originalAmount, BigDecimal price, CurrencyPair currencyPair, OrderType orderType, Date timestamp) { return new LimitOrder(orderType, originalAmount, currencyPair, "", timestamp, price); } public static List<FixedRateLoanOrder> adaptFixedRateLoanOrders( BitfinexLendLevel[] orders, String currency, String orderType, String id) { List<FixedRateLoanOrder> loanOrders = new ArrayList<>(orders.length); for (BitfinexLendLevel order : orders) { if ("yes".equalsIgnoreCase(order.getFrr())) { continue; } // Bid orderbook is reversed order. Insert at reversed indices if (orderType.equalsIgnoreCase("loan")) { loanOrders.add( 0, adaptFixedRateLoanOrder( currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate())); } else { loanOrders.add( adaptFixedRateLoanOrder( currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate())); } } return loanOrders; } public static FixedRateLoanOrder adaptFixedRateLoanOrder( String currency, BigDecimal amount, int dayPeriod, String direction, String id, BigDecimal rate) { OrderType orderType = direction.equalsIgnoreCase("loan") ? OrderType.BID : OrderType.ASK; return new FixedRateLoanOrder(orderType, currency, amount, dayPeriod, id, null, rate); } public static List<FloatingRateLoanOrder> adaptFloatingRateLoanOrders( BitfinexLendLevel[] orders, String currency, String orderType, String id) { List<FloatingRateLoanOrder> loanOrders = new ArrayList<>(orders.length); for (BitfinexLendLevel order : orders) { if ("no".equals(order.getFrr())) { continue; } // Bid orderbook is reversed order. 
Insert at reversed indices if (orderType.equalsIgnoreCase("loan")) { loanOrders.add( 0, adaptFloatingRateLoanOrder( currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate())); } else { loanOrders.add( adaptFloatingRateLoanOrder( currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate())); } } return loanOrders; } public static FloatingRateLoanOrder adaptFloatingRateLoanOrder( String currency, BigDecimal amount, int dayPeriod, String direction, String id, BigDecimal rate) { OrderType orderType = direction.equalsIgnoreCase("loan") ? OrderType.BID : OrderType.ASK; return new FloatingRateLoanOrder(orderType, currency, amount, dayPeriod, id, null, rate); } public static Trade adaptTrade(BitfinexTrade trade, CurrencyPair currencyPair) { OrderType orderType = trade.getType().equals("buy") ? OrderType.BID : OrderType.ASK; BigDecimal amount = trade.getAmount(); BigDecimal price = trade.getPrice(); Date date = DateUtils.fromMillisUtc(trade.getTimestamp() * 1000L); // Bitfinex uses Unix timestamps final String tradeId = String.valueOf(trade.getTradeId()); return new Trade.Builder() .type(orderType) .originalAmount(amount) .currencyPair(currencyPair) .price(price) .timestamp(date) .id(tradeId) .build(); } public static Trades adaptTrades(BitfinexTrade[] trades, CurrencyPair currencyPair) { List<Trade> tradesList = new ArrayList<>(trades.length); long lastTradeId = 0; for (BitfinexTrade trade : trades) { long tradeId = trade.getTradeId(); if (tradeId > lastTradeId) { lastTradeId = tradeId; } tradesList.add(adaptTrade(trade, currencyPair)); } return new Trades(tradesList, lastTradeId, TradeSortType.SortByID); } public static Ticker adaptTicker(BitfinexTicker bitfinexTicker, CurrencyPair currencyPair) { BigDecimal last = bitfinexTicker.getLast_price(); BigDecimal bid = bitfinexTicker.getBid(); BigDecimal bidSize = bitfinexTicker.getBidSize(); BigDecimal ask = bitfinexTicker.getAsk(); BigDecimal askSize = bitfinexTicker.getAskSize(); 
BigDecimal high = bitfinexTicker.getHigh(); BigDecimal low = bitfinexTicker.getLow(); BigDecimal volume = bitfinexTicker.getVolume(); Date timestamp = DateUtils.fromMillisUtc((long) (bitfinexTicker.getTimestamp() * 1000L)); return new Ticker.Builder() .currencyPair(currencyPair) .last(last) .bid(bid) .bidSize(bidSize) .ask(ask) .askSize(askSize) .high(high) .low(low) .volume(volume) .timestamp(timestamp) .build(); } public static List<Wallet> adaptWallets(BitfinexBalancesResponse[] response) { Map<String, Map<String, BigDecimal[]>> walletsBalancesMap = new HashMap<>(); // for each currency we have multiple balances types: exchange, trading, deposit. // each of those may be partially frozen/available for (BitfinexBalancesResponse balance : response) { String walletId = balance.getType(); if (!walletsBalancesMap.containsKey(walletId)) { walletsBalancesMap.put(walletId, new HashMap<>()); } Map<String, BigDecimal[]> balancesByCurrency = walletsBalancesMap.get(walletId); // {total, available} String currencyName = adaptBitfinexCurrency(balance.getCurrency()); BigDecimal[] balanceDetail = balancesByCurrency.get(currencyName); if (balanceDetail == null) { balanceDetail = new BigDecimal[] {balance.getAmount(), balance.getAvailable()}; } else { balanceDetail[0] = balanceDetail[0].add(balance.getAmount()); balanceDetail[1] = balanceDetail[1].add(balance.getAvailable()); } balancesByCurrency.put(currencyName, balanceDetail); } List<Wallet> wallets = new ArrayList<>(); for (Entry<String, Map<String, BigDecimal[]>> walletData : walletsBalancesMap.entrySet()) { Map<String, BigDecimal[]> balancesByCurrency = walletData.getValue(); List<Balance> balances = new ArrayList<>(balancesByCurrency.size()); for (Entry<String, BigDecimal[]> entry : balancesByCurrency.entrySet()) { String currencyName = entry.getKey(); BigDecimal[] balanceDetail = entry.getValue(); BigDecimal balanceTotal = balanceDetail[0]; BigDecimal balanceAvailable = balanceDetail[1]; balances.add( new 
Balance(Currency.getInstance(currencyName), balanceTotal, balanceAvailable)); } wallets.add(Wallet.Builder.from(balances).id(walletData.getKey()).build()); } return wallets; } public static OpenOrders adaptOrders(BitfinexOrderStatusResponse[] activeOrders) { List<LimitOrder> limitOrders = new ArrayList<>(); List<Order> hiddenOrders = new ArrayList<>(); for (BitfinexOrderStatusResponse order : activeOrders) { OrderType orderType = order.getSide().equalsIgnoreCase("buy") ? OrderType.BID : OrderType.ASK; OrderStatus status = adaptOrderStatus(order); CurrencyPair currencyPair = adaptCurrencyPair(order.getSymbol()); Date timestamp = convertBigDecimalTimestampToDate(order.getTimestamp()); Supplier<MarketOrder> marketOrderCreator = () -> new MarketOrder( orderType, order.getOriginalAmount(), currencyPair, String.valueOf(order.getId()), timestamp, order.getAvgExecutionPrice(), order.getExecutedAmount(), null, status); Supplier<LimitOrder> limitOrderCreator = () -> new LimitOrder( orderType, order.getOriginalAmount(), currencyPair, String.valueOf(order.getId()), timestamp, order.getPrice(), order.getAvgExecutionPrice(), order.getExecutedAmount(), null, status); Supplier<StopOrder> stopOrderCreator = () -> new StopOrder( orderType, order.getOriginalAmount(), currencyPair, String.valueOf(order.getId()), timestamp, order.getPrice(), null, order.getAvgExecutionPrice(), order.getExecutedAmount(), status); LimitOrder limitOrder = null; StopOrder stopOrder = null; MarketOrder marketOrder = null; Optional<BitfinexOrderType> bitfinexOrderType = Arrays.stream(BitfinexOrderType.values()) .filter(v -> v.getValue().equals(order.getType())) .findFirst(); if (bitfinexOrderType.isPresent()) { switch (bitfinexOrderType.get()) { case FILL_OR_KILL: limitOrder = limitOrderCreator.get(); limitOrder.addOrderFlag(BitfinexOrderFlags.FILL_OR_KILL); break; case MARGIN_FILL_OR_KILL: limitOrder = limitOrderCreator.get(); limitOrder.addOrderFlag(BitfinexOrderFlags.FILL_OR_KILL); 
limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN); break; case MARGIN_LIMIT: limitOrder = limitOrderCreator.get(); limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN); break; case MARGIN_STOP: stopOrder = stopOrderCreator.get(); stopOrder.addOrderFlag(BitfinexOrderFlags.STOP); stopOrder.addOrderFlag(BitfinexOrderFlags.MARGIN); break; case MARGIN_STOP_LIMIT: stopLimitWarning(); stopOrder = stopOrderCreator.get(); stopOrder.addOrderFlag(BitfinexOrderFlags.STOP); stopOrder.addOrderFlag(BitfinexOrderFlags.MARGIN); break; case MARGIN_TRAILING_STOP: limitOrder = limitOrderCreator.get(); limitOrder.addOrderFlag(BitfinexOrderFlags.TRAILING_STOP); limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN); break; case STOP: stopOrder = stopOrderCreator.get(); stopOrder.addOrderFlag(BitfinexOrderFlags.STOP); break; case STOP_LIMIT: stopLimitWarning(); stopOrder = stopOrderCreator.get(); stopOrder.addOrderFlag(BitfinexOrderFlags.STOP); break; case TRAILING_STOP: limitOrder = limitOrderCreator.get(); limitOrder.addOrderFlag(BitfinexOrderFlags.TRAILING_STOP); break; case LIMIT: limitOrder = limitOrderCreator.get(); break; case MARGIN_MARKET: case MARKET: marketOrder = marketOrderCreator.get(); break; default: log.warn( "Unhandled Bitfinex order type [{}]. Defaulting to limit order", order.getType()); limitOrder = limitOrderCreator.get(); break; } } else { log.warn("Unknown Bitfinex order type [{}]. Defaulting to limit order", order.getType()); limitOrder = limitOrderCreator.get(); } if (limitOrder != null) { limitOrders.add(limitOrder); } else if (stopOrder != null) { hiddenOrders.add(stopOrder); } else if (marketOrder != null) { hiddenOrders.add(marketOrder); } } return new OpenOrders(limitOrders, hiddenOrders); } private static void stopLimitWarning() { if (warnedStopLimit.compareAndSet(false, true)) { log.warn( "Found a stop-limit order. Bitfinex v1 API does not return limit prices for stop-limit " + "orders so these are returned as stop-at-market orders. 
This warning will only appear " + "once."); } } public static UserTrades adaptTradeHistory(BitfinexTradeResponse[] trades, String symbol) { List<UserTrade> pastTrades = new ArrayList<>(trades.length); CurrencyPair currencyPair = adaptCurrencyPair(symbol); for (BitfinexTradeResponse trade : trades) { OrderType orderType = trade.getType().equalsIgnoreCase("buy") ? OrderType.BID : OrderType.ASK; Date timestamp = convertBigDecimalTimestampToDate(trade.getTimestamp()); final BigDecimal fee = trade.getFeeAmount() == null ? null : trade.getFeeAmount().negate(); pastTrades.add( new UserTrade.Builder() .type(orderType) .originalAmount(trade.getAmount()) .currencyPair(currencyPair) .price(trade.getPrice()) .timestamp(timestamp) .id(trade.getTradeId()) .orderId(trade.getOrderId()) .feeAmount(fee) .feeCurrency(Currency.getInstance(trade.getFeeCurrency())) .build()); } return new UserTrades(pastTrades, TradeSortType.SortByTimestamp); } public static UserTrades adaptTradeHistoryV2( List<org.knowm.xchange.bitfinex.v2.dto.trade.Trade> trades) { List<UserTrade> pastTrades = new ArrayList<>(trades.size()); for (org.knowm.xchange.bitfinex.v2.dto.trade.Trade trade : trades) { OrderType orderType = trade.getExecAmount().signum() >= 0 ? OrderType.BID : OrderType.ASK; BigDecimal amount = trade.getExecAmount().signum() == -1 ? trade.getExecAmount().negate() : trade.getExecAmount(); final BigDecimal fee = trade.getFee() != null ? 
trade.getFee().negate() : null; pastTrades.add( new UserTrade.Builder() .type(orderType) .originalAmount(amount) .currencyPair(adaptCurrencyPair(trade.getSymbol())) .price(trade.getExecPrice()) .timestamp(trade.getTimestamp()) .id(trade.getId()) .orderId(trade.getOrderId()) .feeAmount(fee) .feeCurrency(Currency.getInstance(trade.getFeeCurrency())) .build()); } return new UserTrades(pastTrades, TradeSortType.SortByTimestamp); } private static Date convertBigDecimalTimestampToDate(BigDecimal timestamp) { BigDecimal timestampInMillis = timestamp.multiply(new BigDecimal("1000")); return new Date(timestampInMillis.longValue()); } public static ExchangeMetaData adaptMetaData( List<CurrencyPair> currencyPairs, ExchangeMetaData metaData) { Map<CurrencyPair, CurrencyPairMetaData> pairsMap = metaData.getCurrencyPairs(); Map<Currency, CurrencyMetaData> currenciesMap = metaData.getCurrencies(); // Remove pairs that are no-longer in use pairsMap.keySet().retainAll(currencyPairs); // Remove currencies that are no-longer in use Set<Currency> currencies = currencyPairs.stream() .flatMap(pair -> Stream.of(pair.base, pair.counter)) .collect(Collectors.toSet()); currenciesMap.keySet().retainAll(currencies); // Add missing pairs and currencies for (CurrencyPair c : currencyPairs) { if (!pairsMap.containsKey(c)) { pairsMap.put(c, null); } if (!currenciesMap.containsKey(c.base)) { currenciesMap.put( c.base, new CurrencyMetaData( 2, null)); // When missing, add default meta-data with scale of 2 (Bitfinex's minimal // scale) } if (!currenciesMap.containsKey(c.counter)) { currenciesMap.put(c.counter, new CurrencyMetaData(2, null)); } } return metaData; } /** * Flipped order of arguments to avoid type-erasure clash with {@link #adaptMetaData(List, * ExchangeMetaData)} * * @param exchangeMetaData The exchange metadata provided from bitfinex.json. * @param symbolDetails The symbol data fetced from Bitfinex. * @return The combined result. 
*/ public static ExchangeMetaData adaptMetaData( ExchangeMetaData exchangeMetaData, List<BitfinexSymbolDetail> symbolDetails, Map<CurrencyPair, BigDecimal> lastPrices) { final Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData.getCurrencyPairs(); symbolDetails.parallelStream() .forEach( bitfinexSymbolDetail -> { final CurrencyPair currencyPair = adaptCurrencyPair(bitfinexSymbolDetail.getPair()); // Infer price-scale from last and price-precision BigDecimal last = lastPrices.get(currencyPair); if (last != null) { int pricePercision = bitfinexSymbolDetail.getPrice_precision(); int priceScale = last.scale() + (pricePercision - last.precision()); CurrencyPairMetaData newMetaData = new CurrencyPairMetaData( currencyPairs.get(currencyPair) == null ? null : currencyPairs .get(currencyPair) .getTradingFee(), // Take tradingFee from static metaData if exists bitfinexSymbolDetail.getMinimum_order_size(), bitfinexSymbolDetail.getMaximum_order_size(), priceScale, null); currencyPairs.put(currencyPair, newMetaData); } }); return exchangeMetaData; } public static ExchangeMetaData adaptMetaData( BitfinexAccountFeesResponse accountFeesResponse, ExchangeMetaData metaData) { Map<Currency, CurrencyMetaData> currencies = metaData.getCurrencies(); final Map<Currency, BigDecimal> withdrawFees = accountFeesResponse.getWithdraw(); withdrawFees.forEach( (currency, withdrawalFee) -> { CurrencyMetaData newMetaData = new CurrencyMetaData( // Currency should have at least the scale of the withdrawalFee currencies.get(currency) == null ? 
withdrawalFee.scale() : Math.max(withdrawalFee.scale(), currencies.get(currency).getScale()), withdrawalFee); currencies.put(currency, newMetaData); }); return metaData; } public static ExchangeMetaData adaptMetaData( BitfinexAccountInfosResponse[] bitfinexAccountInfos, ExchangeMetaData exchangeMetaData) { final Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData.getCurrencyPairs(); // lets go with the assumption that the trading fees are common across all trading pairs for // now. // also setting the taker_fee as the trading_fee for now. final CurrencyPairMetaData metaData = new CurrencyPairMetaData( bitfinexAccountInfos[0].getTakerFees().movePointLeft(2), null, null, null, null); currencyPairs.keySet().parallelStream() .forEach( currencyPair -> currencyPairs.merge( currencyPair, metaData, (oldMetaData, newMetaData) -> new CurrencyPairMetaData( newMetaData.getTradingFee(), oldMetaData.getMinimumAmount(), oldMetaData.getMaximumAmount(), oldMetaData.getPriceScale(), oldMetaData.getFeeTiers()))); return exchangeMetaData; } public static List<FundingRecord> adaptFundingHistory(List<Movement> movementHistorys) { final List<FundingRecord> fundingRecords = new ArrayList<>(); for (Movement movement : movementHistorys) { Currency currency = Currency.getInstance(movement.getCurency()); FundingRecord.Type type = movement.getAmount().compareTo(BigDecimal.ZERO) < 0 ? 
FundingRecord.Type.WITHDRAWAL : FundingRecord.Type.DEPOSIT; FundingRecord.Status status = FundingRecord.Status.resolveStatus(movement.getStatus()); if (status == null && movement .getStatus() .equalsIgnoreCase("CANCELED")) // there's a spelling mistake in the protocol status = FundingRecord.Status.CANCELLED; BigDecimal amount = movement.getAmount().abs(); BigDecimal fee = movement.getFees().abs(); if (fee != null && type.isOutflowing()) { // The amount reported form Bitfinex on a withdrawal is without the fee, so it has to be // added to get the full amount withdrawn from the wallet // Deposits don't seem to have fees, but it seems reasonable to assume that the reported // value is the full amount added to the wallet amount = amount.add(fee); } FundingRecord fundingRecordEntry = new FundingRecord( movement.getDestinationAddress(), null, movement.getMtsUpdated(), currency, amount, movement.getId(), movement.getTransactionId(), type, status, null, fee, null); fundingRecords.add(fundingRecordEntry); } return fundingRecords; } public static List<FundingRecord> adaptFundingHistory( BitfinexDepositWithdrawalHistoryResponse[] bitfinexDepositWithdrawalHistoryResponses) { final List<FundingRecord> fundingRecords = new ArrayList<>(); for (BitfinexDepositWithdrawalHistoryResponse responseEntry : bitfinexDepositWithdrawalHistoryResponses) { String address = responseEntry.getAddress(); String description = responseEntry.getDescription(); Currency currency = Currency.getInstance(responseEntry.getCurrency()); FundingRecord.Status status = FundingRecord.Status.resolveStatus(responseEntry.getStatus()); if (status == null && responseEntry .getStatus() .equalsIgnoreCase("CANCELED")) // there's a spelling mistake in the protocol status = FundingRecord.Status.CANCELLED; String txnId = null; if (status == null || !status.equals(FundingRecord.Status.CANCELLED)) { /* sometimes the description looks like this (with the txn hash in it): 
"description":"a9d387cf5d9df58ff2ac4a338e0f050fd3857cf78d1dbca4f33619dc4ccdac82","address":"1Enx... and sometimes like this (with the address in it as well as the txn hash): "description":"3AXVnDapuRiAn73pjKe7gukLSx5813oFyn, txid: aa4057486d5f73747167beb9949a0dfe17b5fc630499a66af075abdaf4986987","address":"3AX... and sometimes when cancelled "description":"3LFVTLFZoDDzLCcLGDDQ7MNkk4YPe26Yva, expired","address":"3LFV... */ String cleanedDescription = description.replace(",", "").replace("txid:", "").trim().toLowerCase(); // Address will only be present for crypto payments. It will be null for all fiat payments if (address != null) { cleanedDescription = cleanedDescription.replace(address.toLowerCase(), "").trim(); } // check its just some hex characters, and if so lets assume its the txn hash if (cleanedDescription.matches("^(0x)?[0-9a-f]+$")) { txnId = cleanedDescription; } } FundingRecord fundingRecordEntry = new FundingRecord( address, responseEntry.getTimestamp(), currency, responseEntry.getAmount(), String.valueOf(responseEntry.getId()), txnId, responseEntry.getType(), status, null, null, description); fundingRecords.add(fundingRecordEntry); } return fundingRecords; } public static class OrdersContainer { private final long timestamp; private final List<LimitOrder> limitOrders; /** * Constructor * * @param timestamp The timestamp for the data fetched. * @param limitOrders The orders. */ public OrdersContainer(long timestamp, List<LimitOrder> limitOrders) { this.timestamp = timestamp; this.limitOrders = limitOrders; } public long getTimestamp() { return timestamp; } public List<LimitOrder> getLimitOrders() { return limitOrders; } } ////// v2 public static String adaptCurrencyPairsToTickersParam(Collection<CurrencyPair> currencyPairs) { return currencyPairs == null || currencyPairs.isEmpty() ? 
"ALL"
        : currencyPairs.stream()
            .map(BitfinexAdapters::adaptCurrencyPair)
            .collect(Collectors.joining(","));
  }

  /**
   * Adapts a v2 trading-pair ticker to the generic {@link Ticker}.
   *
   * @param bitfinexTicker v2 ticker whose symbol carries a one-character market-type prefix
   * @return the adapted ticker (no timestamp: the v2 ticker payload carries none)
   */
  public static Ticker adaptTicker(
      org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexTicker bitfinexTicker) {
    BigDecimal last = bitfinexTicker.getLastPrice();
    BigDecimal bid = bitfinexTicker.getBid();
    BigDecimal bidSize = bitfinexTicker.getBidSize();
    BigDecimal ask = bitfinexTicker.getAsk();
    BigDecimal askSize = bitfinexTicker.getAskSize();
    BigDecimal high = bitfinexTicker.getHigh();
    BigDecimal low = bitfinexTicker.getLow();
    BigDecimal volume = bitfinexTicker.getVolume();
    // Daily change comes back as a fraction; convert to a percentage, 8 significant digits.
    BigDecimal percentageChange =
        bitfinexTicker.getDailyChangePerc().multiply(new BigDecimal("100"), new MathContext(8));
    // substring(1) strips the leading 't'/'f' market-type prefix (e.g. "tBTCUSD" -> "BTCUSD").
    CurrencyPair currencyPair =
        CurrencyPairDeserializer.getCurrencyPairFromString(bitfinexTicker.getSymbol().substring(1));
    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(last)
        .bid(bid)
        .ask(ask)
        .high(high)
        .low(low)
        .volume(volume)
        .bidSize(bidSize)
        .askSize(askSize)
        .percentageChange(percentageChange)
        .build();
  }

  /**
   * Adapts a single v2 public trade to the generic {@link Trade}.
   *
   * @param trade v2 public trade; its amount may be signed (sign encodes direction)
   * @param currencyPair pair the trade belongs to
   * @return the adapted trade with an absolute amount
   */
  public static Trade adaptPublicTrade(BitfinexPublicTrade trade, CurrencyPair currencyPair) {
    OrderType orderType = trade.getType();
    BigDecimal amount = trade.getAmount();
    BigDecimal price = trade.getPrice();
    Date date = DateUtils.fromMillisUtc(trade.getTimestamp());
    final String tradeId = String.valueOf(trade.getTradeId());
    return new Trade.Builder()
        .type(orderType)
        // v2 amounts are signed; expose the magnitude only (direction lives in the type).
        .originalAmount(amount == null ? null : amount.abs())
        .currencyPair(currencyPair)
        .price(price)
        .timestamp(date)
        .id(tradeId)
        .build();
  }

  /**
   * Adapts an array of v2 public trades, tracking the highest trade id seen so callers can page.
   *
   * @param trades v2 public trades
   * @param currencyPair pair the trades belong to
   * @return trades sorted-by-id container whose lastID is the maximum trade id
   */
  public static Trades adaptPublicTrades(BitfinexPublicTrade[] trades, CurrencyPair currencyPair) {
    List<Trade> tradesList = new ArrayList<>(trades.length);
    long lastTradeId = 0;
    for (BitfinexPublicTrade trade : trades) {
      long tradeId = trade.getTradeId();
      if (tradeId > lastTradeId) {
        lastTradeId = tradeId;
      }
      tradesList.add(adaptPublicTrade(trade, currencyPair));
    }
    return new Trades(tradesList, lastTradeId, TradeSortType.SortByID);
  }

  /**
   * Deserializes raw v2 ticker JSON arrays into the concrete ticker type selected by the symbol
   * prefix.
   *
   * @param tickers raw JSON arrays whose first element is the symbol
   * @return concrete ticker instances (trading pair or funding currency)
   * @throws IOException declared for deserialization failures
   */
  public static org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexTicker[] adoptBitfinexTickers(
      List<ArrayNode> tickers) throws IOException {
    return tickers.stream()
        .map(
            array -> {
              // tBTCUSD -> trading pair
              // fUSD -> funding currency
              try {
                String symbol = array.get(0).asText();
                switch (symbol.charAt(0)) {
                  case 't':
                    return mapper.treeToValue(array, BitfinexTickerTraidingPair.class);
                  case 'f':
                    return mapper.treeToValue(array, BitfinexTickerFundingCurrency.class);
                  default:
                    throw new RuntimeException(
                        "Invalid symbol <" + symbol + ">, it must start with 't' or 'f'.");
                }
              } catch (JsonProcessingException e) {
                throw new RuntimeException("Could not convert ticker.", e);
              }
            })
        .toArray(org.knowm.xchange.bitfinex.v2.dto.marketdata.BitfinexTicker[]::new);
  }
}
package com.mossle.internal.oss.service;

import java.io.InputStream;
import java.util.Date;
import java.util.UUID;

import javax.annotation.Resource;

import com.mossle.core.store.InputStreamDataSource;
import com.mossle.core.store.StoreHelper;
import com.mossle.core.store.StoreResult;
import com.mossle.internal.oss.persistence.domain.OssBucket;
import com.mossle.internal.oss.persistence.domain.OssObject;
import com.mossle.internal.oss.persistence.manager.OssBucketManager;
import com.mossle.internal.oss.persistence.manager.OssObjectManager;
import com.mossle.internal.oss.support.OssDTO;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

/**
 * Simple object-storage service: persists object metadata (bucket + name + store key)
 * through the OSS managers and delegates the binary payload to {@link StoreHelper}.
 *
 * <p>All public methods return {@code null} on failure (missing bucket/object or a
 * store error); callers are expected to treat {@code null} as "not available".
 */
@Service
public class OssService {
    private static Logger logger = LoggerFactory.getLogger(OssService.class);
    private StoreHelper storeHelper;
    private OssBucketManager ossBucketManager;
    private OssObjectManager ossObjectManager;

    /**
     * Stores (or overwrites) an object under the given bucket and name.
     *
     * @param is payload stream; consumed by the underlying store
     * @param bucketName target bucket, must already exist
     * @param objectName object name within the bucket
     * @return a DTO echoing bucket/object names, or {@code null} on any failure
     */
    public OssDTO putObject(InputStream is, String bucketName, String objectName) {
        OssBucket ossBucket = findBucket(bucketName);

        if (ossBucket == null) {
            return null;
        }

        OssObject ossObject = findObject(ossBucket, objectName);

        try {
            StoreResult storeResult = storeHelper.saveStore(bucketName,
                    new InputStreamDataSource(objectName, is));

            // Create the metadata row only if this is a new object; the common
            // fields below are identical for both create and update paths.
            if (ossObject == null) {
                ossObject = new OssObject();
                ossObject.setOssBucket(ossBucket);
            }

            ossObject.setName(objectName);
            ossObject.setPath(storeResult.getKey());
            ossObject.setCreateTime(new Date());
            ossObjectManager.save(ossObject);
        } catch (Exception ex) {
            logger.error(ex.getMessage(), ex);

            return null;
        }

        return createDto(bucketName, objectName);
    }

    /**
     * Stores an object under a freshly generated random (UUID) name.
     *
     * @param is payload stream
     * @param bucketName target bucket
     * @return a DTO with the generated object name, or {@code null} on failure
     */
    public OssDTO postObject(InputStream is, String bucketName) {
        String objectName = UUID.randomUUID().toString();

        return this.putObject(is, bucketName, objectName);
    }

    /**
     * Fetches an object and opens an input stream on its payload.
     *
     * @param bucketName bucket to look in
     * @param objectName object name
     * @return a DTO carrying the opened stream, or {@code null} if the bucket,
     *         object, or backing store entry is missing or unreadable
     */
    public OssDTO getObject(String bucketName, String objectName) {
        OssBucket ossBucket = findBucket(bucketName);

        if (ossBucket == null) {
            return null;
        }

        OssObject ossObject = findObject(ossBucket, objectName);

        if (ossObject == null) {
            logger.info("cannot find object : {} {}", bucketName, objectName);

            return null;
        }

        OssDTO ossDto = createDto(bucketName, objectName);

        try {
            String key = ossObject.getPath();
            StoreResult storeResult = storeHelper.getStore(bucketName, key);

            if (storeResult == null) {
                logger.info("cannot find store : {} {}", bucketName, key);

                return null;
            }

            ossDto.setInputStream(storeResult.getDataSource().getInputStream());
        } catch (Exception ex) {
            logger.error(ex.getMessage(), ex);

            return null;
        }

        return ossDto;
    }

    /**
     * Deletes an object's payload from the store and removes its metadata row.
     *
     * @param bucketName bucket to look in
     * @param objectName object name
     * @return a DTO echoing bucket/object names, or {@code null} on failure
     */
    public OssDTO deleteObject(String bucketName, String objectName) {
        OssBucket ossBucket = findBucket(bucketName);

        if (ossBucket == null) {
            return null;
        }

        OssObject ossObject = findObject(ossBucket, objectName);

        if (ossObject == null) {
            logger.info("cannot find object : {} {}", bucketName, objectName);

            return null;
        }

        try {
            String key = ossObject.getPath();
            storeHelper.removeStore(bucketName, key);
            ossObjectManager.remove(ossObject);
        } catch (Exception ex) {
            logger.error(ex.getMessage(), ex);

            return null;
        }

        return createDto(bucketName, objectName);
    }

    /**
     * Checks whether an object exists.
     *
     * @param bucketName bucket to look in
     * @param objectName object name
     * @return a DTO echoing bucket/object names when the object exists,
     *         otherwise {@code null}
     */
    public OssDTO doesObjectExist(String bucketName, String objectName) {
        OssBucket ossBucket = findBucket(bucketName);

        if (ossBucket == null) {
            return null;
        }

        OssObject ossObject = findObject(ossBucket, objectName);

        if (ossObject == null) {
            logger.info("cannot find object : {} {}", bucketName, objectName);

            return null;
        }

        return createDto(bucketName, objectName);
    }

    /** Looks up a bucket by name, logging when it is missing. */
    private OssBucket findBucket(String bucketName) {
        OssBucket ossBucket = ossBucketManager.findUniqueBy("name", bucketName);

        if (ossBucket == null) {
            logger.info("cannot find bucket : {}", bucketName);
        }

        return ossBucket;
    }

    /** Looks up object metadata by bucket and name; {@code null} when absent. */
    private OssObject findObject(OssBucket ossBucket, String objectName) {
        return ossObjectManager.findUnique(
                "from OssObject where ossBucket=? and name=?", ossBucket,
                objectName);
    }

    /** Builds the result DTO carrying bucket and object names. */
    private OssDTO createDto(String bucketName, String objectName) {
        OssDTO ossDto = new OssDTO();
        ossDto.setBucketName(bucketName);
        ossDto.setObjectName(objectName);

        return ossDto;
    }

    @Resource
    public void setStoreHelper(StoreHelper storeHelper) {
        this.storeHelper = storeHelper;
    }

    @Resource
    public void setOssBucketManager(OssBucketManager ossBucketManager) {
        this.ossBucketManager = ossBucketManager;
    }

    @Resource
    public void setOssObjectManager(OssObjectManager ossObjectManager) {
        this.ossObjectManager = ossObjectManager;
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight;

import com.intellij.codeInspection.dataFlow.HardcodedContracts;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.java.stubs.index.JavaAnnotationIndex;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

import static com.intellij.codeInsight.AnnotationUtil.NOT_NULL;
import static com.intellij.codeInsight.AnnotationUtil.NULLABLE;

/**
 * Project-level registry of nullability annotations: which annotation classes count as
 * "nullable"/"not-null", which defaults apply, and which JSR-305 type-qualifier
 * nicknames act as nullability annotations. State is persisted via JDOM
 * ({@link #getState()}/{@link #loadState(Element)}).
 */
@State(name = "NullableNotNullManager")
public class NullableNotNullManagerImpl extends NullableNotNullManager implements PersistentStateComponent<Element>, ModificationTracker {
  public static final String TYPE_QUALIFIER_NICKNAME = "javax.annotation.meta.TypeQualifierNickname";
  private static final String INSTRUMENTED_NOT_NULLS_TAG = "instrumentedNotNulls";

  // Public fields are serialized by DefaultJDOMExternalizer (see getState/loadState).
  public String myDefaultNullable = NULLABLE;
  public String myDefaultNotNull = NOT_NULL;
  public final JDOMExternalizableStringList myNullables = new JDOMExternalizableStringList(Arrays.asList(DEFAULT_NULLABLES));
  public final JDOMExternalizableStringList myNotNulls = new JDOMExternalizableStringList(Arrays.asList(DEFAULT_NOT_NULLS));
  private List<String> myInstrumentedNotNulls = ContainerUtil.newArrayList(NOT_NULL);
  private final SimpleModificationTracker myTracker = new SimpleModificationTracker();

  public NullableNotNullManagerImpl(Project project) {
    super(project);
  }

  @Override
  public void setNotNulls(@NotNull String... annotations) {
    myNotNulls.clear();
    Collections.addAll(myNotNulls, annotations);
    normalizeDefaults();
  }

  @Override
  public void setNullables(@NotNull String... annotations) {
    myNullables.clear();
    Collections.addAll(myNullables, annotations);
    normalizeDefaults();
  }

  @Override
  @NotNull
  public String getDefaultNullable() {
    return myDefaultNullable;
  }

  @Override
  public void setDefaultNullable(@NotNull String defaultNullable) {
    // The default must be one of the registered nullable annotations.
    LOG.assertTrue(getNullables().contains(defaultNullable));
    myDefaultNullable = defaultNullable;
    myTracker.incModificationCount();
  }

  @Override
  @NotNull
  public String getDefaultNotNull() {
    return myDefaultNotNull;
  }

  @Override
  public void setDefaultNotNull(@NotNull String defaultNotNull) {
    LOG.assertTrue(getNotNulls().contains(defaultNotNull));
    myDefaultNotNull = defaultNotNull;
    myTracker.incModificationCount();
  }

  @Override
  @NotNull
  public List<String> getNullables() {
    return Collections.unmodifiableList(myNullables);
  }

  @Override
  @NotNull
  public List<String> getNotNulls() {
    return Collections.unmodifiableList(myNotNulls);
  }

  @NotNull
  @Override
  public List<String> getInstrumentedNotNulls() {
    return Collections.unmodifiableList(myInstrumentedNotNulls);
  }

  @Override
  public void setInstrumentedNotNulls(@NotNull List<String> names) {
    // Sorted so persisted state is stable regardless of input order.
    myInstrumentedNotNulls = ContainerUtil.sorted(names);
    myTracker.incModificationCount();
  }

  @Override
  protected boolean hasHardcodedContracts(@NotNull PsiElement element) {
    return HardcodedContracts.hasHardcodedContracts(element);
  }

  @Override
  public Element getState() {
    Element component = new Element("component");
    // Only persist fields that differ from the built-in defaults.
    if (!hasDefaultValues()) {
      try {
        DefaultJDOMExternalizer.writeExternal(this, component);
      }
      catch (WriteExternalException e) {
        LOG.error(e);
      }
    }

    if (myInstrumentedNotNulls.size() != 1 || !NOT_NULL.equals(myInstrumentedNotNulls.get(0))) {
      // poor man's @XCollection(style = XCollection.Style.v2)
      Element instrumentedNotNulls = new Element(INSTRUMENTED_NOT_NULLS_TAG);
      for (String value : myInstrumentedNotNulls) {
        instrumentedNotNulls.addContent(new Element("option").setAttribute("value", value));
      }
      component.addContent(instrumentedNotNulls);
    }

    return component;
  }

  /** True when every persisted field still has its factory-default value. */
  private boolean hasDefaultValues() {
    return NOT_NULL.equals(myDefaultNotNull) &&
           NULLABLE.equals(myDefaultNullable) &&
           new HashSet<>(myNullables).equals(ContainerUtil.newHashSet(DEFAULT_NULLABLES)) &&
           new HashSet<>(myNotNulls).equals(ContainerUtil.newHashSet(DEFAULT_NOT_NULLS));
  }

  @Override
  public void loadState(@NotNull Element state) {
    try {
      DefaultJDOMExternalizer.readExternal(this, state);
      normalizeDefaults();
    }
    catch (InvalidDataException e) {
      LOG.error(e);
    }

    Element instrumented = state.getChild(INSTRUMENTED_NOT_NULLS_TAG);
    if (instrumented == null) {
      myInstrumentedNotNulls = ContainerUtil.newArrayList(NOT_NULL);
    } else {
      myInstrumentedNotNulls = ContainerUtil.mapNotNull(instrumented.getChildren("option"), o -> o.getAttributeValue("value"));
    }
  }

  /**
   * Repairs the lists after user edits/deserialization: removes entries that landed in
   * the wrong list and re-adds any missing built-in defaults, then bumps the tracker.
   */
  private void normalizeDefaults() {
    myNotNulls.removeAll(ContainerUtil.newHashSet(DEFAULT_NULLABLES));
    myNullables.removeAll(ContainerUtil.newHashSet(DEFAULT_NOT_NULLS));
    myNullables.addAll(ContainerUtil.filter(DEFAULT_NULLABLES, s -> !myNullables.contains(s)));
    myNotNulls.addAll(ContainerUtil.filter(DEFAULT_NOT_NULLS, s -> !myNotNulls.contains(s)));
    myTracker.incModificationCount();
  }

  /**
   * Collects all annotation classes acting as JSR-305 nullability "nicknames"
   * (annotations themselves annotated with @TypeQualifierNickname). Cached per project
   * and invalidated on any PSI modification.
   */
  @NotNull
  private List<PsiClass> getAllNullabilityNickNames() {
    if (!getNotNulls().contains(JAVAX_ANNOTATION_NONNULL)) {
      return Collections.emptyList();
    }
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () -> {
      List<PsiClass> result = new ArrayList<>();
      GlobalSearchScope scope = GlobalSearchScope.allScope(myProject);
      PsiClass[] nickDeclarations = JavaPsiFacade.getInstance(myProject).findClasses(TYPE_QUALIFIER_NICKNAME, scope);
      for (PsiClass tqNick : nickDeclarations) {
        result.addAll(ContainerUtil.findAll(MetaAnnotationUtil.getChildren(tqNick, scope), NullableNotNullManagerImpl::isNullabilityNickName));
      }
      if (nickDeclarations.length == 0) {
        result.addAll(getUnresolvedNicknameUsages());
      }
      return CachedValueProvider.Result.create(result, PsiModificationTracker.MODIFICATION_COUNT);
    });
  }

  // some frameworks use jsr305 annotations but don't have them in classpath
  @NotNull
  private List<PsiClass> getUnresolvedNicknameUsages() {
    List<PsiClass> result = new ArrayList<>();
    Collection<PsiAnnotation> annotations = JavaAnnotationIndex.getInstance().get(StringUtil.getShortName(TYPE_QUALIFIER_NICKNAME), myProject, GlobalSearchScope.allScope(myProject));
    for (PsiAnnotation annotation : annotations) {
      PsiElement context = annotation.getContext();
      // Only count usages where the nickname annotates an annotation type declaration.
      if (context instanceof PsiModifierList && context.getContext() instanceof PsiClass) {
        PsiClass ownerClass = (PsiClass)context.getContext();
        if (ownerClass.isAnnotationType() && isNullabilityNickName(ownerClass)) {
          result.add(ownerClass);
        }
      }
    }
    return result;
  }

  /**
   * Determines whether {@code annotation} is a JSR-305 @TypeQualifierDefault carrying a
   * nullability qualifier applicable to {@code placeTargetTypes}; returns the resulting
   * info (marked as external/default) or null.
   */
  @Override
  protected NullabilityAnnotationInfo isJsr305Default(@NotNull PsiAnnotation annotation, @NotNull PsiAnnotation.TargetType[] placeTargetTypes) {
    PsiClass declaration = resolveAnnotationType(annotation);
    PsiModifierList modList = declaration == null ? null : declaration.getModifierList();
    if (modList == null) return null;

    PsiAnnotation tqDefault = AnnotationUtil.findAnnotation(declaration, true, "javax.annotation.meta.TypeQualifierDefault");
    if (tqDefault == null) return null;

    Set<PsiAnnotation.TargetType> required = AnnotationTargetUtil.extractRequiredAnnotationTargets(tqDefault.findAttributeValue(null));
    if (required == null || (!required.isEmpty() && !ContainerUtil.intersects(required, Arrays.asList(placeTargetTypes)))) return null;

    for (PsiAnnotation qualifier : modList.getAnnotations()) {
      Nullability nullability = getJsr305QualifierNullability(qualifier);
      if (nullability != null) {
        return new NullabilityAnnotationInfo(annotation, nullability, true);
      }
    }
    return null;
  }

  /** Resolves an annotation reference to its declaring annotation class, if any. */
  @Nullable
  private static PsiClass resolveAnnotationType(@NotNull PsiAnnotation annotation) {
    PsiJavaCodeReferenceElement element = annotation.getNameReferenceElement();
    PsiElement declaration = element == null ? null : element.resolve();
    if (!(declaration instanceof PsiClass) || !((PsiClass)declaration).isAnnotationType()) return null;
    return (PsiClass)declaration;
  }

  /** Maps a javax.annotation.* qualifier to a Nullability, or null if not one. */
  @Nullable
  private Nullability getJsr305QualifierNullability(@NotNull PsiAnnotation qualifier) {
    String qName = qualifier.getQualifiedName();
    if (qName == null || !qName.startsWith("javax.annotation.")) return null;

    if (qName.equals(JAVAX_ANNOTATION_NULLABLE) && getNullables().contains(qName)) return Nullability.NULLABLE;
    if (qName.equals(JAVAX_ANNOTATION_NONNULL)) return extractNullityFromWhenValue(qualifier);
    return null;
  }

  private static boolean isNullabilityNickName(@NotNull PsiClass candidate) {
    String qname = candidate.getQualifiedName();
    // The javax.annotation.* classes themselves are qualifiers, not nicknames.
    if (qname == null || qname.startsWith("javax.annotation.")) return false;
    return getNickNamedNullability(candidate) != Nullability.UNKNOWN;
  }

  /** Nullability implied by a nickname annotation class, or UNKNOWN if it is not one. */
  @NotNull
  private static Nullability getNickNamedNullability(@NotNull PsiClass psiClass) {
    if (AnnotationUtil.findAnnotation(psiClass, TYPE_QUALIFIER_NICKNAME) == null) return Nullability.UNKNOWN;
    PsiAnnotation nonNull = AnnotationUtil.findAnnotation(psiClass, JAVAX_ANNOTATION_NONNULL);
    return nonNull != null ? extractNullityFromWhenValue(nonNull) : Nullability.UNKNOWN;
  }

  /** Interprets @Nonnull(when=...): ALWAYS -> NOT_NULL, MAYBE/NEVER -> NULLABLE. */
  @NotNull
  private static Nullability extractNullityFromWhenValue(@NotNull PsiAnnotation nonNull) {
    PsiAnnotationMemberValue when = nonNull.findAttributeValue("when");
    if (when instanceof PsiReferenceExpression) {
      String refName = ((PsiReferenceExpression)when).getReferenceName();
      if ("ALWAYS".equals(refName)) {
        return Nullability.NOT_NULL;
      }
      if ("MAYBE".equals(refName) || "NEVER".equals(refName)) {
        return Nullability.NULLABLE;
      }
    }
    return Nullability.UNKNOWN;
  }

  /** Qualified names of nickname annotation classes matching {@code nullability}. */
  @NotNull
  private List<String> filterNickNames(@NotNull Nullability nullability) {
    return ContainerUtil.mapNotNull(getAllNullabilityNickNames(), c -> getNickNamedNullability(c) == nullability ? c.getQualifiedName() : null);
  }

  @NotNull
  @Override
  protected List<String> getNullablesWithNickNames() {
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () ->
      CachedValueProvider.Result.create(ContainerUtil.concat(getNullables(), filterNickNames(Nullability.NULLABLE)), PsiModificationTracker.MODIFICATION_COUNT));
  }

  @NotNull
  @Override
  protected List<String> getNotNullsWithNickNames() {
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () ->
      CachedValueProvider.Result.create(ContainerUtil.concat(getNotNulls(), filterNickNames(Nullability.NOT_NULL)), PsiModificationTracker.MODIFICATION_COUNT));
  }

  @NotNull
  @Override
  protected Set<String> getAllNullabilityAnnotationsWithNickNames() {
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () -> {
      Set<String> result = new THashSet<>();
      result.addAll(getNotNulls());
      result.addAll(getNullables());
      result.addAll(ContainerUtil.mapNotNull(getAllNullabilityNickNames(), PsiClass::getQualifiedName));
      return CachedValueProvider.Result.create(Collections.unmodifiableSet(result), PsiModificationTracker.MODIFICATION_COUNT);
    });
  }

  @Override
  public long getModificationCount() {
    return myTracker.getModificationCount();
  }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable OpenFlow 1.3 DESC stats reply (fixed 1072-byte wire message carrying
 * manufacturer/hardware/software descriptions, serial number, and datapath description).
 * Generated code — structural comments below are review annotations only.
 */
class OFDescStatsReplyVer13 implements OFDescStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFDescStatsReplyVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int LENGTH = 1072;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static String DEFAULT_MFR_DESC = "";
    private final static String DEFAULT_HW_DESC = "";
    private final static String DEFAULT_SW_DESC = "";
    private final static String DEFAULT_SERIAL_NUM = "";
    private final static String DEFAULT_DP_DESC = "";

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final String mfrDesc;
    private final String hwDesc;
    private final String swDesc;
    private final String serialNum;
    private final String dpDesc;
//
    // Immutable default instance
    final static OFDescStatsReplyVer13 DEFAULT = new OFDescStatsReplyVer13(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_MFR_DESC, DEFAULT_HW_DESC, DEFAULT_SW_DESC, DEFAULT_SERIAL_NUM, DEFAULT_DP_DESC
    );

    // package private constructor - used by readers, builders, and factory
    OFDescStatsReplyVer13(long xid, Set<OFStatsReplyFlags> flags, String mfrDesc, String hwDesc, String swDesc, String serialNum, String dpDesc) {
        if(flags == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property flags cannot be null");
        }
        if(mfrDesc == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property mfrDesc cannot be null");
        }
        if(hwDesc == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property hwDesc cannot be null");
        }
        if(swDesc == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property swDesc cannot be null");
        }
        if(serialNum == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property serialNum cannot be null");
        }
        if(dpDesc == null) {
            throw new NullPointerException("OFDescStatsReplyVer13: property dpDesc cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.mfrDesc = mfrDesc;
        this.hwDesc = hwDesc;
        this.swDesc = swDesc;
        this.serialNum = serialNum;
        this.dpDesc = dpDesc;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.DESC;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public String getMfrDesc() {
        return mfrDesc;
    }

    @Override
    public String getHwDesc() {
        return hwDesc;
    }

    @Override
    public String getSwDesc() {
        return swDesc;
    }

    @Override
    public String getSerialNum() {
        return serialNum;
    }

    @Override
    public String getDpDesc() {
        return dpDesc;
    }

    public OFDescStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing message: unset properties fall back to the parent.
    static class BuilderWithParent implements OFDescStatsReply.Builder {
        final OFDescStatsReplyVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean mfrDescSet;
        private String mfrDesc;
        private boolean hwDescSet;
        private String hwDesc;
        private boolean swDescSet;
        private String swDesc;
        private boolean serialNumSet;
        private String serialNum;
        private boolean dpDescSet;
        private String dpDesc;

        BuilderWithParent(OFDescStatsReplyVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFDescStatsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.DESC;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public OFDescStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }

    @Override
    public String getMfrDesc() {
        return mfrDesc;
    }

    @Override
    public OFDescStatsReply.Builder setMfrDesc(String mfrDesc) {
        this.mfrDesc = mfrDesc;
        this.mfrDescSet = true;
        return this;
    }

    @Override
    public String getHwDesc() {
        return hwDesc;
    }

    @Override
    public OFDescStatsReply.Builder setHwDesc(String hwDesc) {
        this.hwDesc = hwDesc;
        this.hwDescSet = true;
        return this;
    }

    @Override
    public String getSwDesc() {
        return swDesc;
    }

    @Override
    public OFDescStatsReply.Builder setSwDesc(String swDesc) {
        this.swDesc = swDesc;
        this.swDescSet = true;
        return this;
    }

    @Override
    public String getSerialNum() {
        return serialNum;
    }

    @Override
    public OFDescStatsReply.Builder setSerialNum(String serialNum) {
        this.serialNum = serialNum;
        this.serialNumSet = true;
        return this;
    }

    @Override
    public String getDpDesc() {
        return dpDesc;
    }

    @Override
    public OFDescStatsReply.Builder setDpDesc(String dpDesc) {
        this.dpDesc = dpDesc;
        this.dpDescSet = true;
        return this;
    }

        @Override
        public OFDescStatsReply build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
                if(flags == null)
                    throw new NullPointerException("Property flags must not be null");
                String mfrDesc = this.mfrDescSet ? this.mfrDesc : parentMessage.mfrDesc;
                if(mfrDesc == null)
                    throw new NullPointerException("Property mfrDesc must not be null");
                String hwDesc = this.hwDescSet ? this.hwDesc : parentMessage.hwDesc;
                if(hwDesc == null)
                    throw new NullPointerException("Property hwDesc must not be null");
                String swDesc = this.swDescSet ? this.swDesc : parentMessage.swDesc;
                if(swDesc == null)
                    throw new NullPointerException("Property swDesc must not be null");
                String serialNum = this.serialNumSet ? this.serialNum : parentMessage.serialNum;
                if(serialNum == null)
                    throw new NullPointerException("Property serialNum must not be null");
                String dpDesc = this.dpDescSet ? this.dpDesc : parentMessage.dpDesc;
                if(dpDesc == null)
                    throw new NullPointerException("Property dpDesc must not be null");

                //
                return new OFDescStatsReplyVer13(
                    xid,
                    flags,
                    mfrDesc,
                    hwDesc,
                    swDesc,
                    serialNum,
                    dpDesc
                );
        }

    }

    // Builder with no parent: unset properties fall back to the DEFAULT_* constants.
    static class Builder implements OFDescStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean mfrDescSet;
        private String mfrDesc;
        private boolean hwDescSet;
        private String hwDesc;
        private boolean swDescSet;
        private String swDesc;
        private boolean serialNumSet;
        private String serialNum;
        private boolean dpDescSet;
        private String dpDesc;

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFDescStatsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.DESC;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public OFDescStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }

    @Override
    public String getMfrDesc() {
        return mfrDesc;
    }

    @Override
    public OFDescStatsReply.Builder setMfrDesc(String mfrDesc) {
        this.mfrDesc = mfrDesc;
        this.mfrDescSet = true;
        return this;
    }

    @Override
    public String getHwDesc() {
        return hwDesc;
    }

    @Override
    public OFDescStatsReply.Builder setHwDesc(String hwDesc) {
        this.hwDesc = hwDesc;
        this.hwDescSet = true;
        return this;
    }

    @Override
    public String getSwDesc() {
        return swDesc;
    }

    @Override
    public OFDescStatsReply.Builder setSwDesc(String swDesc) {
        this.swDesc = swDesc;
        this.swDescSet = true;
        return this;
    }

    @Override
    public String getSerialNum() {
        return serialNum;
    }

    @Override
    public OFDescStatsReply.Builder setSerialNum(String serialNum) {
        this.serialNum = serialNum;
        this.serialNumSet = true;
        return this;
    }

    @Override
    public String getDpDesc() {
        return dpDesc;
    }

    @Override
    public OFDescStatsReply.Builder setDpDesc(String dpDesc) {
        this.dpDesc = dpDesc;
        this.dpDescSet = true;
        return this;
    }
//
        @Override
        public OFDescStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            String mfrDesc = this.mfrDescSet ? this.mfrDesc : DEFAULT_MFR_DESC;
            if(mfrDesc == null)
                throw new NullPointerException("Property mfrDesc must not be null");
            String hwDesc = this.hwDescSet ? this.hwDesc : DEFAULT_HW_DESC;
            if(hwDesc == null)
                throw new NullPointerException("Property hwDesc must not be null");
            String swDesc = this.swDescSet ? this.swDesc : DEFAULT_SW_DESC;
            if(swDesc == null)
                throw new NullPointerException("Property swDesc must not be null");
            String serialNum = this.serialNumSet ? this.serialNum : DEFAULT_SERIAL_NUM;
            if(serialNum == null)
                throw new NullPointerException("Property serialNum must not be null");
            String dpDesc = this.dpDescSet ? this.dpDesc : DEFAULT_DP_DESC;
            if(dpDesc == null)
                throw new NullPointerException("Property dpDesc must not be null");
            return new OFDescStatsReplyVer13(
                    xid,
                    flags,
                    mfrDesc,
                    hwDesc,
                    swDesc,
                    serialNum,
                    dpDesc
                );
        }

    }


    final static Reader READER = new Reader();
    // Deserializer: validates the fixed header fields, then reads the five
    // fixed-length description strings. Returns null when the buffer is short.
    static class Reader implements OFMessageReader<OFDescStatsReply> {
        @Override
        public OFDescStatsReply readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 1072)
                throw new OFParseError("Wrong length: Expected=1072(1072), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 0
            short statsType = bb.readShort();
            if(statsType != (short) 0x0)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.DESC(0), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer13.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            String mfrDesc = ChannelUtils.readFixedLengthString(bb, 256);
            String hwDesc = ChannelUtils.readFixedLengthString(bb, 256);
            String swDesc = ChannelUtils.readFixedLengthString(bb, 256);
            String serialNum = ChannelUtils.readFixedLengthString(bb, 32);
            String dpDesc = ChannelUtils.readFixedLengthString(bb, 256);

            OFDescStatsReplyVer13 descStatsReplyVer13 = new OFDescStatsReplyVer13(
                    xid,
                      flags,
                      mfrDesc,
                      hwDesc,
                      swDesc,
                      serialNum,
                      dpDesc
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", descStatsReplyVer13);
            return descStatsReplyVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFDescStatsReplyVer13Funnel FUNNEL = new OFDescStatsReplyVer13Funnel();
    // Guava Funnel mirroring the wire layout (pad bytes intentionally skipped).
    static class OFDescStatsReplyVer13Funnel implements Funnel<OFDescStatsReplyVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFDescStatsReplyVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // fixed value property length = 1072
            sink.putShort((short) 0x430);
            sink.putLong(message.xid);
            // fixed value property statsType = 0
            sink.putShort((short) 0x0);
            OFStatsReplyFlagsSerializerVer13.putTo(message.flags, sink);
            // skip pad (4 bytes)
            sink.putUnencodedChars(message.mfrDesc);
            sink.putUnencodedChars(message.hwDesc);
            sink.putUnencodedChars(message.swDesc);
            sink.putUnencodedChars(message.serialNum);
            sink.putUnencodedChars(message.dpDesc);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializer: inverse of Reader, emits the fixed 1072-byte message.
    static class Writer implements OFMessageWriter<OFDescStatsReplyVer13> {
        @Override
        public void write(ByteBuf bb, OFDescStatsReplyVer13 message) {
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // fixed value property length = 1072
            bb.writeShort((short) 0x430);
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 0
            bb.writeShort((short) 0x0);
            OFStatsReplyFlagsSerializerVer13.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            ChannelUtils.writeFixedLengthString(bb, message.mfrDesc, 256);
            ChannelUtils.writeFixedLengthString(bb, message.hwDesc, 256);
            ChannelUtils.writeFixedLengthString(bb, message.swDesc, 256);
            ChannelUtils.writeFixedLengthString(bb, message.serialNum, 32);
            ChannelUtils.writeFixedLengthString(bb, message.dpDesc, 256);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFDescStatsReplyVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("mfrDesc=").append(mfrDesc);
        b.append(", ");
        b.append("hwDesc=").append(hwDesc);
        b.append(", ");
        b.append("swDesc=").append(swDesc);
        b.append(", ");
        b.append("serialNum=").append(serialNum);
        b.append(", ");
        b.append("dpDesc=").append(dpDesc);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFDescStatsReplyVer13 other = (OFDescStatsReplyVer13) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (mfrDesc == null) {
            if (other.mfrDesc != null)
                return false;
        } else if (!mfrDesc.equals(other.mfrDesc))
            return false;
        if (hwDesc == null) {
            if (other.hwDesc != null)
                return false;
        } else if (!hwDesc.equals(other.hwDesc))
            return false;
        if (swDesc == null) {
            if (other.swDesc != null)
                return false;
        } else if (!swDesc.equals(other.swDesc))
            return false;
        if (serialNum == null) {
            if (other.serialNum != null)
                return false;
        } else if (!serialNum.equals(other.serialNum))
            return false;
        if (dpDesc == null) {
            if (other.dpDesc != null)
                return false;
        } else if (!dpDesc.equals(other.dpDesc))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the line below drops the usual "result +" term
        // (typical generated form is `prime * result + (int)(xid ^ (xid >>> 32))`).
        // With result == 1 this only shifts the hash by a constant factor and the
        // equals/hashCode contract still holds; left unchanged because this file is
        // generated ("Do not modify") — confirm against the LoxiGen template.
        result = prime *  (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((mfrDesc == null) ? 0 : mfrDesc.hashCode());
        result = prime * result + ((hwDesc == null) ? 0 : hwDesc.hashCode());
        result = prime * result + ((swDesc == null) ? 0 : swDesc.hashCode());
        result = prime * result + ((serialNum == null) ? 0 : serialNum.hashCode());
        result = prime * result + ((dpDesc == null) ? 0 : dpDesc.hashCode());
        return result;
    }

}
package com.example.chain.main; import android.app.Activity; import android.content.Context; import android.content.SharedPreferences; import android.support.v4.app.FragmentActivity; import com.example.chain.core.ChainPreferenceInterface; import java.util.Set; /** * ChainPreference * * ChainPreference cp = new ChainPreference(context or activity); * cp.save(something key, something value); * cp.get~~~~(key); * * @param <T> */ public class ChainPreference<T extends ChainPreference> implements ChainPreferenceInterface<ChainPreference<T>> { private final ChainPreference self = this; private Context mContext; private SharedPreferences mPreferences; private SharedPreferences.Editor mEditor; private static final String PREFERECE = "PREF"; /** * Constructor * @param context - context * @param key - String key */ public ChainPreference(Context context, String key) { mContext = context; mPreferences = mContext.getSharedPreferences(key, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } /** * Constructor * @param context - activity * @param key - String key */ public ChainPreference(Activity context, String key) { mContext = context; mPreferences = mContext.getSharedPreferences(key, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } /** * Constructor * @param context * @param key */ public ChainPreference(FragmentActivity context, String key) { mContext = context; mPreferences = mContext.getSharedPreferences(key, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } /** * Constructor * @param context - context */ public ChainPreference(Context context) { mContext = context; mPreferences = mContext.getSharedPreferences(PREFERECE, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } /** * Constructor * @param context - activity */ public ChainPreference(Activity context) { mContext = context; mPreferences = mContext.getSharedPreferences(PREFERECE, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } /** * Constructor * @param context - FramgnetActivity */ 
public ChainPreference(FragmentActivity context) { mContext = context; mPreferences = mContext.getSharedPreferences(PREFERECE, Context.MODE_PRIVATE); mEditor = mPreferences.edit(); } // ******************************************************************* // Method - save // ******************************************************************* /** * save(String key, String value); * @param key - String key * @param value - String value * @return self */ @Override public ChainPreference<T> save(String key, String value) { mEditor.putString(key, value); mEditor.commit(); return self; } /** * save(String key, int value) * @param key - String key * @param value - int value * @return self */ @Override public ChainPreference<T> save(String key, int value) { mEditor.putInt(key, value); mEditor.commit(); return self; } /** * save(String key, boolean value) * @param key - String key * @param value - boolean value * @return */ @Override public ChainPreference<T> save(String key, boolean value) { mEditor.putBoolean(key, value); mEditor.commit(); return self; } /** * save(String key, float value) * @param key - String key * @param value - float value * @return */ @Override public ChainPreference<T> save(String key, float value) { mEditor.putFloat(key, value); mEditor.commit(); return self; } /** * save(String key, long value) * @param key - String kye * @param value - long value * @return */ @Override public ChainPreference<T> save(String key, long value) { mEditor.putLong(key, value); mEditor.commit(); return self; } /** * save(String key, Set<String> value) * @param key - String key * @param value - Set<String> value * @return */ @Override public ChainPreference<T> save(String key, Set<String> value) { mEditor.putStringSet(key, value); mEditor.commit(); return self; } // ******************************************************************* // Method - get() // ******************************************************************* /** * getInt(String key) * @param key * @return */ 
@Override public int getInt(String key) { int value = mPreferences.getInt(key, -1); return value; } /** * getString(String key) * @param key * @return */ @Override public String getString(String key) { return mPreferences.getString(key, ""); } /** * getBoolean(String key) * @param key * @return */ @Override public boolean getBoolean(String key) { return mPreferences.getBoolean(key, false); } /** * getFloat(String key) * @param key * @return */ @Override public float getFloat(String key) { return mPreferences.getFloat(key, -1f); } /** * getLong(String key) * @param key * @return */ @Override public long getLong(String key) { return mPreferences.getLong(key, -1L); } /** * getSet(String key) * @param key * @return */ @Override public Set<String> getSet(String key) { Set<String> set = null; return mPreferences.getStringSet(key, set); } }
/*
 * Copyright 2007 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.parsing;

import com.google.common.collect.ImmutableList;
import com.google.javascript.jscomp.parsing.Config.LanguageMode;
import com.google.javascript.jscomp.testing.TestErrorReporter;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.head.ScriptRuntime;
import com.google.javascript.rhino.jstype.SimpleSourceFile;
import com.google.javascript.rhino.jstype.StaticSourceFile;
import com.google.javascript.rhino.testing.BaseJSTypeTestCase;

import java.io.IOException;
import java.util.List;
import java.util.logging.Logger;

// Unit tests for the Rhino-based parser front end (ParserRunner/IRFactory):
// node source positions, JSDoc attachment, and language-mode-dependent
// errors and warnings.
public class ParserTest extends BaseJSTypeTestCase {
  // Expected diagnostic messages, pulled from the production code so the
  // tests stay in sync with the real message text.
  private static final String SUSPICIOUS_COMMENT_WARNING =
      IRFactory.SUSPICIOUS_COMMENT_WARNING;

  private static final String TRAILING_COMMA_MESSAGE =
      ScriptRuntime.getMessage0("msg.extra.trailing.comma");

  private static final String BAD_PROPERTY_MESSAGE =
      ScriptRuntime.getMessage0("msg.bad.prop");

  private static final String MISSING_GT_MESSAGE = "Bad type annotation. " +
      com.google.javascript.rhino.ScriptRuntime.getMessage0(
          "msg.jsdoc.missing.gt");

  private static final String MISPLACED_TYPE_ANNOTATION =
      IRFactory.MISPLACED_TYPE_ANNOTATION;

  // Parser configuration consumed by parse()/parseError(); reset before
  // each test by setUp().
  private Config.LanguageMode mode;
  private boolean isIdeMode = false;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    mode = LanguageMode.ECMASCRIPT3;
    isIdeMode = false;
  }

  // The testLinenoCharno* tests check that each parsed node records the
  // line (1-based) and column (0-based) where its source text begins.

  public void testLinenoCharnoAssign1() throws Exception {
    Node assign = parse("a = b").getFirstChild().getFirstChild();

    assertEquals(Token.ASSIGN, assign.getType());
    assertEquals(1, assign.getLineno());
    assertEquals(0, assign.getCharno());
  }

  public void testLinenoCharnoAssign2() throws Exception {
    Node assign = parse("\n a.g.h.k = 45").getFirstChild().getFirstChild();

    assertEquals(Token.ASSIGN, assign.getType());
    assertEquals(2, assign.getLineno());
    assertEquals(1, assign.getCharno());
  }

  public void testLinenoCharnoCall() throws Exception {
    Node call = parse("\n foo(123);").getFirstChild().getFirstChild();

    assertEquals(Token.CALL, call.getType());
    assertEquals(2, call.getLineno());
    assertEquals(1, call.getCharno());
  }

  public void testLinenoCharnoGetProp1() throws Exception {
    Node getprop = parse("\n foo.bar").getFirstChild().getFirstChild();

    assertEquals(Token.GETPROP, getprop.getType());
    assertEquals(2, getprop.getLineno());
    assertEquals(1, getprop.getCharno());

    Node name = getprop.getFirstChild().getNext();
    assertEquals(Token.STRING, name.getType());
    assertEquals(2, name.getLineno());
    assertEquals(5, name.getCharno());
  }

  public void testLinenoCharnoGetProp2() throws Exception {
    // Property name on its own line: position follows the name token.
    Node getprop = parse("\n foo.\nbar").getFirstChild().getFirstChild();

    assertEquals(Token.GETPROP, getprop.getType());
    assertEquals(2, getprop.getLineno());
    assertEquals(1, getprop.getCharno());

    Node name = getprop.getFirstChild().getNext();
    assertEquals(Token.STRING, name.getType());
    assertEquals(3, name.getLineno());
    assertEquals(0, name.getCharno());
  }

  public void testLinenoCharnoGetelem1() throws Exception {
    Node call = parse("\n foo[123]").getFirstChild().getFirstChild();

    assertEquals(Token.GETELEM, call.getType());
    assertEquals(2, call.getLineno());
    assertEquals(1, call.getCharno());
  }

  public void testLinenoCharnoGetelem2() throws Exception {
    Node call = parse("\n \n foo()[123]").getFirstChild().getFirstChild();

    assertEquals(Token.GETELEM, call.getType());
    assertEquals(3, call.getLineno());
    assertEquals(1, call.getCharno());
  }

  public void testLinenoCharnoGetelem3() throws Exception {
    Node call = parse("\n \n (8 + kl)[123]").getFirstChild().getFirstChild();

    assertEquals(Token.GETELEM, call.getType());
    assertEquals(3, call.getLineno());
    assertEquals(2, call.getCharno());
  }

  public void testLinenoCharnoForComparison() throws Exception {
    Node lt =
        parse("for (; i < j;){}").getFirstChild().getFirstChild().getNext();

    assertEquals(Token.LT, lt.getType());
    assertEquals(1, lt.getLineno());
    assertEquals(7, lt.getCharno());
  }

  public void testLinenoCharnoHook() throws Exception {
    Node n = parse("\n a ? 9 : 0").getFirstChild().getFirstChild();

    assertEquals(Token.HOOK, n.getType());
    assertEquals(2, n.getLineno());
    assertEquals(1, n.getCharno());
  }

  public void testLinenoCharnoArrayLiteral() throws Exception {
    Node n = parse("\n [8, 9]").getFirstChild().getFirstChild();

    assertEquals(Token.ARRAYLIT, n.getType());
    assertEquals(2, n.getLineno());
    assertEquals(2, n.getCharno());

    n = n.getFirstChild();
    assertEquals(Token.NUMBER, n.getType());
    assertEquals(2, n.getLineno());
    assertEquals(3, n.getCharno());

    n = n.getNext();
    assertEquals(Token.NUMBER, n.getType());
    assertEquals(2, n.getLineno());
    assertEquals(6, n.getCharno());
  }

  public void testLinenoCharnoObjectLiteral() throws Exception {
    Node n = parse("\n\n var a = {a:0\n,b :1};")
        .getFirstChild().getFirstChild().getFirstChild();

    assertEquals(Token.OBJECTLIT, n.getType());
    assertEquals(3, n.getLineno());
    assertEquals(9, n.getCharno());

    Node key = n.getFirstChild();
    assertEquals(Token.STRING_KEY, key.getType());
    assertEquals(3, key.getLineno());
assertEquals(10, key.getCharno());

    Node value = key.getFirstChild();
    assertEquals(Token.NUMBER, value.getType());
    assertEquals(3, value.getLineno());
    assertEquals(12, value.getCharno());

    key = key.getNext();
    assertEquals(Token.STRING_KEY, key.getType());
    assertEquals(4, key.getLineno());
    assertEquals(1, key.getCharno());

    value = key.getFirstChild();
    assertEquals(Token.NUMBER, value.getType());
    assertEquals(4, value.getLineno());
    assertEquals(4, value.getCharno());
  }

  // Binary-operator position tests: all delegate to testLinenoCharnoBinop.

  public void testLinenoCharnoAdd() throws Exception {
    testLinenoCharnoBinop("+");
  }

  public void testLinenoCharnoSub() throws Exception {
    testLinenoCharnoBinop("-");
  }

  public void testLinenoCharnoMul() throws Exception {
    testLinenoCharnoBinop("*");
  }

  public void testLinenoCharnoDiv() throws Exception {
    testLinenoCharnoBinop("/");
  }

  public void testLinenoCharnoMod() throws Exception {
    testLinenoCharnoBinop("%");
  }

  public void testLinenoCharnoShift() throws Exception {
    testLinenoCharnoBinop("<<");
  }

  public void testLinenoCharnoBinaryAnd() throws Exception {
    testLinenoCharnoBinop("&");
  }

  public void testLinenoCharnoAnd() throws Exception {
    testLinenoCharnoBinop("&&");
  }

  public void testLinenoCharnoBinaryOr() throws Exception {
    testLinenoCharnoBinop("|");
  }

  public void testLinenoCharnoOr() throws Exception {
    testLinenoCharnoBinop("||");
  }

  public void testLinenoCharnoLt() throws Exception {
    testLinenoCharnoBinop("<");
  }

  public void testLinenoCharnoLe() throws Exception {
    testLinenoCharnoBinop("<=");
  }

  public void testLinenoCharnoGt() throws Exception {
    testLinenoCharnoBinop(">");
  }

  public void testLinenoCharnoGe() throws Exception {
    testLinenoCharnoBinop(">=");
  }

  // Shared helper: the operator node of "var a = 89 <binop> 76" must point
  // at line 1, column 8 (the start of its left operand).
  private void testLinenoCharnoBinop(String binop) {
    Node op = parse("var a = 89 " + binop + " 76").getFirstChild().
        getFirstChild().getFirstChild();

    assertEquals(1, op.getLineno());
    assertEquals(8, op.getCharno());
  }

  // The testJSDocAttachment* tests check which AST node a JSDoc comment is
  // attached to (VAR vs NAME vs ASSIGN, etc.).

  public void testJSDocAttachment1() {
    Node varNode = parse("/** @type number */var a;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());
    JSDocInfo info = varNode.getJSDocInfo();
    assertNotNull(info);
    assertTypeEquals(NUMBER_TYPE, info.getType());

    // NAME
    Node nameNode = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment2() {
    Node varNode = parse("/** @type number */var a,b;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());
    JSDocInfo info = varNode.getJSDocInfo();
    assertNotNull(info);
    assertTypeEquals(NUMBER_TYPE, info.getType());

    // First NAME
    Node nameNode1 = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode1.getType());
    assertNull(nameNode1.getJSDocInfo());

    // Second NAME
    Node nameNode2 = nameNode1.getNext();
    assertEquals(Token.NAME, nameNode2.getType());
    assertNull(nameNode2.getJSDocInfo());
  }

  public void testJSDocAttachment3() {
    Node assignNode = parse(
        "/** @type number */goog.FOO = 5;").getFirstChild().getFirstChild();

    // ASSIGN
    assertEquals(Token.ASSIGN, assignNode.getType());
    JSDocInfo info = assignNode.getJSDocInfo();
    assertNotNull(info);
    assertTypeEquals(NUMBER_TYPE, info.getType());
  }

  public void testJSDocAttachment4() {
    Node varNode = parse(
        "var a, /** @define {number} */b = 5;").getFirstChild();

    // ASSIGN
    assertEquals(Token.VAR, varNode.getType());
    assertNull(varNode.getJSDocInfo());

    // a
    Node a = varNode.getFirstChild();
    assertNull(a.getJSDocInfo());

    // b
    Node b = a.getNext();
    JSDocInfo info = b.getJSDocInfo();
    assertNotNull(info);
    assertTrue(info.isDefine());
    assertTypeEquals(NUMBER_TYPE, info.getType());
  }

  public void testJSDocAttachment5() {
    Node varNode = parse(
        "var /** @type number */a, /** @define {number} */b = 5;")
        .getFirstChild();

    // ASSIGN
    assertEquals(Token.VAR, varNode.getType());
    assertNull(varNode.getJSDocInfo());

    // a
    Node a = varNode.getFirstChild();
    assertNotNull(a.getJSDocInfo());
    JSDocInfo info = a.getJSDocInfo();
    assertNotNull(info);
    assertFalse(info.isDefine());
    assertTypeEquals(NUMBER_TYPE, info.getType());

    // b
    Node b = a.getNext();
    info = b.getJSDocInfo();
    assertNotNull(info);
    assertTrue(info.isDefine());
    assertTypeEquals(NUMBER_TYPE, info.getType());
  }

  /**
   * Tests that a JSDoc comment in an unexpected place of the code does not
   * propagate to following code due to {@link JSDocInfo} aggregation.
   */
  public void testJSDocAttachment6() throws Exception {
    Node functionNode = parse(
        "var a = /** @param {number} index */5;" +
        "/** @return boolean */function f(index){}")
        .getFirstChild().getNext();

    assertEquals(Token.FUNCTION, functionNode.getType());
    JSDocInfo info = functionNode.getJSDocInfo();
    assertNotNull(info);
    assertFalse(info.hasParameter("index"));
    assertTrue(info.hasReturnType());
    assertTypeEquals(UNKNOWN_TYPE, info.getReturnType());
  }

  public void testJSDocAttachment7() {
    Node varNode = parse("/** */var a;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());

    // NAME
    Node nameNode = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment8() {
    Node varNode = parse("/** x */var a;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());

    // NAME
    Node nameNode = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment9() {
    Node varNode = parse("/** \n x */var a;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());

    // NAME
    Node nameNode = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment10() {
    Node varNode = parse("/** x\n */var a;").getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());

    // NAME
    Node nameNode =
varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment11() {
    Node varNode =
        parse("/** @type {{x : number, 'y' : string, z}} */var a;")
            .getFirstChild();

    // VAR
    assertEquals(Token.VAR, varNode.getType());
    JSDocInfo info = varNode.getJSDocInfo();
    assertNotNull(info);

    assertTypeEquals(createRecordTypeBuilder().
        addProperty("x", NUMBER_TYPE, null).
        addProperty("y", STRING_TYPE, null).
        addProperty("z", UNKNOWN_TYPE, null).
        build(),
        info.getType());

    // NAME
    Node nameNode = varNode.getFirstChild();
    assertEquals(Token.NAME, nameNode.getType());
    assertNull(nameNode.getJSDocInfo());
  }

  public void testJSDocAttachment12() {
    Node varNode = parse("var a = {/** @type {Object} */ b: c};")
        .getFirstChild();
    Node objectLitNode = varNode.getFirstChild().getFirstChild();
    assertEquals(Token.OBJECTLIT, objectLitNode.getType());
    assertNotNull(objectLitNode.getFirstChild().getJSDocInfo());
  }

  public void testJSDocAttachment13() {
    Node varNode = parse("/** foo */ var a;").getFirstChild();
    assertNotNull(varNode.getJSDocInfo());
  }

  public void testJSDocAttachment14() {
    Node varNode = parse("/** */ var a;").getFirstChild();
    assertNull(varNode.getJSDocInfo());
  }

  public void testJSDocAttachment15() {
    Node varNode = parse("/** \n * \n */ var a;").getFirstChild();
    assertNull(varNode.getJSDocInfo());
  }

  public void testJSDocAttachment16() {
    Node exprCall =
        parse("/** @private */ x(); function f() {};").getFirstChild();
    assertEquals(Token.EXPR_RESULT, exprCall.getType());
    assertNull(exprCall.getNext().getJSDocInfo());
    assertNotNull(exprCall.getFirstChild().getJSDocInfo());
  }

  // The testIncorrectJSDocDoesNotAlterJSParsing* tests verify that a
  // malformed JSDoc comment only produces the expected warning and leaves
  // the parsed JS tree identical to one parsed without the comment.

  public void testIncorrectJSDocDoesNotAlterJSParsing1() throws Exception {
    assertNodeEquality(
        parse("var a = [1,2]"),
        parse("/** @type Array.<number*/var a = [1,2]",
            MISSING_GT_MESSAGE));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing2() throws Exception {
    assertNodeEquality(
        parse("var a = [1,2]"),
        parse("/** @type {Array.<number}*/var a = [1,2]",
            MISSING_GT_MESSAGE));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing3() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @param {Array.<number} nums */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};",
            MISSING_GT_MESSAGE));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing4() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @return boolean */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};"));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing5() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @param boolean this is some string*/" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};"));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing6() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @param {bool!*%E$} */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};",
            "Bad type annotation. expected closing }",
            "Bad type annotation. expecting a variable name in a @param tag"));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing7() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @see */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};",
            "@see tag missing description"));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing8() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @author */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};",
            "@author tag missing author"));
  }

  public void testIncorrectJSDocDoesNotAlterJSParsing9() throws Exception {
    assertNodeEquality(
        parse("C.prototype.say=function(nums) {alert(nums.join(','));};"),
        parse("/** @someillegaltag */" +
            "C.prototype.say=function(nums) {alert(nums.join(','));};",
            "illegal use of unknown JSDoc tag \"someillegaltag\";" +
            " ignoring it"));
  }

  public void testUnescapedSlashInRegexpCharClass() throws Exception {
    // The tokenizer without the fix for this bug throws an error.
parse("var foo = /[/]/;");
    parse("var foo = /[hi there/]/;");
    parse("var foo = /[/yo dude]/;");
    parse("var foo = /\\/[@#$/watashi/wa/suteevu/desu]/;");
  }

  // Fails the current test with a node-difference explanation when the two
  // parse trees are not structurally identical.
  private void assertNodeEquality(Node expected, Node found) {
    String message = expected.checkTreeEquals(found);
    if (message != null) {
      fail(message);
    }
  }

  @SuppressWarnings("unchecked")
  public void testParse() {
    Node a = Node.newString(Token.NAME, "a");
    a.addChildToFront(Node.newString(Token.NAME, "b"));
    List<ParserResult> testCases = ImmutableList.of(
        new ParserResult(
            "3;",
            createScript(new Node(Token.EXPR_RESULT, Node.newNumber(3.0)))),
        new ParserResult(
            "var a = b;",
            createScript(new Node(Token.VAR, a))),
        new ParserResult(
            "\"hell\\\no\\ world\\\n\\\n!\"",
            createScript(new Node(Token.EXPR_RESULT,
                Node.newString(Token.STRING, "hello world!")))));

    for (ParserResult testCase : testCases) {
      assertNodeEquality(testCase.node, parse(testCase.code));
    }
  }

  // Wraps a single statement node in a SCRIPT root, mirroring what parse()
  // produces for a one-statement program.
  private Node createScript(Node n) {
    Node script = new Node(Token.SCRIPT);
    script.addChildToBack(n);
    return script;
  }

  // Trailing commas in array/object literals warn in ES3 but are legal ES5.

  public void testTrailingCommaWarning1() {
    parse("var a = ['foo', 'bar'];");
  }

  public void testTrailingCommaWarning2() {
    parse("var a = ['foo',,'bar'];");
  }

  public void testTrailingCommaWarning3() {
    parse("var a = ['foo', 'bar',];", TRAILING_COMMA_MESSAGE);
    mode = LanguageMode.ECMASCRIPT5;
    parse("var a = ['foo', 'bar',];");
  }

  public void testTrailingCommaWarning4() {
    parse("var a = [,];", TRAILING_COMMA_MESSAGE);
    mode = LanguageMode.ECMASCRIPT5;
    parse("var a = [,];");
  }

  public void testTrailingCommaWarning5() {
    parse("var a = {'foo': 'bar'};");
  }

  public void testTrailingCommaWarning6() {
    parse("var a = {'foo': 'bar',};", TRAILING_COMMA_MESSAGE);
    mode = LanguageMode.ECMASCRIPT5;
    parse("var a = {'foo': 'bar',};");
  }

  public void testTrailingCommaWarning7() {
    parseError("var a = {,};", BAD_PROPERTY_MESSAGE);
  }

  public void testSuspiciousBlockCommentWarning1() {
    parse("/* @type {number} */ var x = 3;", SUSPICIOUS_COMMENT_WARNING);
  }

  public void testSuspiciousBlockCommentWarning2() {
    parse("/* \n * @type {number} */ var x = 3;", SUSPICIOUS_COMMENT_WARNING);
  }

  // The test*Forbidden tests check that non-ES3 language extensions are
  // rejected with the expected parse errors.

  public void testCatchClauseForbidden() {
    parseError("try { } catch (e if true) {}",
        "Catch clauses are not supported");
  }

  public void testConstForbidden() {
    parseError("const x = 3;", "Unsupported syntax: CONST");
  }

  public void testDestructuringAssignForbidden() {
    parseError("var [x, y] = foo();", "destructuring assignment forbidden");
  }

  public void testDestructuringAssignForbidden2() {
    parseError("var {x, y} = foo();", "missing : after property id");
  }

  public void testDestructuringAssignForbidden3() {
    parseError("var {x: x, y: y} = foo();",
        "destructuring assignment forbidden");
  }

  public void testDestructuringAssignForbidden4() {
    parseError("[x, y] = foo();",
        "destructuring assignment forbidden",
        "invalid assignment target");
  }

  public void testLetForbidden() {
    parseError("function f() { let (x = 3) { alert(x); }; }",
        "missing ; before statement",
        "syntax error");
  }

  public void testYieldForbidden() {
    parseError("function f() { yield 3; }", "missing ; before statement");
  }

  public void testBracelessFunctionForbidden() {
    parseError("var sq = function(x) x * x;",
        "missing { before function body");
  }

  public void testGeneratorsForbidden() {
    parseError("var i = (x for (x in obj));",
        "Unsupported syntax: GENEXPR");
  }

  public void testGettersForbidden1() {
    parseError("var x = {get foo() { return 3; }};",
        IRFactory.GETTER_ERROR_MESSAGE);
  }

  public void testGettersForbidden2() {
    parseError("var x = {get foo bar() { return 3; }};",
        "invalid property id");
  }

  public void testGettersForbidden3() {
    parseError("var x = {a getter:function b() { return 3; }};",
        "missing : after property id",
        "syntax error");
  }

  public void testGettersForbidden4() {
    parseError("var x = {\"a\" getter:function b() { return 3; }};",
        "missing : after property id",
        "syntax error");
  }

  public void testGettersForbidden5() {
    parseError("var x = {a: 2, get foo() { return 3; }};",
IRFactory.GETTER_ERROR_MESSAGE);
  }

  public void testSettersForbidden() {
    parseError("var x = {set foo() { return 3; }};",
        IRFactory.SETTER_ERROR_MESSAGE);
  }

  public void testSettersForbidden2() {
    parseError("var x = {a setter:function b() { return 3; }};",
        "missing : after property id",
        "syntax error");
  }

  public void testFileOverviewJSDoc1() {
    // A @fileoverview comment attaches to the SCRIPT root, not the function.
    Node n = parse("/** @fileoverview Hi mom! */ function Foo() {}");
    assertEquals(Token.FUNCTION, n.getFirstChild().getType());
    assertTrue(n.getJSDocInfo() != null);
    assertNull(n.getFirstChild().getJSDocInfo());
    assertEquals("Hi mom!", n.getJSDocInfo().getFileOverview());
  }

  public void testFileOverviewJSDocDoesNotHoseParsing() {
    assertEquals(Token.FUNCTION,
        parse("/** @fileoverview Hi mom! \n */ function Foo() {}")
            .getFirstChild().getType());
    assertEquals(Token.FUNCTION,
        parse("/** @fileoverview Hi mom! \n * * * */ function Foo() {}")
            .getFirstChild().getType());
    assertEquals(Token.FUNCTION,
        parse("/** @fileoverview \n * x */ function Foo() {}")
            .getFirstChild().getType());
    assertEquals(Token.FUNCTION,
        parse("/** @fileoverview \n * x \n */ function Foo() {}")
            .getFirstChild().getType());
  }

  public void testFileOverviewJSDoc2() {
    Node n = parse("/** @fileoverview Hi mom! */ " +
        "/** @constructor */ function Foo() {}");
    assertTrue(n.getJSDocInfo() != null);
    assertEquals("Hi mom!", n.getJSDocInfo().getFileOverview());
    assertTrue(n.getFirstChild().getJSDocInfo() != null);
    assertFalse(n.getFirstChild().getJSDocInfo().hasFileOverview());
    assertTrue(n.getFirstChild().getJSDocInfo().isConstructor());
  }

  public void testObjectLiteralDoc1() {
    Node n = parse("var x = {/** @type {number} */ 1: 2};");

    Node objectLit = n.getFirstChild().getFirstChild().getFirstChild();
    assertEquals(Token.OBJECTLIT, objectLit.getType());

    Node number = objectLit.getFirstChild();
    assertEquals(Token.STRING_KEY, number.getType());
    assertNotNull(number.getJSDocInfo());
  }

  public void testDuplicatedParam() {
    parse("function foo(x, x) {}", "Duplicate parameter name \"x\".");
  }

  // Getters/setters in object literals are rejected in ES3 and accepted in
  // ES5 (with arity checks on the accessor bodies).

  public void testGetter() {
    mode = LanguageMode.ECMASCRIPT3;
    parseError("var x = {get 1(){}};", IRFactory.GETTER_ERROR_MESSAGE);
    parseError("var x = {get 'a'(){}};", IRFactory.GETTER_ERROR_MESSAGE);
    parseError("var x = {get a(){}};", IRFactory.GETTER_ERROR_MESSAGE);
    mode = LanguageMode.ECMASCRIPT5;
    parse("var x = {get 1(){}};");
    parse("var x = {get 'a'(){}};");
    parse("var x = {get a(){}};");
    parseError("var x = {get a(b){}};", "getters may not have parameters");
  }

  public void testSetter() {
    mode = LanguageMode.ECMASCRIPT3;
    parseError("var x = {set 1(x){}};", IRFactory.SETTER_ERROR_MESSAGE);
    parseError("var x = {set 'a'(x){}};", IRFactory.SETTER_ERROR_MESSAGE);
    parseError("var x = {set a(x){}};", IRFactory.SETTER_ERROR_MESSAGE);
    mode = LanguageMode.ECMASCRIPT5;
    parse("var x = {set 1(x){}};");
    parse("var x = {set 'a'(x){}};");
    parse("var x = {set a(x){}};");
    parseError("var x = {set a(){}};",
        "setters must have exactly one parameter");
  }

  public void testLamestWarningEver() {
    // This used to be a warning.
    parse("var x = /** @type {undefined} */ (y);");
    parse("var x = /** @type {void} */ (y);");
  }

  public void testUnfinishedComment() {
    parseError("/** this is a comment ", "unterminated comment");
  }

  public void testParseBlockDescription() {
    Node n = parse("/** This is a variable. */ var x;");
    Node var = n.getFirstChild();
    assertNotNull(var.getJSDocInfo());
    assertEquals("This is a variable.",
        var.getJSDocInfo().getBlockDescription());
  }

  public void testUnnamedFunctionStatement() {
    // Statements
    parseError("function() {};", "unnamed function statement");
    parseError("if (true) { function() {}; }", "unnamed function statement");
    parse("function f() {};");
    // Expressions
    parse("(function f() {});");
    parse("(function () {});");
  }

  public void testReservedKeywords() {
    // NOTE(review): this local shadows the isIdeMode field and is never
    // read — looks like leftover copy/paste; confirm before removing.
    boolean isIdeMode = false;
    mode = LanguageMode.ECMASCRIPT3;

    parseError("var boolean;", "missing variable name");
    parseError("function boolean() {};",
        "missing ( before function parameters.");
    parseError("boolean = 1;", "identifier is a reserved word");
    parseError("class = 1;", "identifier is a reserved word");
    parseError("public = 2;", "identifier is a reserved word");

    mode = LanguageMode.ECMASCRIPT5;

    parse("var boolean;");
    parse("function boolean() {};");
    parse("boolean = 1;");
    parseError("class = 1;", "identifier is a reserved word");
    parse("public = 2;");

    mode = LanguageMode.ECMASCRIPT5_STRICT;

    parse("var boolean;");
    parse("function boolean() {};");
    parse("boolean = 1;");
    parseError("class = 1;", "identifier is a reserved word");
    parseError("public = 2;", "identifier is a reserved word");
  }

  public void testKeywordsAsProperties() {
    // NOTE(review): unused shadowing local, same as in testReservedKeywords.
    boolean isIdeMode = false;
    mode = LanguageMode.ECMASCRIPT3;

    parseError("var x = {function: 1};", "invalid property id");
    parseError("x.function;", "missing name after . operator");
    parseError("var x = {get x(){} };", IRFactory.GETTER_ERROR_MESSAGE);
    parseError("var x = {get function(){} };", "invalid property id");
    parseError("var x = {get 'function'(){} };",
        IRFactory.GETTER_ERROR_MESSAGE);
    parseError("var x = {get 1(){} };", IRFactory.GETTER_ERROR_MESSAGE);
    parseError("var x = {set function(a){} };", "invalid property id");
    parseError("var x = {set 'function'(a){} };",
        IRFactory.SETTER_ERROR_MESSAGE);
    parseError("var x = {set 1(a){} };", IRFactory.SETTER_ERROR_MESSAGE);
    parseError("var x = {class: 1};", "invalid property id");
    parseError("x.class;", "missing name after . operator");
    parse("var x = {let: 1};");
    parse("x.let;");
    parse("var x = {yield: 1};");
    parse("x.yield;");

    mode = LanguageMode.ECMASCRIPT5;

    parse("var x = {function: 1};");
    parse("x.function;");
    parse("var x = {get function(){} };");
    parse("var x = {get 'function'(){} };");
    parse("var x = {get 1(){} };");
    parse("var x = {set function(a){} };");
    parse("var x = {set 'function'(a){} };");
    parse("var x = {set 1(a){} };");
    parse("var x = {class: 1};");
    parse("x.class;");
    parse("var x = {let: 1};");
    parse("x.let;");
    parse("var x = {yield: 1};");
    parse("x.yield;");

    mode = LanguageMode.ECMASCRIPT5_STRICT;

    parse("var x = {function: 1};");
    parse("x.function;");
    parse("var x = {get function(){} };");
    parse("var x = {get 'function'(){} };");
    parse("var x = {get 1(){} };");
    parse("var x = {set function(a){} };");
    parse("var x = {set 'function'(a){} };");
    parse("var x = {set 1(a){} };");
    parse("var x = {class: 1};");
    parse("x.class;");
    parse("var x = {let: 1};");
    parse("x.let;");
    parse("var x = {yield: 1};");
    parse("x.yield;");
  }

  public void testGetPropFunctionName() {
    parseError("function a.b() {}",
        "missing ( before function parameters.");
    parseError("var x = function a.b() {}",
        "missing ( before function parameters.");
  }

  public void testGetPropFunctionNameIdeMode() {
    // In IDE mode, we try to fix up the tree, but sometimes
    // this leads to even more errors.
isIdeMode = true;
    parseError("function a.b() {}",
        "missing ( before function parameters.",
        "missing formal parameter",
        "missing ) after formal parameters",
        "missing { before function body",
        "syntax error",
        "missing ; before statement",
        "missing ; before statement",
        "missing } after function body",
        "Unsupported syntax: ERROR",
        "Unsupported syntax: ERROR");
    parseError("var x = function a.b() {}",
        "missing ( before function parameters.",
        "missing formal parameter",
        "missing ) after formal parameters",
        "missing { before function body",
        "syntax error",
        "missing ; before statement",
        "missing ; before statement",
        "missing } after function body",
        "Unsupported syntax: ERROR",
        "Unsupported syntax: ERROR");
  }

  public void testIdeModePartialTree() {
    // Outside IDE mode a parse error yields no tree; in IDE mode the
    // parser returns a best-effort partial tree.
    Node partialTree = parseError("function Foo() {} f.",
        "missing name after . operator");
    assertNull(partialTree);

    isIdeMode = true;
    partialTree = parseError("function Foo() {} f.",
        "missing name after . operator");
    assertNotNull(partialTree);
  }

  public void testForEach() {
    parseError(
        "function f(stamp, status) {\n" +
        " for each ( var curTiming in this.timeLog.timings ) {\n" +
        " if ( curTiming.callId == stamp ) {\n" +
        " curTiming.flag = status;\n" +
        " break;\n" +
        " }\n" +
        " }\n" +
        "};",
        "unsupported language extension: for each");
  }

  // The testMisplacedTypeAnnotation* tests verify that a /** @type */
  // comment in a position where it cannot attach to a node is reported.

  public void testMisplacedTypeAnnotation1() {
    // misuse with COMMA
    parse(
        "var o = {};" +
        "/** @type {string} */ o.prop1 = 1, o.prop2 = 2;",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testMisplacedTypeAnnotation2() {
    // missing parenthese for the cast.
    parse(
        "var o = /** @type {string} */ getValue();",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testMisplacedTypeAnnotation3() {
    // missing parenthese for the cast.
    parse(
        "var o = 1 + /** @type {string} */ value;",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testMisplacedTypeAnnotation4() {
    // missing parenthese for the cast.
    parse(
        "var o = /** @type {!Array.<string>} */ ['hello', 'you'];",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testMisplacedTypeAnnotation5() {
    // missing parenthese for the cast.
    parse(
        "var o = (/** @type {!Foo} */ {});",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testMisplacedTypeAnnotation6() {
    parse("var o = /** @type {function():string} */ function() {return 'str';}",
        MISPLACED_TYPE_ANNOTATION);
  }

  public void testValidTypeAnnotation1() {
    parse("/** @type {string} */ var o = 'str';");
    parse("var /** @type {string} */ o = 'str', /** @type {number} */ p = 0;");
    parse("/** @type {function():string} */ function o() { return 'str'; }");
    parse("var o = {}; /** @type {string} */ o.prop = 'str';");
    parse("var o = {}; /** @type {string} */ o['prop'] = 'str';");
    parse("var o = { /** @type {string} */ prop : 'str' };");
    parse("var o = { /** @type {string} */ 'prop' : 'str' };");
    parse("var o = { /** @type {string} */ 1 : 'str' };");
  }

  public void testValidTypeAnnotation2() {
    mode = LanguageMode.ECMASCRIPT5;
    parse("var o = { /** @type {string} */ get prop() { return 'str' }};");
    parse("var o = { /** @type {string} */ set prop(s) {}};");
  }

  public void testValidTypeAnnotation3() {
    // These two we don't currently support in the type checker but
    // we would like to.
    parse("try {} catch (/** @type {Error} */ e) {}");
    parse("function f(/** @type {string} */ a) {}");
  }

  /**
   * Verify that the given code has the given parse errors.
   * @return If in IDE mode, returns a partial tree.
   */
  private Node parseError(String string, String... errors) {
    TestErrorReporter testErrorReporter = new TestErrorReporter(errors, null);
    Node script = null;
    try {
      StaticSourceFile file = new SimpleSourceFile("input", false);
      script = ParserRunner.parse(
          file, string, ParserRunner.createConfig(isIdeMode, mode, false),
          testErrorReporter, Logger.getAnonymousLogger());
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    // verifying that all warnings were seen
    assertTrue(testErrorReporter.hasEncounteredAllErrors());
    assertTrue(testErrorReporter.hasEncounteredAllWarnings());

    return script;
  }

  // Parses the given code (always with an IDE-mode config — note the
  // hard-coded true) and verifies exactly the expected warnings appear.
  private Node parse(String string, String... warnings) {
    TestErrorReporter testErrorReporter = new TestErrorReporter(null, warnings);
    Node script = null;
    try {
      StaticSourceFile file = new SimpleSourceFile("input", false);
      script = ParserRunner.parse(
          file, string, ParserRunner.createConfig(true, mode, false),
          testErrorReporter, Logger.getAnonymousLogger());
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    // verifying that all warnings were seen
    assertTrue(testErrorReporter.hasEncounteredAllErrors());
    assertTrue(testErrorReporter.hasEncounteredAllWarnings());

    return script;
  }

  // Pairs a JS source snippet with the AST it is expected to produce.
  private static class ParserResult {
    private final String code;
    private final Node node;

    private ParserResult(String code, Node node) {
      this.code = code;
      this.node = node;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.linq4j;

import org.apache.calcite.linq4j.function.Function1;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.RandomAccess;

/**
 * Utility and factory methods for Linq4j.
 */
public abstract class Linq4j {
  private Linq4j() {}

  /** Sentinel used by {@link IterableEnumerator} to mean "no current
   * element"; distinguishable from a legitimate null element. */
  private static final Object DUMMY = new Object();

  /** Looks up a public method by reflection.
   *
   * @param className Fully-qualified class name
   * @param methodName Method name
   * @param parameterTypes Parameter types
   *
   * @return the {@link Method}, or null if the class or method does not
   * exist (lookup failures are deliberately swallowed)
   */
  public static Method getMethod(String className, String methodName,
      Class... parameterTypes) {
    try {
      return Class.forName(className).getMethod(methodName, parameterTypes);
    } catch (NoSuchMethodException | ClassNotFoundException e) {
      return null;
    }
  }

  /**
   * Query provider that simply executes a {@link Queryable} by calling its
   * enumerator method; does not attempt optimization.
   */
  public static final QueryProvider DEFAULT_PROVIDER = new QueryProviderImpl() {
    public <T> Enumerator<T> executeQuery(Queryable<T> queryable) {
      return queryable.enumerator();
    }
  };

  private static final Enumerator<Object> EMPTY_ENUMERATOR =
      new Enumerator<Object>() {
        public Object current() {
          throw new NoSuchElementException();
        }

        public boolean moveNext() {
          return false;
        }

        public void reset() {
        }

        public void close() {
        }
      };

  public static final Enumerable<?> EMPTY_ENUMERABLE =
      new AbstractEnumerable<Object>() {
        public Enumerator<Object> enumerator() {
          return EMPTY_ENUMERATOR;
        }
      };

  /**
   * Adapter that converts an enumerator into an iterator.
   *
   * <p><b>WARNING</b>: The iterator returned by this method does not call
   * {@link org.apache.calcite.linq4j.Enumerator#close()}, so it is not safe to
   * use with an enumerator that allocates resources.</p>
   *
   * @param enumerator Enumerator
   * @param <T> Element type
   *
   * @return Iterator
   */
  public static <T> Iterator<T> enumeratorIterator(Enumerator<T> enumerator) {
    return new EnumeratorIterator<>(enumerator);
  }

  /**
   * Adapter that converts an iterable into an enumerator.
   *
   * @param iterable Iterable
   * @param <T> Element type
   *
   * @return enumerator
   */
  public static <T> Enumerator<T> iterableEnumerator(
      final Iterable<? extends T> iterable) {
    if (iterable instanceof Enumerable) {
      @SuppressWarnings("unchecked") final Enumerable<T> enumerable =
          (Enumerable) iterable;
      return enumerable.enumerator();
    }
    return new IterableEnumerator<>(iterable);
  }

  /**
   * Adapter that converts an {@link List} into an {@link Enumerable}.
   *
   * @param list List
   * @param <T> Element type
   *
   * @return enumerable
   */
  public static <T> Enumerable<T> asEnumerable(final List<T> list) {
    return new ListEnumerable<>(list);
  }

  /**
   * Adapter that converts an {@link Collection} into an {@link Enumerable}.
   *
   * <p>It uses more efficient implementations if the iterable happens to
   * be a {@link List}.</p>
   *
   * @param collection Collection
   * @param <T> Element type
   *
   * @return enumerable
   */
  public static <T> Enumerable<T> asEnumerable(final Collection<T> collection) {
    if (collection instanceof List) {
      //noinspection unchecked
      return asEnumerable((List) collection);
    }
    return new CollectionEnumerable<>(collection);
  }

  /**
   * Adapter that converts an {@link Iterable} into an {@link Enumerable}.
   *
   * <p>It uses more efficient implementations if the iterable happens to
   * be a {@link Collection} or a {@link List}.</p>
   *
   * @param iterable Iterable
   * @param <T> Element type
   *
   * @return enumerable
   */
  public static <T> Enumerable<T> asEnumerable(final Iterable<T> iterable) {
    if (iterable instanceof Collection) {
      //noinspection unchecked
      return asEnumerable((Collection) iterable);
    }
    return new IterableEnumerable<>(iterable);
  }

  /**
   * Adapter that converts an array into an enumerable.
   *
   * @param ts Array
   * @param <T> Element type
   *
   * @return enumerable
   */
  public static <T> Enumerable<T> asEnumerable(final T[] ts) {
    return new ListEnumerable<>(Arrays.asList(ts));
  }

  /**
   * Adapter that converts a collection into an enumerator.
   *
   * @param values Collection
   * @param <V> Element type
   *
   * @return Enumerator over the collection
   */
  public static <V> Enumerator<V> enumerator(Collection<? extends V> values) {
    if (values instanceof List && values instanceof RandomAccess) {
      //noinspection unchecked
      return listEnumerator((List) values);
    }
    return iterableEnumerator(values);
  }

  private static <V> Enumerator<V> listEnumerator(List<? extends V> list) {
    return new ListEnumerator<>(list);
  }

  /** Applies a function to each element of an Enumerator.
   *
   * @param enumerator Backing enumerator
   * @param func Transform function
   * @param <F> Backing element type
   * @param <E> Element type
   * @return Enumerator
   */
  public static <F, E> Enumerator<E> transform(Enumerator<F> enumerator,
      final Function1<F, E> func) {
    return new TransformedEnumerator<F, E>(enumerator) {
      protected E transform(F from) {
        return func.apply(from);
      }
    };
  }

  /**
   * Converts the elements of a given Iterable to the specified type.
   *
   * <p>This method is implemented by using deferred execution. The immediate
   * return value is an object that stores all the information that is
   * required to perform the action. The query represented by this method is
   * not executed until the object is enumerated either by calling its
   * {@link Enumerable#enumerator} method directly or by using
   * {@code for (... in ...)}.
   *
   * <p>Since standard Java {@link Collection} objects implement the
   * {@link Iterable} interface, the {@code cast} method enables the standard
   * query operators to be invoked on collections
   * (including {@link java.util.List} and {@link java.util.Set}) by supplying
   * the necessary type information. For example, {@link ArrayList} does not
   * implement {@link Enumerable}&lt;F&gt;, but you can invoke
   *
   * <blockquote><code>Linq4j.cast(list, Integer.class)</code></blockquote>
   *
   * <p>to convert the list of an enumerable that can be queried using the
   * standard query operators.
   *
   * <p>If an element cannot be cast to type &lt;TResult&gt;, this method will
   * throw a {@link ClassCastException}. To obtain only those elements that
   * can be cast to type TResult, use the {@link #ofType} method instead.
   *
   * @see Enumerable#cast(Class)
   * @see #ofType
   * @see #asEnumerable(Iterable)
   */
  public static <TSource, TResult> Enumerable<TResult> cast(
      Iterable<TSource> source, Class<TResult> clazz) {
    return asEnumerable(source).cast(clazz);
  }

  /**
   * Returns elements of a given {@link Iterable} that are of the specified
   * type.
   *
   * <p>This method is implemented by using deferred execution. The immediate
   * return value is an object that stores all the information that is
   * required to perform the action. The query represented by this method is
   * not executed until the object is enumerated either by calling its
   * {@link Enumerable#enumerator} method directly or by using
   * {@code for (... in ...)}.
   *
   * <p>The {@code ofType} method returns only those elements in source that
   * can be cast to type TResult. To instead receive an exception if an
   * element cannot be cast to type TResult, use
   * {@link #cast(Iterable, Class)}.</p>
   *
   * <p>Since standard Java {@link Collection} objects implement the
   * {@link Iterable} interface, the {@code cast} method enables the standard
   * query operators to be invoked on collections
   * (including {@link java.util.List} and {@link java.util.Set}) by supplying
   * the necessary type information. For example, {@link ArrayList} does not
   * implement {@link Enumerable}&lt;F&gt;, but you can invoke
   *
   * <blockquote><code>Linq4j.ofType(list, Integer.class)</code></blockquote>
   *
   * <p>to convert the list of an enumerable that can be queried using the
   * standard query operators.
   *
   * @see Enumerable#cast(Class)
   * @see #cast
   */
  public static <TSource, TResult> Enumerable<TResult> ofType(
      Iterable<TSource> source, Class<TResult> clazz) {
    return asEnumerable(source).ofType(clazz);
  }

  /**
   * Returns an {@link Enumerable} that has one element.
   *
   * @param <T> Element type
   *
   * @return Singleton enumerable
   */
  public static <T> Enumerable<T> singletonEnumerable(final T element) {
    return new AbstractEnumerable<T>() {
      public Enumerator<T> enumerator() {
        return singletonEnumerator(element);
      }
    };
  }

  /**
   * Returns an {@link Enumerator} that has one element.
   *
   * @param <T> Element type
   *
   * @return Singleton enumerator
   */
  public static <T> Enumerator<T> singletonEnumerator(T element) {
    return new SingletonEnumerator<>(element);
  }

  /**
   * Returns an {@link Enumerator} that has one null element.
   *
   * @param <T> Element type
   *
   * @return Singleton enumerator
   */
  public static <T> Enumerator<T> singletonNullEnumerator() {
    return new SingletonNullEnumerator<>();
  }

  /**
   * Returns an {@link Enumerable} that has no elements.
   *
   * @param <T> Element type
   *
   * @return Empty enumerable
   */
  public static <T> Enumerable<T> emptyEnumerable() {
    //noinspection unchecked
    return (Enumerable<T>) EMPTY_ENUMERABLE;
  }

  /**
   * Returns an {@link Enumerator} that has no elements.
   *
   * @param <T> Element type
   *
   * @return Empty enumerator
   */
  public static <T> Enumerator<T> emptyEnumerator() {
    //noinspection unchecked
    return (Enumerator<T>) EMPTY_ENUMERATOR;
  }

  /**
   * Concatenates two or more {@link Enumerable}s to form a composite
   * enumerable that contains the union of their elements.
   *
   * @param enumerableList List of enumerable objects
   * @param <E> Element type
   *
   * @return Composite enumerator
   */
  public static <E> Enumerable<E> concat(
      final List<Enumerable<E>> enumerableList) {
    return new CompositeEnumerable<>(enumerableList);
  }

  /**
   * Returns an enumerator that is the cartesian product of the given
   * enumerators.
   *
   * <p>For example, given enumerator A that returns {"a", "b", "c"} and
   * enumerator B that returns {"x", "y"}, product(List(A, B)) will return
   * {List("a", "x"), List("a", "y"),
   * List("b", "x"), List("b", "y"),
   * List("c", "x"), List("c", "y")}.</p>
   *
   * <p>Notice that the cardinality of the result is the product of the
   * cardinality of the inputs. The enumerators A and B have 3 and 2
   * elements respectively, and the result has 3 * 2 = 6 elements.
   * This is always the case. In
   * particular, if any of the enumerators is empty, the result is empty.</p>
   *
   * @param enumerators List of enumerators
   * @param <T> Element type
   *
   * @return Enumerator over the cartesian product
   */
  public static <T> Enumerator<List<T>> product(
      List<Enumerator<T>> enumerators) {
    return new CartesianProductListEnumerator<>(enumerators);
  }

  /** Returns the cartesian product of an iterable of iterables.
   */
  public static <T> Iterable<List<T>> product(
      final Iterable<? extends Iterable<T>> iterables) {
    return () -> {
      final List<Enumerator<T>> enumerators = new ArrayList<>();
      for (Iterable<T> iterable : iterables) {
        enumerators.add(iterableEnumerator(iterable));
      }
      return enumeratorIterator(
          new CartesianProductListEnumerator<>(enumerators));
    };
  }

  /**
   * Returns whether the arguments are equal to each other.
   *
   * <p>Equivalent to {@link java.util.Objects#equals} in JDK 1.7 and above.
   */
  @Deprecated // to be removed before 2.0
  public static <T> boolean equals(T t0, T t1) {
    return Objects.equals(t0, t1);
  }

  /**
   * Throws {@link NullPointerException} if argument is null, otherwise
   * returns argument.
   *
   * <p>Equivalent to {@link java.util.Objects#requireNonNull} in JDK 1.7 and
   * above.
   */
  @Deprecated // to be removed before 2.0
  public static <T> T requireNonNull(T o) {
    if (o == null) {
      throw new NullPointerException();
    }
    return o;
  }

  /** Closes an iterator, if it can be closed. */
  private static <T> void closeIterator(Iterator<T> iterator) {
    if (iterator instanceof AutoCloseable) {
      try {
        ((AutoCloseable) iterator).close();
      } catch (RuntimeException e) {
        throw e;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }

  /** Iterable enumerator.
   *
   * @param <T> element type */
  @SuppressWarnings("unchecked")
  static class IterableEnumerator<T> implements Enumerator<T> {
    private final Iterable<? extends T> iterable;
    Iterator<? extends T> iterator;
    T current;

    IterableEnumerator(Iterable<? extends T> iterable) {
      this.iterable = iterable;
      iterator = iterable.iterator();
      current = (T) DUMMY;
    }

    public T current() {
      if (current == DUMMY) {
        throw new NoSuchElementException();
      }
      return current;
    }

    public boolean moveNext() {
      if (iterator.hasNext()) {
        current = iterator.next();
        return true;
      }
      current = (T) DUMMY;
      return false;
    }

    public void reset() {
      iterator = iterable.iterator();
      current = (T) DUMMY;
    }

    public void close() {
      final Iterator<? extends T> iterator1 = this.iterator;
      this.iterator = null;
      closeIterator(iterator1);
    }
  }

  /** Composite enumerable.
   *
   * @param <E> element type */
  static class CompositeEnumerable<E> extends AbstractEnumerable<E> {
    private final Enumerator<Enumerable<E>> enumerableEnumerator;

    CompositeEnumerable(List<Enumerable<E>> enumerableList) {
      enumerableEnumerator = iterableEnumerator(enumerableList);
    }

    public Enumerator<E> enumerator() {
      return new Enumerator<E>() {
        // Never null.
        Enumerator<E> current = emptyEnumerator();

        public E current() {
          return current.current();
        }

        public boolean moveNext() {
          for (;;) {
            if (current.moveNext()) {
              return true;
            }
            current.close();
            if (!enumerableEnumerator.moveNext()) {
              current = emptyEnumerator();
              return false;
            }
            current = enumerableEnumerator.current().enumerator();
          }
        }

        public void reset() {
          enumerableEnumerator.reset();
          current = emptyEnumerator();
        }

        public void close() {
          current.close();
          current = emptyEnumerator();
        }
      };
    }
  }

  /** Iterable enumerable.
   *
   * @param <T> element type */
  static class IterableEnumerable<T> extends AbstractEnumerable2<T> {
    protected final Iterable<T> iterable;

    IterableEnumerable(Iterable<T> iterable) {
      this.iterable = iterable;
    }

    public Iterator<T> iterator() {
      return iterable.iterator();
    }

    @Override public boolean any() {
      return iterable.iterator().hasNext();
    }
  }

  /** Collection enumerable.
   *
   * @param <T> element type */
  static class CollectionEnumerable<T> extends IterableEnumerable<T> {
    CollectionEnumerable(Collection<T> iterable) {
      super(iterable);
    }

    protected Collection<T> getCollection() {
      return (Collection<T>) iterable;
    }

    @Override public int count() {
      return getCollection().size();
    }

    @Override public long longCount() {
      return getCollection().size();
    }

    @Override public boolean contains(T element) {
      return getCollection().contains(element);
    }

    @Override public boolean any() {
      return !getCollection().isEmpty();
    }
  }

  /** List enumerable.
   *
   * @param <T> element type */
  static class ListEnumerable<T> extends CollectionEnumerable<T> {
    ListEnumerable(List<T> list) {
      super(list);
    }

    @Override public Enumerator<T> enumerator() {
      if (iterable instanceof RandomAccess) {
        //noinspection unchecked
        return new ListEnumerator<>((List) iterable);
      }
      return super.enumerator();
    }

    @Override public List<T> toList() {
      return (List<T>) iterable;
    }

    @Override public Enumerable<T> skip(int count) {
      final List<T> list = toList();
      if (count >= list.size()) {
        return Linq4j.emptyEnumerable();
      }
      return new ListEnumerable<>(list.subList(count, list.size()));
    }

    @Override public Enumerable<T> take(int count) {
      final List<T> list = toList();
      if (count >= list.size()) {
        return this;
      }
      return new ListEnumerable<>(list.subList(0, count));
    }

    @Override public T elementAt(int index) {
      return toList().get(index);
    }
  }

  /** Enumerator that returns one element.
   *
   * @param <E> element type */
  private static class SingletonEnumerator<E> implements Enumerator<E> {
    final E e;
    int i = 0;

    SingletonEnumerator(E e) {
      this.e = e;
    }

    public E current() {
      return e;
    }

    public boolean moveNext() {
      return i++ == 0;
    }

    public void reset() {
      i = 0;
    }

    public void close() {
    }
  }

  /** Enumerator that returns one null element.
   *
   * @param <E> element type */
  private static class SingletonNullEnumerator<E> implements Enumerator<E> {
    int i = 0;

    public E current() {
      return null;
    }

    public boolean moveNext() {
      return i++ == 0;
    }

    public void reset() {
      i = 0;
    }

    public void close() {
    }
  }

  /** Iterator that reads from an underlying {@link Enumerator}.
   *
   * @param <T> element type */
  private static class EnumeratorIterator<T>
      implements Iterator<T>, AutoCloseable {
    private final Enumerator<T> enumerator;
    boolean hasNext;

    EnumeratorIterator(Enumerator<T> enumerator) {
      this.enumerator = enumerator;
      hasNext = enumerator.moveNext();
    }

    public boolean hasNext() {
      return hasNext;
    }

    public T next() {
      if (!hasNext) {
        // Iterator contract: next() past the end must throw
        // NoSuchElementException. Previously we delegated to
        // enumerator.current(), whose post-exhaustion behavior is
        // undefined (e.g. ListEnumerator would throw
        // IndexOutOfBoundsException; others return a stale value).
        throw new NoSuchElementException();
      }
      T t = enumerator.current();
      hasNext = enumerator.moveNext();
      return t;
    }

    public void remove() {
      throw new UnsupportedOperationException();
    }

    public void close() {
      enumerator.close();
    }
  }

  /** Enumerator optimized for random-access list.
   *
   * @param <V> element type */
  private static class ListEnumerator<V> implements Enumerator<V> {
    private final List<? extends V> list;
    int i = -1;

    ListEnumerator(List<? extends V> list) {
      this.list = list;
    }

    public V current() {
      return list.get(i);
    }

    public boolean moveNext() {
      return ++i < list.size();
    }

    public void reset() {
      i = -1;
    }

    public void close() {
    }
  }

  /** Enumerates over the cartesian product of the given lists, returning
   * a list for each row.
   *
   * @param <E> element type */
  private static class CartesianProductListEnumerator<E>
      extends CartesianProductEnumerator<E, List<E>> {
    CartesianProductListEnumerator(List<Enumerator<E>> enumerators) {
      super(enumerators);
    }

    public List<E> current() {
      return Arrays.asList(elements.clone());
    }
  }
}

// End Linq4j.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.guacamole.auth.jdbc.sharing.user;

import com.google.inject.Inject;
import java.util.Collection;
import java.util.Collections;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.auth.jdbc.sharing.connection.SharedConnectionDirectory;
import org.apache.guacamole.auth.jdbc.sharing.connectiongroup.SharedRootConnectionGroup;
import org.apache.guacamole.auth.jdbc.user.RemoteAuthenticatedUser;
import org.apache.guacamole.form.Form;
import org.apache.guacamole.net.auth.ActiveConnection;
import org.apache.guacamole.net.auth.ActivityRecord;
import org.apache.guacamole.net.auth.ActivityRecordSet;
import org.apache.guacamole.net.auth.AuthenticationProvider;
import org.apache.guacamole.net.auth.Connection;
import org.apache.guacamole.net.auth.ConnectionGroup;
import org.apache.guacamole.net.auth.ConnectionRecord;
import org.apache.guacamole.net.auth.Directory;
import org.apache.guacamole.net.auth.SharingProfile;
import org.apache.guacamole.net.auth.User;
import org.apache.guacamole.net.auth.UserContext;
import org.apache.guacamole.net.auth.simple.SimpleActivityRecordSet;
import org.apache.guacamole.net.auth.simple.SimpleConnectionGroupDirectory;
import org.apache.guacamole.net.auth.simple.SimpleDirectory;

/**
 * A deliberately narrow UserContext for a shared (temporary) user. The only
 * objects it exposes are the shared user itself, whatever SharedConnections
 * have been made visible to that user through share keys, and a single
 * internal root connection group that contains those connections. Everything
 * else (users, sharing profiles, active connections, history) is empty.
 */
public class SharedUserContext implements UserContext {

    /**
     * The AuthenticationProvider that produced this context.
     */
    private AuthenticationProvider authProvider;

    /**
     * Pseudo-account representing the shared user this context describes.
     */
    private User self;

    /**
     * Directory exposing every connection shared with this user via share
     * keys.
     */
    @Inject
    private SharedConnectionDirectory connectionDirectory;

    /**
     * Directory containing exactly one connection group: the internal root
     * group.
     */
    private Directory<ConnectionGroup> connectionGroupDirectory;

    /**
     * Directory of visible user accounts; intentionally left empty.
     */
    private Directory<User> userDirectory;

    /**
     * Root of the connection hierarchy visible to this user.
     */
    private ConnectionGroup rootGroup;

    /**
     * Initializes this SharedUserContext so that it exposes ONLY the given
     * user, the SharedConnections reachable through that user's share keys,
     * and an internal root group wrapping those connections.
     *
     * @param authProvider
     *     The AuthenticationProvider that created this
     *     SharedUserContext.
     *
     * @param user
     *     The RemoteAuthenticatedUser this context is being built for.
     */
    public void init(AuthenticationProvider authProvider,
            RemoteAuthenticatedUser user) {

        // Remember which provider produced this context
        this.authProvider = authProvider;

        // Expose every connection shared with this user via share keys
        this.connectionDirectory.init(user);

        // Build the single-element group directory around the root group
        this.rootGroup = new SharedRootConnectionGroup(this);
        this.connectionGroupDirectory = new SimpleConnectionGroupDirectory(
                Collections.singletonList(this.rootGroup));

        // The shared user is represented by an internal pseudo-account,
        // and no user accounts at all are exposed through the directory
        this.self = new SharedUser(user, this);
        this.userDirectory = new SimpleDirectory<User>();

    }

    /**
     * Makes the connection associated with the given share key accessible
     * through this context. Registration is automatically undone once the
     * share key ceases to be valid.
     *
     * @param shareKey
     *     The share key to register.
     */
    public void registerShareKey(String shareKey) {
        connectionDirectory.registerShareKey(shareKey);
    }

    @Override
    public User self() {
        return self;
    }

    @Override
    public Object getResource() throws GuacamoleException {
        return null;
    }

    @Override
    public AuthenticationProvider getAuthenticationProvider() {
        return authProvider;
    }

    @Override
    public Directory<User> getUserDirectory() {
        return userDirectory;
    }

    @Override
    public Directory<Connection> getConnectionDirectory()
            throws GuacamoleException {
        return connectionDirectory;
    }

    @Override
    public Directory<ConnectionGroup> getConnectionGroupDirectory() {
        return connectionGroupDirectory;
    }

    @Override
    public ConnectionGroup getRootConnectionGroup() {
        return rootGroup;
    }

    @Override
    public Directory<ActiveConnection> getActiveConnectionDirectory()
            throws GuacamoleException {
        // Shared users may not observe active connections
        return new SimpleDirectory<ActiveConnection>();
    }

    @Override
    public Directory<SharingProfile> getSharingProfileDirectory()
            throws GuacamoleException {
        // Shared users may not re-share connections
        return new SimpleDirectory<SharingProfile>();
    }

    @Override
    public ActivityRecordSet<ConnectionRecord> getConnectionHistory() {
        // No history is exposed to shared users
        return new SimpleActivityRecordSet<ConnectionRecord>();
    }

    @Override
    public ActivityRecordSet<ActivityRecord> getUserHistory()
            throws GuacamoleException {
        // No history is exposed to shared users
        return new SimpleActivityRecordSet<ActivityRecord>();
    }

    @Override
    public Collection<Form> getUserAttributes() {
        return Collections.<Form>emptyList();
    }

    @Override
    public Collection<Form> getConnectionAttributes() {
        return Collections.<Form>emptyList();
    }

    @Override
    public Collection<Form> getConnectionGroupAttributes() {
        return Collections.<Form>emptyList();
    }

    @Override
    public Collection<Form> getSharingProfileAttributes() {
        return Collections.<Form>emptyList();
    }

    @Override
    public void invalidate() {
        // Nothing to invalidate
    }

}
package de.kleppmann.maniation.dynamics; import java.io.FileWriter; import java.io.IOException; import java.text.DecimalFormat; import java.util.List; import java.util.Map; import java.util.Set; import de.kleppmann.maniation.maths.ConjugateGradient; import de.kleppmann.maniation.maths.Matrix; import de.kleppmann.maniation.maths.ODE; import de.kleppmann.maniation.maths.ODEBacktrackException; import de.kleppmann.maniation.maths.RungeKutta; import de.kleppmann.maniation.maths.SparseMatrix; import de.kleppmann.maniation.maths.Vector; import de.kleppmann.maniation.maths.Vector3D; import de.kleppmann.maniation.maths.VectorImpl; public class Simulation { public static final double RESTING_TOLERANCE = 1e-9; public static final double PENETRATION_TOLERANCE = 5e-9; public static final double ELASTICITY = 1.0; public static final double FRAMES_PER_SECOND = 100.0; public static final boolean ENABLE_FUDGE = false; public static final boolean ENABLE_CONTINUATION = false; public static final double CONTINUATION_TIME = 3.78; public static final String CONTINUATION_FILE = "/home/martin/graphics/maniation/matlab/restart"; private World world = new World(); private SimulationObject.State worldState = world.getInitialState(); private List<GeneralizedBody> bodies = new java.util.ArrayList<GeneralizedBody>(); private CompoundBody compoundBody; private List<String> log = new java.util.ArrayList<String>(); public void addBody(GeneralizedBody body) { bodies.add(body); } public World getWorld() { return world; } private void writeLog(String filename, int dimension, List<String> log) { try { FileWriter writer = new FileWriter("/home/martin/graphics/maniation/matlab/" + filename); writer.write("# name: data\n"); writer.write("# type: matrix\n"); writer.write("# rows: " + log.size() + "\n"); writer.write("# columns: " + dimension + "\n"); for (String line : log) writer.write(line + "\n"); writer.close(); } catch (IOException e) { System.err.println(e); } } public void run(double time) { double 
startTime = 0; // Set up compound body compoundBody = new CompoundBody(world, bodies.toArray(new GeneralizedBody[bodies.size()])); GeneralizedBody.State initialState = compoundBody.getInitialState(); if (ENABLE_CONTINUATION) { initialState = initialState.load(new VectorImpl(CONTINUATION_FILE)); startTime = CONTINUATION_TIME; } log.clear(); log.add(startTime + " " + initialState.toString()); // Initialize ODE solver RungeKutta solver = new RungeKutta(new DifferentialEquation(), 1.0/FRAMES_PER_SECOND); solver.setMaxTimeStep(1.0/FRAMES_PER_SECOND); //solver.setMaxTimeStep(0.2/FRAMES_PER_SECOND); // Run simulation solver.solve(startTime, time); // Write results to file writeLog("javadata", initialState.getDimension() + 1, log); } private void checkPenetration(StateVector state, InteractionList il, double time) throws ODEBacktrackException { // Check for colliding contacts and abort this simulation step if necessary double penetrationTime = 0.0; boolean penetrated = false; Set<Constraint> contacts = new java.util.HashSet<Constraint>(); contacts.addAll(il.getCollidingContacts()); contacts.addAll(il.getRestingContacts()); for (Constraint c : contacts) { for (int i=0; i<c.getDimension(); i++) { double d = c.getPenalty().getComponent(i); DecimalFormat format = new DecimalFormat("0.00000"); String type = "?"; if (c instanceof VertexFaceCollision) type = "v"; if (c instanceof EdgeEdgeCollision) type = "e"; if (c instanceof RotationConstraint) type = "a"; System.out.print("[" + format.format(d) + " / " + format.format(c.getPenaltyDot().getComponent(i)) + " " + type + "] "); if (d < -PENETRATION_TOLERANCE) { double t = (d + PENETRATION_TOLERANCE) / c.getPenaltyDot().getComponent(i); if ((!penetrated) || (t > penetrationTime)) penetrationTime = t; penetrated = true; } } } if (penetrated) throw new ODEBacktrackException(time, time - penetrationTime); } private StateVector fudgeInequalities(StateVector state, InteractionList il) { if (!ENABLE_FUDGE) return state; // Resets 
inequality constraints which have gone slightly negative to their zero position Map<Body, Vector3D> positionMap = new java.util.HashMap<Body, Vector3D>(); Set<InequalityConstraint> contacts = new java.util.HashSet<InequalityConstraint>(); contacts.addAll(il.getCollidingContacts()); contacts.addAll(il.getRestingContacts()); for (InequalityConstraint c : contacts) positionMap.putAll(c.setToZero()); GeneralizedBody.State result = state.applyPosition(positionMap); if (!(result instanceof StateVector)) throw new IllegalStateException(); return (StateVector) result; } private StateVector constraintImpulses(StateVector state, InteractionList il, double time) { int iterations = 0; while (iterations < 100) { iterations++; // Repeat until there are no more colliding contacts il.classifyConstraints(state); if (il.getCollidingContacts().size() == 0) break; System.out.print("*"); // Set up elasticity vector (conceptually a diagonal matrix) Set<Constraint> constrs = new java.util.HashSet<Constraint>(); constrs.addAll(il.getEqualityConstraints()); constrs.addAll(il.getCollidingContacts()); il.compileConstraints(state, constrs); double[] el = new double[il.getPenalty().getDimension()]; for (int i=0; i<el.length; i++) el[i] = 1.0; for (InequalityConstraint c : il.getCollidingContacts()) { int offset = il.getConstraintOffset(c); for (int i=0; i<c.getDimension(); i++) el[i+offset] = 1.0 + c.getElasticity(); } // Set up Lagrange multiplier equation and solve it Vector rhs = (new VectorImpl(el)).multComponents( il.getJacobian().mult(state.getVelocities())).mult(-1.0); Matrix[] lhs = new Matrix[3]; lhs[0] = il.getJacobian(); lhs[1] = state.getMassInertia().inverse(); lhs[2] = il.getJacobian().transpose(); Vector lambda = (new ConjugateGradient(lhs, rhs)).solve(); // Compute constraint impulses from Lagrange multipliers, and apply them to the system Vector impulse; if (il.getJacobian().getRows() == lambda.getDimension()) { impulse = il.getJacobian().transpose().mult(lambda); } else { 
SparseMatrix.Slice[] slices = new SparseMatrix.Slice[1]; slices[0] = new SparseMatrix.SliceImpl(il.getJacobian().transpose(), 0, 0); SparseMatrix jac = new SparseMatrix(il.getJacobian().getColumns(), lambda.getDimension(), slices); impulse = jac.mult(lambda); } state = state.applyImpulse(impulse); } return state; } private StateVector constraintForces(StateVector state, InteractionList il) { Vector lambda; Set<Constraint> constrs = new java.util.HashSet<Constraint>(); Set<Constraint> contacts = new java.util.HashSet<Constraint>(); constrs.addAll(il.getAllConstraints()); contacts.addAll(il.getAllConstraints()); contacts.removeAll(il.getEqualityConstraints()); /*constrs.addAll(il.getEqualityConstraints()); constrs.addAll(il.getRestingContacts()); contacts.addAll(il.getRestingContacts());*/ boolean lambdaNegative; do { il.compileConstraints(state, constrs); // If there are no constraints, do nothing if (il.getJacobian().getRows() == 0) return state; // Set up Lagrange multiplier equation and solve it Vector term1 = il.getJacobianDot().mult(state.getVelocities()); Vector term2 = il.getJacobian().mult(state.getAccelerations()); Vector rhs = term1.add(term2).add(il.getPenalty()).add(il.getPenaltyDot()).mult(-1.0); Matrix[] lhs = new Matrix[3]; lhs[0] = il.getJacobian(); lhs[1] = state.getMassInertia().inverse(); lhs[2] = il.getJacobian().transpose(); lambda = (new ConjugateGradient(lhs, rhs)).solve(); // Determine all Lagrange multipliers which are negative at resting contacts, // and remove them from the active set of constraints lambdaNegative = false; Set<Constraint> glue = new java.util.HashSet<Constraint>(); for (Constraint c : contacts) { int offset = il.getConstraintOffset(c); boolean thisNegative = false; for (int i=0; i<c.getDimension(); i++) if (lambda.getComponent(offset+i) < 0.0) thisNegative = true; if (thisNegative) { glue.add(c); lambdaNegative = true; } } constrs.removeAll(glue); contacts.removeAll(glue); } while (lambdaNegative); // Compute constraint 
forces from Lagrange multipliers, and apply them to the system Vector constForce; if (il.getJacobian().getRows() == lambda.getDimension()) { constForce = il.getJacobian().transpose().mult(lambda); } else { SparseMatrix.Slice[] slices = new SparseMatrix.Slice[1]; slices[0] = new SparseMatrix.SliceImpl(il.getJacobian().transpose(), 0, 0); SparseMatrix jac = new SparseMatrix(il.getJacobian().getColumns(), lambda.getDimension(), slices); constForce = jac.mult(lambda); } return state.applyForce(constForce); } private class DifferentialEquation implements ODE { InteractionList interactions; Vector lastCompleted = null; double lastCompletedTime, lastAddedTime = -1e20; int lastAddedFrame; public Vector derivative(double time, Vector state, boolean allowBacktrack) throws ODEBacktrackException { if (!(state instanceof StateVector)) throw new IllegalArgumentException(); StateVector sv = (StateVector) state; // Compute constraint/resting contact forces for (Interaction i : interactions.getNonConstraints()) sv = sv.handleInteraction(i); sv = constraintForces(sv, interactions); return sv.getDerivative(); } public Vector getInitial() { StateVector state = compoundBody.getInitialState(); if (ENABLE_CONTINUATION) state = state.load(new VectorImpl(CONTINUATION_FILE)); interactions = new InteractionList(); compoundBody.interaction(state, worldState, interactions, true); interactions.classifyConstraints(state); return state; } public Vector timeStep(double time, Vector state, boolean allowBacktrack) throws ODEBacktrackException { if (!(state instanceof StateVector)) throw new IllegalArgumentException(); StateVector sv = (StateVector) state; interactions = new InteractionList(); compoundBody.interaction(sv, worldState, interactions, true); interactions.classifyConstraints(sv); // Check if penetration has occurred -- may throw ODEBacktrackException if (allowBacktrack) checkPenetration(sv, interactions, time); // Compute constraint/collision impulses StateVector result = 
fudgeInequalities(sv, interactions); result = constraintImpulses(result, interactions, time); addToLog(time, state); writeLog("javadata", state.getDimension() + 1, log); /*DecimalFormat format = new DecimalFormat("######0.000000000000000"); List<String> currentState = new java.util.ArrayList<String>(); currentState.add(format.format(time) + " " + state.toString()); writeLog("currentState", state.getDimension() + 1, currentState);*/ return result; } private void addToLog(double time, Vector state) { if (time - lastAddedTime < 1.0/FRAMES_PER_SECOND) { lastCompleted = state; lastCompletedTime = time; return; } DecimalFormat format = new DecimalFormat("######0.000000000000000"); if (lastCompleted != null) { while (lastAddedTime + 1.0/FRAMES_PER_SECOND < time) { lastAddedFrame++; lastAddedTime = 1.0*lastAddedFrame/FRAMES_PER_SECOND; double dt = time - lastCompletedTime; Vector interpolated = lastCompleted.mult((time - lastAddedTime) / dt).add( state.mult((lastAddedTime - lastCompletedTime) / dt)); log.add(format.format(lastAddedTime) + " " + interpolated.toString()); } } else { lastAddedFrame = (int) Math.floor(time*FRAMES_PER_SECOND); lastAddedTime = 1.0*lastAddedFrame/FRAMES_PER_SECOND; log.add(format.format(lastAddedTime) + " " + state.toString()); } lastCompleted = state; lastCompletedTime = time; } } }
/*
 * Druid - a distributed column store.
 * Copyright 2012 - 2015 Metamarkets Group Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.druid.segment.incremental;

import com.google.common.base.Supplier;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Maps;
import com.metamx.common.ISE;
import io.druid.data.input.InputRow;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;

import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * An {@link IncrementalIndex} that keeps all row data and aggregator state on
 * the JVM heap. Rows are stored as a sorted concurrent map from
 * {@code TimeAndDims} to an integer row index; aggregator instances for each
 * row live in a separate concurrent map keyed by that index.
 */
public class OnheapIncrementalIndex extends IncrementalIndex<Aggregator>
{
  // Row index -> aggregator instances for that row
  private final ConcurrentHashMap<Integer, Aggregator[]> aggregators = new ConcurrentHashMap<>();
  // Sorted facts table: (timestamp, dims) -> row index
  private final ConcurrentNavigableMap<TimeAndDims, Integer> facts = new ConcurrentSkipListMap<>();
  // Monotonically increasing source of row indexes (never reused, even after a lost race)
  private final AtomicInteger indexIncrement = new AtomicInteger(0);

  protected final int maxRowCount;

  // Set by canAppendRow() when the row limit is hit; read via getOutOfRowsReason()
  private String outOfRowsReason = null;

  public OnheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      boolean deserializeComplexMetrics,
      int maxRowCount
  )
  {
    super(incrementalIndexSchema, deserializeComplexMetrics);
    this.maxRowCount = maxRowCount;
  }

  public OnheapIncrementalIndex(
      long minTimestamp,
      QueryGranularity gran,
      final AggregatorFactory[] metrics,
      boolean deserializeComplexMetrics,
      int maxRowCount
  )
  {
    this(
        new IncrementalIndexSchema.Builder().withMinTimestamp(minTimestamp)
                                            .withQueryGranularity(gran)
                                            .withMetrics(metrics)
                                            .build(),
        deserializeComplexMetrics,
        maxRowCount
    );
  }

  public OnheapIncrementalIndex(
      long minTimestamp,
      QueryGranularity gran,
      final AggregatorFactory[] metrics,
      int maxRowCount
  )
  {
    // deserializeComplexMetrics defaults to true
    this(
        new IncrementalIndexSchema.Builder().withMinTimestamp(minTimestamp)
                                            .withQueryGranularity(gran)
                                            .withMetrics(metrics)
                                            .build(),
        true,
        maxRowCount
    );
  }

  public OnheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      int maxRowCount
  )
  {
    this(incrementalIndexSchema, true, maxRowCount);
  }

  @Override
  public ConcurrentNavigableMap<TimeAndDims, Integer> getFacts()
  {
    return facts;
  }

  @Override
  protected DimDim makeDimDim(String dimension)
  {
    return new OnHeapDimDim();
  }

  @Override
  protected Aggregator[] initAggs(
      AggregatorFactory[] metrics, Supplier<InputRow> rowSupplier, boolean deserializeComplexMetrics
  )
  {
    // On-heap aggregation allocates per-row aggregators lazily in addToFacts;
    // this slot array is only a placeholder.
    return new Aggregator[metrics.length];
  }

  /**
   * Adds a row, either aggregating into an existing row with the same key or
   * creating a new one. Concurrent inserts for the same key are resolved via
   * putIfAbsent: the loser discards its freshly-created aggregators and
   * aggregates into the winner's.
   *
   * @return the current number of rows in the index
   * @throws IndexSizeExceededException if a new row would exceed maxRowCount
   */
  @Override
  protected Integer addToFacts(
      AggregatorFactory[] metrics,
      boolean deserializeComplexMetrics,
      InputRow row,
      AtomicInteger numEntries,
      TimeAndDims key,
      ThreadLocal<InputRow> rowContainer,
      Supplier<InputRow> rowSupplier
  ) throws IndexSizeExceededException
  {
    final Integer priorIndex = facts.get(key);

    Aggregator[] aggs;

    if (null != priorIndex) {
      // Existing row: aggregate into its aggregators
      aggs = concurrentGet(priorIndex);
    } else {
      // New row: factorize one aggregator per metric
      aggs = new Aggregator[metrics.length];
      for (int i = 0; i < metrics.length; i++) {
        final AggregatorFactory agg = metrics[i];
        aggs[i] = agg.factorize(
            makeColumnSelectorFactory(agg, rowSupplier, deserializeComplexMetrics)
        );
      }
      final Integer rowIndex = indexIncrement.getAndIncrement();
      // Publish the aggregators before inserting into facts, so any reader
      // that sees the facts entry also sees its aggregators
      concurrentSet(rowIndex, aggs);

      // Last ditch sanity checks
      if (numEntries.get() >= maxRowCount && !facts.containsKey(key)) {
        throw new IndexSizeExceededException("Maximum number of rows [%d] reached", maxRowCount);
      }
      final Integer prev = facts.putIfAbsent(key, rowIndex);
      if (null == prev) {
        numEntries.incrementAndGet();
      } else {
        // We lost a race
        aggs = concurrentGet(prev);
        // Free up the misfire
        concurrentRemove(rowIndex);
        // This is expected to occur ~80% of the time in the worst scenarios
      }
    }

    rowContainer.set(row);

    // Aggregator implementations are not assumed thread-safe: lock each one
    // while aggregating
    for (Aggregator agg : aggs) {
      synchronized (agg) {
        agg.aggregate();
      }
    }

    rowContainer.set(null);

    return numEntries.get();
  }

  protected Aggregator[] concurrentGet(int offset)
  {
    // All get operations should be fine
    return aggregators.get(offset);
  }

  protected void concurrentSet(int offset, Aggregator[] value)
  {
    aggregators.put(offset, value);
  }

  protected void concurrentRemove(int offset)
  {
    aggregators.remove(offset);
  }

  @Override
  public boolean canAppendRow()
  {
    final boolean canAdd = size() < maxRowCount;
    if (!canAdd) {
      outOfRowsReason = String.format("Maximum number of rows [%d] reached", maxRowCount);
    }
    return canAdd;
  }

  @Override
  public String getOutOfRowsReason()
  {
    return outOfRowsReason;
  }

  @Override
  protected Aggregator[] getAggsForRow(int rowOffset)
  {
    return concurrentGet(rowOffset);
  }

  @Override
  protected Object getAggVal(Aggregator agg, int rowOffset, int aggPosition)
  {
    return agg.get();
  }

  @Override
  public float getMetricFloatValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].getFloat();
  }

  @Override
  public long getMetricLongValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].getLong();
  }

  @Override
  public Object getMetricObjectValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].get();
  }

  /**
   * On-heap dimension value dictionary: maps dimension string values to
   * integer ids and back, with an optional sorted view built by sort().
   * String values are interned (poorMansInterning) so identity comparison
   * via compareCannonicalValues is valid for values obtained from get().
   */
  private static class OnHeapDimDim implements DimDim
  {
    private final Map<String, Integer> falseIds;
    private final Map<Integer, String> falseIdsReverse;
    // Built lazily by sort(); must not be read before (see assertSorted)
    private volatile String[] sortedVals = null;
    final ConcurrentMap<String, String> poorMansInterning = Maps.newConcurrentMap();

    public OnHeapDimDim()
    {
      // Both directions share one synchronized BiMap, so id<->value stay consistent
      BiMap<String, Integer> biMap = Maps.synchronizedBiMap(HashBiMap.<String, Integer>create());
      falseIds = biMap;
      falseIdsReverse = biMap.inverse();
    }

    /**
     * Returns the interned String value to allow fast comparisons using `==` instead of `.equals()`
     *
     * @see io.druid.segment.incremental.IncrementalIndexStorageAdapter.EntryHolderValueMatcherFactory#makeValueMatcher(String, String)
     */
    public String get(String str)
    {
      String prev = poorMansInterning.putIfAbsent(str, str);
      return prev != null ? prev : str;
    }

    public int getId(String value)
    {
      final Integer id = falseIds.get(value);
      // -1 signals "unknown value"
      return id == null ? -1 : id;
    }

    public String getValue(int id)
    {
      return falseIdsReverse.get(id);
    }

    public boolean contains(String value)
    {
      return falseIds.containsKey(value);
    }

    public int size()
    {
      return falseIds.size();
    }

    public synchronized int add(String value)
    {
      // Ids are dense: next id == current size. NOTE(review): assumes callers
      // only add values not already present -- adding a duplicate would remap
      // it to a new id; verify against IncrementalIndex call sites.
      int id = falseIds.size();
      falseIds.put(value, id);
      return id;
    }

    public int getSortedId(String value)
    {
      assertSorted();
      return Arrays.binarySearch(sortedVals, value);
    }

    public String getSortedValue(int index)
    {
      assertSorted();
      return sortedVals[index];
    }

    public void sort()
    {
      if (sortedVals == null) {
        sortedVals = new String[falseIds.size()];

        int index = 0;
        for (String value : falseIds.keySet()) {
          sortedVals[index++] = value;
        }
        Arrays.sort(sortedVals);
      }
    }

    private void assertSorted()
    {
      if (sortedVals == null) {
        throw new ISE("Call sort() before calling the getSorted* methods.");
      }
    }

    public boolean compareCannonicalValues(String s1, String s2)
    {
      // Identity comparison is intentional: both arguments are expected to be
      // interned via get()
      return s1 == s2;
    }
  }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.ext.cronet;

import android.net.Uri;
import android.os.ConditionVariable;
import android.text.TextUtils;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.upstream.DataSourceException;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Predicate;
import com.google.android.exoplayer2.util.SystemClock;
import com.google.android.exoplayer2.util.TraceUtil;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.chromium.net.CronetEngine;
import org.chromium.net.UrlRequest;
import org.chromium.net.UrlRequestException;
import org.chromium.net.UrlResponseInfo;

/**
 * DataSource without intermediate buffer based on Cronet API set using UrlRequest.
 * <p>This class's methods are organized in the sequence of expected calls.
 */
public class CronetDataSource extends UrlRequest.Callback implements HttpDataSource {

  /**
   * Thrown when an error is encountered when trying to open a {@link CronetDataSource}.
   */
  public static final class OpenException extends HttpDataSourceException {

    /**
     * Returns the status of the connection establishment at the moment when the error occurred, as
     * defined by {@link UrlRequest.Status}.
     */
    public final int cronetConnectionStatus;

    public OpenException(IOException cause, DataSpec dataSpec, int cronetConnectionStatus) {
      super(cause, dataSpec, TYPE_OPEN);
      this.cronetConnectionStatus = cronetConnectionStatus;
    }

    public OpenException(String errorMessage, DataSpec dataSpec, int cronetConnectionStatus) {
      super(errorMessage, dataSpec, TYPE_OPEN);
      this.cronetConnectionStatus = cronetConnectionStatus;
    }

  }

  /**
   * The default connection timeout, in milliseconds.
   */
  public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 8 * 1000;
  /**
   * The default read timeout, in milliseconds.
   */
  public static final int DEFAULT_READ_TIMEOUT_MILLIS = 8 * 1000;

  private static final String TAG = "CronetDataSource";
  // Parses "bytes <first>-<last>/<total>" Content-Range values
  private static final Pattern CONTENT_RANGE_HEADER_PATTERN =
      Pattern.compile("^bytes (\\d+)-(\\d+)/(\\d+)$");
  // The size of read buffer passed to cronet UrlRequest.read().
  private static final int READ_BUFFER_SIZE_BYTES = 32 * 1024;

  // Connection state machine values for connectionState
  /* package */ static final int IDLE_CONNECTION = 5;
  /* package */ static final int OPENING_CONNECTION = 2;
  /* package */ static final int CONNECTED_CONNECTION = 3;
  /* package */ static final int OPEN_CONNECTION = 4;

  private final CronetEngine cronetEngine;
  private final Executor executor;
  private final Predicate<String> contentTypePredicate;
  private final TransferListener transferListener;
  private final int connectTimeoutMs;
  private final int readTimeoutMs;
  private final boolean resetTimeoutOnRedirects;
  private final Map<String, String> requestProperties;
  // Signals completion of the in-flight async Cronet callback to the calling thread
  private final ConditionVariable operation;
  private final ByteBuffer readBuffer;
  private final Clock clock;

  private UrlRequest currentUrlRequest;
  private DataSpec currentDataSpec;
  private UrlResponseInfo responseInfo;

  /* package */ volatile int connectionState;
  // Tracks redirects: updated in onResponseStarted to the final URL
  private volatile String currentUrl;
  private volatile long currentConnectTimeoutMs;
  private volatile HttpDataSourceException exception;
  private volatile long contentLength;
  // Null when the expected length is unknown
  private volatile AtomicLong expectedBytesRemainingToRead;
  // True while readBuffer holds unconsumed bytes from the last Cronet read
  private volatile boolean hasData;
  private volatile boolean responseFinished;

  /**
   * @param cronetEngine A CronetEngine.
   * @param executor The {@link java.util.concurrent.Executor} that will perform the requests.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the
   *     predicate then an {@link InvalidContentTypeException} is thrown from
   *     {@link #open(DataSpec)}.
   * @param transferListener A listener.
   */
  public CronetDataSource(CronetEngine cronetEngine, Executor executor,
      Predicate<String> contentTypePredicate, TransferListener transferListener) {
    this(cronetEngine, executor, contentTypePredicate, transferListener,
        DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS, false);
  }

  /**
   * @param cronetEngine A CronetEngine.
   * @param executor The {@link java.util.concurrent.Executor} that will perform the requests.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the
   *     predicate then an {@link InvalidContentTypeException} is thrown from
   *     {@link #open(DataSpec)}.
   * @param transferListener A listener.
   * @param connectTimeoutMs The connection timeout, in milliseconds.
   * @param readTimeoutMs The read timeout, in milliseconds.
   * @param resetTimeoutOnRedirects Whether the connect timeout is reset when a redirect occurs.
   */
  public CronetDataSource(CronetEngine cronetEngine, Executor executor,
      Predicate<String> contentTypePredicate, TransferListener transferListener,
      int connectTimeoutMs, int readTimeoutMs, boolean resetTimeoutOnRedirects) {
    this(cronetEngine, executor, contentTypePredicate, transferListener, connectTimeoutMs,
        readTimeoutMs, resetTimeoutOnRedirects, new SystemClock());
  }

  // Visible-for-testing constructor allowing an injected Clock
  /* package */ CronetDataSource(CronetEngine cronetEngine, Executor executor,
      Predicate<String> contentTypePredicate, TransferListener transferListener,
      int connectTimeoutMs, int readTimeoutMs, boolean resetTimeoutOnRedirects, Clock clock) {
    this.cronetEngine = Assertions.checkNotNull(cronetEngine);
    this.executor = Assertions.checkNotNull(executor);
    this.contentTypePredicate = contentTypePredicate;
    this.transferListener = transferListener;
    this.connectTimeoutMs = connectTimeoutMs;
    this.readTimeoutMs = readTimeoutMs;
    this.resetTimeoutOnRedirects = resetTimeoutOnRedirects;
    this.clock = Assertions.checkNotNull(clock);
    readBuffer = ByteBuffer.allocateDirect(READ_BUFFER_SIZE_BYTES);
    requestProperties = new HashMap<>();
    operation = new ConditionVariable();
    connectionState = IDLE_CONNECTION;
  }

  @Override
  public void setRequestProperty(String name, String value) {
    synchronized (requestProperties) {
      requestProperties.put(name, value);
    }
  }

  @Override
  public void clearRequestProperty(String name) {
    synchronized (requestProperties) {
      requestProperties.remove(name);
    }
  }

  @Override
  public void clearAllRequestProperties() {
    synchronized (requestProperties) {
      requestProperties.clear();
    }
  }

  @Override
  public Map<String, List<String>> getResponseHeaders() {
    // Null until a response has been received
    return responseInfo == null ? null : responseInfo.getAllHeaders();
  }

  /**
   * Opens the source for the given spec: starts the Cronet request and blocks
   * until the connection is established, the connect timeout elapses, or an
   * error is reported via a callback.
   */
  @Override
  public long open(DataSpec dataSpec) throws HttpDataSourceException {
    TraceUtil.beginSection("CronetDataSource.open");
    try {
      Assertions.checkNotNull(dataSpec);
      synchronized (this) {
        Assertions.checkState(connectionState == IDLE_CONNECTION, "Connection already open");
        connectionState = OPENING_CONNECTION;
      }

      operation.close();
      resetConnectTimeout();
      startRequest(dataSpec);

      boolean requestStarted = blockUntilConnectTimeout();
      if (exception != null) {
        // An error occurred opening the connection.
        throw exception;
      } else if (!requestStarted) {
        // The timeout was reached before the connection was opened.
        throw new OpenException(new SocketTimeoutException(), dataSpec, getCurrentRequestStatus());
      }

      // Connection was opened.
      if (transferListener != null) {
        transferListener.onTransferStart(this, dataSpec);
      }
      connectionState = OPEN_CONNECTION;
      return contentLength;
    } finally {
      TraceUtil.endSection();
    }
  }

  // Builds and starts the asynchronous Cronet request for the given spec
  private void startRequest(DataSpec dataSpec) throws HttpDataSourceException {
    currentUrl = dataSpec.uri.toString();
    currentDataSpec = dataSpec;
    UrlRequest.Builder urlRequestBuilder =
        new UrlRequest.Builder(currentUrl, this, executor, cronetEngine);
    fillCurrentRequestHeader(urlRequestBuilder);
    fillCurrentRequestPostBody(urlRequestBuilder, dataSpec);
    currentUrlRequest = urlRequestBuilder.build();
    currentUrlRequest.start();
  }

  // Copies the configured request properties and, if needed, a Range header
  private void fillCurrentRequestHeader(UrlRequest.Builder urlRequestBuilder) {
    synchronized (requestProperties) {
      for (Entry<String, String> headerEntry : requestProperties.entrySet()) {
        urlRequestBuilder.addHeader(headerEntry.getKey(), headerEntry.getValue());
      }
    }

    if (currentDataSpec.position == 0 && currentDataSpec.length == C.LENGTH_UNSET) {
      // Not required.
      return;
    }
    // Open-ended range when length is unset; bounded range otherwise
    StringBuilder rangeValue = new StringBuilder();
    rangeValue.append("bytes=");
    rangeValue.append(currentDataSpec.position);
    rangeValue.append("-");
    if (currentDataSpec.length != C.LENGTH_UNSET) {
      rangeValue.append(currentDataSpec.position + currentDataSpec.length - 1);
    }
    urlRequestBuilder.addHeader("Range", rangeValue.toString());
  }

  private void fillCurrentRequestPostBody(UrlRequest.Builder urlRequestBuilder, DataSpec dataSpec)
      throws HttpDataSourceException {
    if (dataSpec.postBody != null) {
      // Cronet requires an explicit Content-Type for upload bodies
      if (!requestProperties.containsKey("Content-Type")) {
        throw new OpenException("POST requests must set a Content-Type header", dataSpec,
            getCurrentRequestStatus());
      }
      urlRequestBuilder.setUploadDataProvider(
          new ByteArrayUploadDataProvider(dataSpec.postBody), executor);
    }
  }

  /**
   * Cronet failure callback. Translates the error into an exception matching
   * the current connection phase and releases any thread blocked on
   * {@code operation}.
   */
  @Override
  public synchronized void onFailed(
      UrlRequest request, UrlResponseInfo info, UrlRequestException error) {
    if (request != currentUrlRequest) {
      // Stale callback from a request that has already been replaced/cancelled
      return;
    }
    if (connectionState == OPENING_CONNECTION) {
      IOException cause = error.getErrorCode() == UrlRequestException.ERROR_HOSTNAME_NOT_RESOLVED
          ? new UnknownHostException() : error;
      exception = new OpenException(cause, currentDataSpec, getCurrentRequestStatus());
    } else if (connectionState == OPEN_CONNECTION) {
      // Invalidate any buffered data so the reader observes the error
      readBuffer.limit(0);
      exception = new HttpDataSourceException(error, currentDataSpec,
          HttpDataSourceException.TYPE_READ);
    }
    operation.open();
  }

  /**
   * Cronet response-headers callback. Validates the response, records the
   * content length, and moves the state machine to CONNECTED.
   */
  @Override
  public synchronized void onResponseStarted(UrlRequest request, UrlResponseInfo info) {
    if (request != currentUrlRequest) {
      return;
    }
    TraceUtil.beginSection("CronetDataSource.onResponseStarted");
    try {
      validateResponse(info);
      responseInfo = info;

      // Check content length.
      contentLength = getContentLength(info.getAllHeaders());
      // If a specific length is requested and a specific length is returned but the 2 don't match
      // it's an error.
      if (currentDataSpec.length != C.LENGTH_UNSET && contentLength != C.LENGTH_UNSET
          && currentDataSpec.length != contentLength) {
        throw new OpenException("Content length did not match requested length", currentDataSpec,
            getCurrentRequestStatus());
      }

      if (contentLength > 0) {
        expectedBytesRemainingToRead = new AtomicLong(contentLength);
      }

      // Keep track of redirects.
      currentUrl = responseInfo.getUrl();
      connectionState = CONNECTED_CONNECTION;
    } catch (HttpDataSourceException e) {
      exception = e;
    } finally {
      // Always wake the thread blocked in open(), success or failure
      operation.open();
      TraceUtil.endSection();
    }
  }

  // Rejects non-2xx status codes and unacceptable content types
  private void validateResponse(UrlResponseInfo info) throws HttpDataSourceException {
    // Check for a valid response code.
    int responseCode = info.getHttpStatusCode();
    if (responseCode < 200 || responseCode > 299) {
      InvalidResponseCodeException exception = new InvalidResponseCodeException(
          responseCode, info.getAllHeaders(), currentDataSpec);
      if (responseCode == 416) {
        // 416 Range Not Satisfiable: surface as a position-out-of-range cause
        exception.initCause(new DataSourceException(DataSourceException.POSITION_OUT_OF_RANGE));
      }
      throw exception;
    }
    // Check for a valid content type.
    try {
      String contentType = info.getAllHeaders().get("Content-Type").get(0);
      if (contentTypePredicate != null && !contentTypePredicate.evaluate(contentType)) {
        throw new InvalidContentTypeException(contentType, currentDataSpec);
      }
    } catch (IndexOutOfBoundsException e) {
      // Missing/empty Content-Type header. NOTE(review): a missing header key
      // would yield an NPE rather than IndexOutOfBounds; only the empty-list
      // case is caught here -- confirm header maps never omit the key entirely.
      throw new InvalidContentTypeException(null, currentDataSpec);
    }
  }

  /**
   * Derives the content length from Content-Length and Content-Range headers,
   * preferring the larger value when they disagree.
   *
   * @return the derived length, or C.LENGTH_UNSET if neither header is usable
   */
  private long getContentLength(Map<String, List<String>> headers) {
    // Logic copied from {@code DefaultHttpDataSource}
    long contentLength = C.LENGTH_UNSET;
    List<String> contentLengthHeader = headers.get("Content-Length");
    if (contentLengthHeader != null && !contentLengthHeader.isEmpty()
        && !TextUtils.isEmpty(contentLengthHeader.get(0))) {
      try {
        contentLength = Long.parseLong(contentLengthHeader.get(0));
      } catch (NumberFormatException e) {
        log(Log.ERROR, "Unexpected Content-Length [" + contentLengthHeader + "]");
      }
    }
    List<String> contentRangeHeader = headers.get("Content-Range");
    if (contentRangeHeader != null && !contentRangeHeader.isEmpty()
        && !TextUtils.isEmpty(contentRangeHeader.get(0))) {
      Matcher matcher = CONTENT_RANGE_HEADER_PATTERN.matcher(contentRangeHeader.get(0));
      if (matcher.find()) {
        try {
          long contentLengthFromRange =
              Long.parseLong(matcher.group(2)) - Long.parseLong(matcher.group(1)) + 1;
          if (contentLength < 0) {
            // Some proxy servers strip the Content-Length header. Fall back to the length
            // calculated here in this case.
            contentLength = contentLengthFromRange;
          } else if (contentLength != contentLengthFromRange) {
            // If there is a discrepancy between the Content-Length and Content-Range headers,
            // assume the one with the larger value is correct. We have seen cases where carrier
            // change one of them to reduce the size of a request, but it is unlikely anybody
            // would increase it.
            log(Log.WARN, "Inconsistent headers [" + contentLengthHeader + "] ["
                + contentRangeHeader + "]");
            contentLength = Math.max(contentLength, contentLengthFromRange);
          }
        } catch (NumberFormatException e) {
          log(Log.ERROR, "Unexpected Content-Range [" + contentRangeHeader + "]");
        }
      }
    }
    return contentLength;
  }

  /**
   * Reads from the internal buffer, refilling it synchronously from Cronet
   * when empty. Returns RESULT_END_OF_INPUT once the requested byte count has
   * been delivered or the response has finished.
   */
  @Override
  public int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException {
    TraceUtil.beginSection("CronetDataSource.read");
    try {
      synchronized (this) {
        if (connectionState != OPEN_CONNECTION) {
          throw new IllegalStateException("Connection not ready");
        }
      }

      // If being asked to read beyond the amount of bytes initially requested, return
      // RESULT_END_OF_INPUT.
      if (expectedBytesRemainingToRead != null && expectedBytesRemainingToRead.get() <= 0) {
        return C.RESULT_END_OF_INPUT;
      }

      if (!hasData) {
        // Read more data from cronet.
        operation.close();
        currentUrlRequest.read(readBuffer);
        if (!operation.block(readTimeoutMs)) {
          throw new HttpDataSourceException(
              new SocketTimeoutException(), currentDataSpec, HttpDataSourceException.TYPE_READ);
        }
        if (exception != null) {
          throw exception;
        }
        // The expected response length is unknown, but cronet has indicated that the request
        // already finished successfully.
        if (responseFinished) {
          return C.RESULT_END_OF_INPUT;
        }
      }

      int bytesRead = Math.min(readBuffer.remaining(), readLength);
      readBuffer.get(buffer, offset, bytesRead);

      if (!readBuffer.hasRemaining()) {
        // Buffer drained: next read must fetch from Cronet again
        readBuffer.clear();
        hasData = false;
      }
      if (expectedBytesRemainingToRead != null) {
        expectedBytesRemainingToRead.addAndGet(-bytesRead);
      }
      if (transferListener != null && bytesRead >= 0) {
        transferListener.onBytesTransferred(this, bytesRead);
      }
      return bytesRead;
    } finally {
      TraceUtil.endSection();
    }
  }

  /**
   * Cronet redirect callback. Rejects 307/308 redirects of POST requests,
   * optionally resets the connect timeout, then follows the redirect.
   */
  @Override
  public void onRedirectReceived(UrlRequest request, UrlResponseInfo info, String newLocationUrl) {
    if (request != currentUrlRequest) {
      return;
    }
    if (currentDataSpec.postBody != null) {
      int responseCode = info.getHttpStatusCode();
      // The industry standard is to disregard POST redirects when the status code is 307 or 308.
      // For other redirect response codes the POST request is converted to a GET request and the
      // redirect is followed.
      if (responseCode == 307 || responseCode == 308) {
        exception = new OpenException("POST request redirected with 307 or 308 response code",
            currentDataSpec, getCurrentRequestStatus());
        operation.open();
        return;
      }
    }
    if (resetTimeoutOnRedirects) {
      resetConnectTimeout();
    }
    request.followRedirect();
  }

  /**
   * Cronet read-completion callback: flips the buffer for consumption and
   * wakes the reader blocked in read().
   */
  @Override
  public synchronized void onReadCompleted(UrlRequest request, UrlResponseInfo info,
      ByteBuffer buffer) {
    if (request != currentUrlRequest) {
      return;
    }
    readBuffer.flip();
    if (readBuffer.limit() > 0) {
      hasData = true;
    }
    operation.open();
  }

  // Cronet success callback: marks the response finished and wakes the reader
  @Override
  public void onSucceeded(UrlRequest request, UrlResponseInfo info) {
    if (request != currentUrlRequest) {
      return;
    }
    responseFinished = true;
    operation.open();
  }

  @Override
  public synchronized void close() {
    TraceUtil.beginSection("CronetDataSource.close");
    try {
      if (currentUrlRequest != null) {
        currentUrlRequest.cancel();
        currentUrlRequest = null;
      }
      readBuffer.clear();
      currentDataSpec = null;
      currentUrl = null;
      exception = null;
      contentLength = 0;
      hasData = false;
      responseInfo = null;
expectedBytesRemainingToRead = null; responseFinished = false; if (transferListener != null && connectionState == OPEN_CONNECTION) { transferListener.onTransferEnd(this); } } finally { connectionState = IDLE_CONNECTION; TraceUtil.endSection(); } } @Override public Uri getUri() { return Uri.parse(currentUrl); } private void log(int priority, String message) { if (Log.isLoggable(TAG, priority)) { Log.println(priority, TAG, message); } } private int getCurrentRequestStatus() { if (currentUrlRequest == null) { return UrlRequest.Status.IDLE; } final ConditionVariable conditionVariable = new ConditionVariable(); final AtomicInteger result = new AtomicInteger(); currentUrlRequest.getStatus(new UrlRequest.StatusListener() { @Override public void onStatus(int status) { result.set(status); conditionVariable.open(); } }); return result.get(); } private boolean blockUntilConnectTimeout() { long now = clock.elapsedRealtime(); boolean opened = false; while (!opened && now < currentConnectTimeoutMs) { opened = operation.block(currentConnectTimeoutMs - now + 5 /* fudge factor */); now = clock.elapsedRealtime(); } return opened; } private void resetConnectTimeout() { currentConnectTimeoutMs = clock.elapsedRealtime() + connectTimeoutMs; } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.testutil;

import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static com.google.devtools.build.lib.packages.BuildType.OUTPUT_LIST;
import static com.google.devtools.build.lib.packages.Type.INTEGER;
import static com.google.devtools.build.lib.packages.Type.STRING_LIST;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException;
import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.CommonPrerequisiteValidator;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.PlatformConfiguration;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TemplateVariableInfo;
import com.google.devtools.build.lib.analysis.config.CoreOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.rules.config.ConfigRules;
import com.google.devtools.build.lib.rules.core.CoreRules;
import com.google.devtools.build.lib.rules.platform.PlatformRules;
import com.google.devtools.build.lib.util.FileTypeSet;
import java.lang.reflect.Method;
import java.util.Map;
import net.starlark.java.syntax.Location;

/** Helper class to provide a RuleClassProvider for tests. */
public class TestRuleClassProvider {
  // Lazily-created singletons; see getRuleClassProvider()/getRuleClassProviderWithClearedSuffix().
  // NOTE(review): initialization is not synchronized — assumes single-threaded test setup.
  private static ConfiguredRuleClassProvider ruleClassProvider = null;
  private static ConfiguredRuleClassProvider ruleClassProviderWithClearedSuffix = null;

  // Static utility holder; not instantiable.
  private TestRuleClassProvider() {}

  /** Adds all the rule classes supported internally within the build tool to the given builder. */
  public static void addStandardRules(ConfiguredRuleClassProvider.Builder builder) {
    try {
      // The concrete provider class is configured per test flavor, so it is resolved reflectively.
      Class<?> providerClass = Class.forName(TestConstants.TEST_RULE_CLASS_PROVIDER);
      Method setupMethod =
          providerClass.getMethod("setup", ConfiguredRuleClassProvider.Builder.class);
      setupMethod.invoke(null, builder);
    } catch (Exception e) {
      // Reflection failures indicate a broken test configuration, not a recoverable condition.
      throw new IllegalStateException(e);
    }
  }

  /**
   * Builds the shared provider: standard rules plus the two test-only rule definitions below,
   * optionally with the workspace file suffix cleared.
   */
  private static ConfiguredRuleClassProvider createRuleClassProvider(boolean clearSuffix) {
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    addStandardRules(builder);
    // TODO(b/174773026): Eliminate TestingDummyRule/MockToolchainRule from this class, push them
    // down into the tests that use them. It's better for tests to avoid spooky mocks at a distance.
    // The same might also be said for the cleared-workspace variant of getRuleClassProvider(). If
    // we eliminate both, TestRuleClassProvider probably doesn't need to exist anymore.
    builder.addRuleDefinition(new TestingDummyRule());
    builder.addRuleDefinition(new MockToolchainRule());
    if (clearSuffix) {
      builder.clearWorkspaceFileSuffixForTesting();
    }
    return builder.build();
  }

  /** Returns a rule class provider. */
  public static ConfiguredRuleClassProvider getRuleClassProvider() {
    if (ruleClassProvider == null) {
      ruleClassProvider = createRuleClassProvider(false);
    }
    return ruleClassProvider;
  }

  /** Returns a rule class provider with the workspace suffix cleared. */
  public static ConfiguredRuleClassProvider getRuleClassProviderWithClearedSuffix() {
    if (ruleClassProviderWithClearedSuffix == null) {
      ruleClassProviderWithClearedSuffix = createRuleClassProvider(true);
    }
    return ruleClassProviderWithClearedSuffix;
  }

  // TODO(bazel-team): The logic for the "minimal" rule class provider is currently split between
  // TestRuleClassProvider and BuiltinsInjectionTest's overrides of BuildViewTestCase setup helpers.
  // Consider refactoring this together into one place as a new MinimalAnalysisMock.

  /**
   * Adds a few essential rules to a builder, such that it is usable but does not contain all the
   * rule classes known to the production environment.
   */
  public static void addMinimalRules(ConfiguredRuleClassProvider.Builder builder) {
    // TODO(bazel-team): See also TrimmableTestConfigurationFragments#installFragmentsAndNativeRules
    // for alternative/additional setup. Consider factoring that one to use this method.
    builder
        .setToolsRepository("@")
        .setRunfilesPrefix("test")
        .setPrerequisiteValidator(new MinimalPrerequisiteValidator());
    CoreRules.INSTANCE.init(builder);
    builder.addConfigurationOptions(CoreOptions.class);
    PlatformRules.INSTANCE.init(builder);
    ConfigRules.INSTANCE.init(builder);
  }

  /** A prerequisite validator that treats every package as its own logical package. */
  private static class MinimalPrerequisiteValidator extends CommonPrerequisiteValidator {
    @Override
    public boolean isSameLogicalPackage(
        PackageIdentifier thisPackage, PackageIdentifier prerequisitePackage) {
      return thisPackage.equals(prerequisitePackage);
    }

    @Override
    protected boolean packageUnderExperimental(PackageIdentifier packageIdentifier) {
      return false;
    }

    @Override
    protected boolean checkVisibilityForExperimental(RuleContext.Builder context) {
      // It does not matter whether we return true or false here if packageUnderExperimental always
      // returns false.
      return true;
    }

    @Override
    protected boolean allowExperimentalDeps(RuleContext.Builder context) {
      // It does not matter whether we return true or false here if packageUnderExperimental always
      // returns false.
      return false;
    }
  }

  /** A dummy rule with some dummy attributes. */
  public static final class TestingDummyRule implements RuleDefinition {
    @Override
    public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) {
      return builder
          .setUndocumented()
          .add(attr("srcs", LABEL_LIST).allowedFileTypes(FileTypeSet.ANY_FILE))
          .add(attr("outs", OUTPUT_LIST))
          .add(attr("dummystrings", STRING_LIST))
          .add(attr("dummyinteger", INTEGER))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return RuleDefinition.Metadata.builder()
          .name("testing_dummy_rule")
          .ancestors(BaseRuleClasses.RuleBase.class)
          .factoryClass(UnknownRuleConfiguredTarget.class)
          .build();
    }
  }

  /** Stub rule to test Make variable expansion.
   */
  public static final class MakeVariableTester implements RuleConfiguredTargetFactory {
    @Override
    public ConfiguredTarget create(RuleContext ruleContext)
        throws InterruptedException, RuleErrorException, ActionConflictException {
      // Expose the rule's "variables" attribute verbatim as a TemplateVariableInfo provider.
      Map<String, String> variables = ruleContext.attributes().get("variables", Type.STRING_DICT);
      return new RuleConfiguredTargetBuilder(ruleContext)
          .setFilesToBuild(NestedSetBuilder.emptySet(Order.STABLE_ORDER))
          .addProvider(RunfilesProvider.EMPTY)
          .addNativeDeclaredProvider(
              new TemplateVariableInfo(ImmutableMap.copyOf(variables), Location.BUILTIN))
          .build();
    }
  }

  /** Definition of a stub rule to test Make variable expansion. */
  public static final class MakeVariableTesterRule implements RuleDefinition {
    @Override
    public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment environment) {
      return builder
          .advertiseProvider(TemplateVariableInfo.class)
          .add(attr("variables", Type.STRING_DICT))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return Metadata.builder()
          .name("make_variable_tester")
          .ancestors(
              BaseRuleClasses.BaseRule.class, BaseRuleClasses.MakeVariableExpandingRule.class)
          .factoryClass(MakeVariableTester.class)
          .build();
    }
  }

  /** A mock rule that requires a toolchain. */
  public static class MockToolchainRule implements RuleDefinition {
    @Override
    public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) {
      return builder
          .requiresConfigurationFragments(PlatformConfiguration.class)
          .addRequiredToolchains(
              ImmutableList.of(Label.parseAbsoluteUnchecked("//toolchain:test_toolchain")))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return RuleDefinition.Metadata.builder()
          .name("mock_toolchain_rule")
          .factoryClass(UnknownRuleConfiguredTarget.class)
          .ancestors(BaseRuleClasses.RuleBase.class)
          .build();
    }
  }
}
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.googlecodelabs.example.backupexample;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

/**
 * A login screen that offers login via email/password.
 */
public class LoginActivity extends AppCompatActivity {

    /**
     * A dummy authentication store containing known user names and passwords.
     * Format: "email:password", one entry per account.
     */
    private static final String[] DUMMY_CREDENTIALS = new String[]{
            "foo@example.com:hello", "bar@example.com:world"
    };

    /**
     * Keep track of the login task to ensure we can cancel it if requested.
     */
    private UserLoginTask mAuthTask = null;

    // UI references.
    private AutoCompleteTextView mEmailView;
    private EditText mPasswordView;
    private View mProgressView;
    private View mLoginFormView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        // Set up the login form.
        mEmailView = findViewById(R.id.email);

        mPasswordView = findViewById(R.id.password);
        // Submit the form when the user presses the IME "done" action on the password field.
        mPasswordView.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView textView, int id, KeyEvent keyEvent) {
                if (id == EditorInfo.IME_ACTION_DONE || id == EditorInfo.IME_NULL) {
                    attemptLogin();
                    return true;
                }
                return false;
            }
        });

        Button mEmailSignInButton = findViewById(R.id.email_sign_in_button);
        mEmailSignInButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                attemptLogin();
            }
        });

        mLoginFormView = findViewById(R.id.login_form);
        mProgressView = findViewById(R.id.login_progress);

        // Skip the login form entirely when valid credentials are already stored.
        if (!PrefUtils.needsLogin(this)) {
            startActivity(new Intent(this, LoggedInActivity.class));
            finish();
        }
        overlayDebug((TextView) findViewById(R.id.overlay));

        //******************* Implement Login Hinting *****************

        //******************* End implement Login Hinting *************
    }

    // Shows stored debug text in the optional overlay view; no-op if the layout lacks one.
    private void overlayDebug(@Nullable TextView overlay) {
        if (overlay == null) {
            return;
        }
        overlay.setText(PrefUtils.getDebugText(this));
    }

    /**
     * Attempts to sign in or register the account specified by the login form.
     * If there are form errors (invalid email, missing fields, etc.), the
     * errors are presented and no actual login attempt is made.
     */
    private void attemptLogin() {
        // Ignore re-entrant calls while an attempt is already in flight.
        if (mAuthTask != null) {
            return;
        }

        // Reset errors.
        mEmailView.setError(null);
        mPasswordView.setError(null);

        // Store values at the time of the login attempt.
        String email = mEmailView.getText().toString();
        String password = mPasswordView.getText().toString();

        // Show a progress spinner, and kick off a background task to
        // perform the user login attempt.
        showProgress(true);
        mAuthTask = new UserLoginTask(email, password);
        mAuthTask.execute((Void) null);
    }

    /**
     * Shows the progress UI and hides the login form.
*/
    @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2)
    private void showProgress(final boolean show) {
        // On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow
        // for very easy animations. If available, use these APIs to fade-in
        // the progress spinner.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
            int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime);

            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
            mLoginFormView.animate().setDuration(shortAnimTime).alpha(
                    show ? 0 : 1).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
                }
            });

            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mProgressView.animate().setDuration(shortAnimTime).alpha(
                    show ? 1 : 0).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
                }
            });
        } else {
            // The ViewPropertyAnimator APIs are not available, so simply show
            // and hide the relevant UI components.
            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
        }
    }

    /**
     * Represents an asynchronous login/registration task used to authenticate
     * the user.
     */
    public class UserLoginTask extends AsyncTask<Void, Void, Boolean> {

        private final String mEmail;
        private final String mPassword;

        UserLoginTask(String email, String password) {
            mEmail = email;
            mPassword = password;
        }

        /**
         * Checks the credentials against {@code DUMMY_CREDENTIALS} after a simulated
         * network delay. Returns whether authentication succeeded.
         */
        @Override
        protected Boolean doInBackground(Void... params) {
            // TODO: attempt authentication against a network service.

            try {
                // Simulate network access.
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                // Fix: restore the thread's interrupt status before bailing out, so
                // cancellation (e.g. AsyncTask.cancel(true)) remains observable to callers.
                // Previously the interrupt was silently swallowed.
                Thread.currentThread().interrupt();
                return false;
            }

            for (String credential : DUMMY_CREDENTIALS) {
                String[] pieces = credential.split(":");
                if (pieces[0].equals(mEmail)) {
                    // Account exists, return true if the password matches.
                    if (pieces[1].equals(mPassword)) {
                        // TODO: register the new account here and get back an authKey
                        return true;
                    }
                }
            }

            return false;
        }

        @Override
        protected void onPostExecute(final Boolean success) {
            mAuthTask = null;
            showProgress(false);

            if (success) {
                finishLogin(mEmail, mPassword);
            } else {
                mPasswordView.setError(getString(R.string.error_incorrect_password));
                mPasswordView.requestFocus();
            }
        }

        @Override
        protected void onCancelled() {
            mAuthTask = null;
            showProgress(false);
        }
    }

    // Persists the credentials and advances to the logged-in screen.
    private void finishLogin(String email, String password) {
        // For the sake of this example, we assume that the authKey is the password
        // Note: You should always ensure that usernames/passwords should be excluded
        // from backup for security.
        PrefUtils.setLoginAccount(LoginActivity.this, email, password);
        startActivity(new Intent(this, LoggedInActivity.class));
        finish();
    }
}
/*******************************************************************************
 * Copyright (c) 2005 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.bpel.ui.extensions;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.eclipse.bpel.ui.BPELUIPlugin;
import org.eclipse.bpel.ui.IBPELUIConstants;
import org.eclipse.bpel.ui.IHoverHelper;
import org.eclipse.bpel.ui.Messages;
import org.eclipse.bpel.ui.bpelactions.AbstractBPELAction;
import org.eclipse.bpel.ui.expressions.DefaultExpressionEditor;
import org.eclipse.bpel.ui.expressions.IExpressionEditor;
import org.eclipse.bpel.ui.factories.AbstractUIObjectFactory;
import org.eclipse.bpel.ui.util.BPELUtil;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExtensionPoint;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.ecore.EClass;

/**
 * Responsible for getting information from the BPEL UI extension points.
 */
public class BPELUIRegistry {

	// Extension point / element / attribute names used when reading the registry.
	static final String EXTPT_HOVERHELPERS = "hoverHelpers"; //$NON-NLS-1$
	static final String ELEMENT_HOVERHELPER = "hoverHelper"; //$NON-NLS-1$

	static final String EXTPT_EXPRESSION_EDITORS = "expressionEditors"; //$NON-NLS-1$
	static final String ELEMENT_EDITOR = "editor"; //$NON-NLS-1$
	static final String ATT_EXPRESSION_LANGUAGE = "expressionLanguage"; //$NON-NLS-1$
	static final String ATT_CLASS = "class"; //$NON-NLS-1$
	static final String ATT_LABEL = "label"; //$NON-NLS-1$

	static final String EXTPT_ACTIONS = "actions"; //$NON-NLS-1$
	static final String ELEMENT_CATEGORY = "category"; //$NON-NLS-1$
	static final String ATT_NAME = "name"; //$NON-NLS-1$
	static final String ATT_ID = "id"; //$NON-NLS-1$
	static final String ELEMENT_ACTION = "action"; //$NON-NLS-1$
	static final String ATT_CATEGORY_ID = "categoryId"; //$NON-NLS-1$

	static final String EXTPT_MODELLISTENER = "modelListener"; //$NON-NLS-1$
	static final String ELEMENT_LISTENER = "listener"; //$NON-NLS-1$

	static final String ATT_SPEC_COMPLIANT = "specCompliant"; //$NON-NLS-1$

	// Shared zero-length array used by getExpressionEditorDescriptors().
	static final ExpressionEditorDescriptor[] EMPTY_EDITOR_DESCRIPTORS = {};

	// Singleton instance; see getInstance().
	private static BPELUIRegistry instance;

	// Caches populated once in the constructor from the extension registry.
	private Map<String,ExpressionEditorDescriptor> fLanguageToEditorDescriptor;
	private HoverHelperDescriptor hoverHelperDescriptor;
	private ActionCategoryDescriptor[] fActionCategoryDescriptors;
	private ActionDescriptor[] fActionDescriptors;
	private ListenerDescriptor[] fListenerDescriptors;
	private UIObjectFactoryDescriptor[] uiObjectFactoryDescriptor;
	private IHoverHelper hoverHelper;

	// Reads every contributed extension once, at construction time.
	private BPELUIRegistry() {
		readExpressionLanguageEditors();
		readHoverHelpers();
		readActions();
		readListeners();
		readUIObjecFactories();
	}

	/**
	 * @return the singleton instance of this regitry.
	 */
	public static BPELUIRegistry getInstance() {
		if (instance == null) {
			instance = new BPELUIRegistry();
		}
		return instance;
	}

	/**
	 * Return the hover helper.
	 * @return the hover helper extension.
	 * @throws CoreException
	 */
	public IHoverHelper getHoverHelper() throws CoreException {
		if (hoverHelperDescriptor == null) {
			return null;
		}
		if (hoverHelper == null) {
			// Lazily instantiate the contributed helper the first time it is requested.
			hoverHelper = hoverHelperDescriptor.createHoverHelper();
		}
		return hoverHelper;
	}

	/**
	 * Returns an expression editor for the given expression language.
	 * Falls back to {@link DefaultExpressionEditor} when no editor is contributed.
	 * @param expressionLanguage
	 * @return the IExpression editor for the given expression language.
	 * @throws CoreException
	 */
	public IExpressionEditor getExpressionEditor (String expressionLanguage) throws CoreException {
		ExpressionEditorDescriptor descriptor = fLanguageToEditorDescriptor.get(expressionLanguage);
		if (descriptor == null) {
			return new DefaultExpressionEditor();
		}
		IExpressionEditor editor = descriptor.createEditor();
		return editor;
	}

	/**
	 * Returns an expression editor descriptor for the given expression language.
	 *
	 * @param expressionLanguage
	 * @return the expression language descriptor for the given expression language.
	 */
	public ExpressionEditorDescriptor getExpressionEditorDescriptor(String expressionLanguage) {
		return fLanguageToEditorDescriptor.get(expressionLanguage);
	}

	// Populates fLanguageToEditorDescriptor from the expressionEditors extension point,
	// logging (not throwing) when a contribution is missing required attributes.
	private void readExpressionLanguageEditors() {
		fLanguageToEditorDescriptor = new HashMap<String,ExpressionEditorDescriptor>();
		for(IConfigurationElement editor : getConfigurationElements(EXTPT_EXPRESSION_EDITORS) ) {
			if (editor.getName().equals(ELEMENT_EDITOR)) {
				String language = editor.getAttribute(ATT_EXPRESSION_LANGUAGE);
				String clazz = editor.getAttribute(ATT_CLASS);
				if (language == null || clazz == null) {
					String pluginId = BPELUIPlugin.INSTANCE.getBundle().getSymbolicName();
					IStatus status = new Status(IStatus.ERROR, pluginId, IBPELUIConstants.MISSING_ATTRIBUTE,
							Messages.BPELUIRegistry_Expression_language_editors_must_provide_expressionLanguage_and_class__8, null);
					BPELUIPlugin.INSTANCE.getLog().log(status);
				} else {
					ExpressionEditorDescriptor descriptor = new ExpressionEditorDescriptor();
					descriptor.setExpressionLanguage(language);
					descriptor.setElement(editor);
					String label = editor.getAttribute(ATT_LABEL);
					descriptor.setLabel(label);
					fLanguageToEditorDescriptor.put(language, descriptor);
				}
			}
		}
	}

	/**
	 * Return the UIObjectFactory descriptors
	 */
	public UIObjectFactoryDescriptor[] getUIObjectFactoryDescriptors() {
		return uiObjectFactoryDescriptor;
	}

	/**
	 * Return all action descriptors.
	 * @return Return all action descriptors.
	 */
	public ActionDescriptor[] getActionDescriptors() {
		return fActionDescriptors;
	}

	/**
	 * Returns the ActionDescriptor for the given EClass.
	 * @param target the target
	 * @return Returns the ActionDescriptor for the given EClass.
	 */
	public ActionDescriptor getActionDescriptor(EClass target) {
		// Linear scan; the action list is small and read once per lookup.
		for(ActionDescriptor descriptor : fActionDescriptors ) {
			if (descriptor.getAction().getModelType() == target) {
				return descriptor;
			}
		}
		return null;
	}

	/**
	 * @return the action category descriptors.
	 */
	public ActionCategoryDescriptor[] getActionCategoryDescriptors() {
		return fActionCategoryDescriptors;
	}

	/**
	 * @return the listener descriptors.
	 */
	public ListenerDescriptor[] getListenerDescriptors() {
		return fListenerDescriptors;
	}

	// Reads the hoverHelpers extension point; the last valid contribution wins.
	private void readHoverHelpers() {
		for (IConfigurationElement helper : getConfigurationElements(EXTPT_HOVERHELPERS) ) {
			if (helper.getName().equals(ELEMENT_HOVERHELPER) == false) {
				continue;
			}
			String clazz = helper.getAttribute(ATT_CLASS);
			if (clazz == null) {
				continue;
			}
			HoverHelperDescriptor descriptor = new HoverHelperDescriptor();
			descriptor.setElement(helper);
			this.hoverHelperDescriptor = descriptor;
		}
	}

	/**
	 * Read all the actions and categories.
*/
	private void readActions() {
		List<ActionCategoryDescriptor> categories = new ArrayList<ActionCategoryDescriptor>();
		List<ActionDescriptor> actions = new ArrayList<ActionDescriptor>();
		for (IConfigurationElement element : getConfigurationElements(EXTPT_ACTIONS)) {
			if (element.getName().equals(ELEMENT_CATEGORY)) {
				String name = element.getAttribute(ATT_NAME);
				String id = element.getAttribute(ATT_ID);
				if (name != null && id != null) {
					ActionCategoryDescriptor descriptor = new ActionCategoryDescriptor();
					descriptor.setName(name);
					descriptor.setId(id);
					categories.add(descriptor);
				}
			} else if (element.getName().equals(ELEMENT_ACTION)) {
				String id = element.getAttribute(ATT_ID);
				String category = element.getAttribute(ATT_CATEGORY_ID);
				String specCompliant = element.getAttribute(ATT_SPEC_COMPLIANT);
				if (category != null && id != null) {
					ActionDescriptor descriptor = new ActionDescriptor();
					descriptor.setId(id);
					descriptor.setCategoryId(category);
					descriptor.setSpecCompliant(Boolean.valueOf(specCompliant).booleanValue());
					try {
						AbstractBPELAction action = (AbstractBPELAction)element.createExecutableExtension(ATT_CLASS);
						descriptor.setAction(action);
					} catch (CoreException e) {
						// A broken contribution must not prevent the rest from loading.
						BPELUIPlugin.log(e);
					}
					actions.add(descriptor);
					// register AdapterFactory - since it has to be done only once we do it here
					AdapterFactory factory = descriptor.getAction().getAdapterFactory();
					if (factory != null) {
						BPELUtil.registerAdapterFactory(descriptor.getAction().getModelType(), factory);
					}
				}
			}
		}
		fActionCategoryDescriptors = new ActionCategoryDescriptor[categories.size()];
		categories.toArray(fActionCategoryDescriptors);
		fActionDescriptors = new ActionDescriptor[actions.size()];
		actions.toArray(fActionDescriptors);
	}

	/**
	 * Read all the UI object factory extensions.
	 * (The previous Javadoc here was a copy-paste of readActions'.)
	 */
	private void readUIObjecFactories() {
		// Fix: parameterize the previously raw List/ArrayList, matching the generics used
		// elsewhere in this class (e.g. readActions, readListeners).
		List<UIObjectFactoryDescriptor> factories = new ArrayList<UIObjectFactoryDescriptor>();
		IConfigurationElement[] extensions = getConfigurationElements("uiObjectFactories");
		for (int i = 0; i < extensions.length; i++) {
			IConfigurationElement element = extensions[i];
			if (element.getName().equals("factory")) {
				String id = element.getAttribute(ATT_ID);
				String category = element.getAttribute(ATT_CATEGORY_ID);
				String specCompliant = element.getAttribute(ATT_SPEC_COMPLIANT);
				if (category != null && id != null) {
					UIObjectFactoryDescriptor descriptor = new UIObjectFactoryDescriptor();
					descriptor.setId(id);
					descriptor.setCategoryId(category);
					descriptor.setSpecCompliant(Boolean.valueOf(specCompliant).booleanValue());
					try {
						AbstractUIObjectFactory factory = (AbstractUIObjectFactory) element.createExecutableExtension(ATT_CLASS);
						descriptor.setFactory(factory);
						descriptor.setConfigElement(element);
					} catch (CoreException e) {
						BPELUIPlugin.log(e);
					}
					factories.add(descriptor);
				}
			}
		}
		uiObjectFactoryDescriptor = new UIObjectFactoryDescriptor[factories.size()];
		factories.toArray(uiObjectFactoryDescriptor);
	}

	/**
	 * Read all the model listeners
	 */
	private void readListeners() {
		List<ListenerDescriptor> listeners = new ArrayList<ListenerDescriptor>();
		for (IConfigurationElement element : getConfigurationElements(EXTPT_MODELLISTENER)) {
			if (element.getName().equals(ELEMENT_LISTENER)) {
				String id = element.getAttribute(ATT_ID);
				if (id != null) {
					ListenerDescriptor descriptor = new ListenerDescriptor();
					descriptor.setId(id);
					try {
						IModelListener listener = (IModelListener)element.createExecutableExtension(ATT_CLASS);
						descriptor.setModelListener(listener);
					} catch (CoreException e) {
						BPELUIPlugin.log(e);
					}
					listeners.add(descriptor);
				}
			}
		}
		fListenerDescriptors = new ListenerDescriptor[listeners.size()];
		listeners.toArray(fListenerDescriptors);
	}

	/**
	 * Given an extension point name returns its configuration elements.
	 * Returns an empty array (never null) when the extension point does not exist,
	 * so callers can iterate the result safely.
	 */
	private IConfigurationElement[] getConfigurationElements(String extensionPointId) {
		IExtensionPoint extensionPoint = Platform.getExtensionRegistry().getExtensionPoint(
				BPELUIPlugin.PLUGIN_ID, extensionPointId);
		if (extensionPoint == null) {
			// Fix: previously returned null, which made every for-each caller
			// (readActions, readListeners, readExpressionLanguageEditors, ...) throw a
			// NullPointerException whenever the extension point was missing.
			return new IConfigurationElement[0];
		}
		return extensionPoint.getConfigurationElements();
	}

	/**
	 * @return an array of ExpressionEditorDescriptor values.
	 */
	public ExpressionEditorDescriptor[] getExpressionEditorDescriptors() {
		return fLanguageToEditorDescriptor.values().toArray( EMPTY_EDITOR_DESCRIPTORS );
	}
}
package io.tradle.joe.sharing;

import io.netty.handler.codec.http.QueryStringEncoder;
import io.netty.util.CharsetUtil;
import io.tradle.joe.Config;
import io.tradle.joe.Joe;
import io.tradle.joe.TransactionData;
import io.tradle.joe.TransactionDataType;
import io.tradle.joe.events.KeyValue;
import io.tradle.joe.exceptions.StorageException;
import io.tradle.joe.utils.AESUtils;
import io.tradle.joe.utils.ECUtils;
import io.tradle.joe.utils.Gsons;
import io.tradle.joe.utils.HttpResponseData;
import io.tradle.joe.utils.TransactionUtils;
import io.tradle.joe.utils.Utils;

import java.math.BigInteger;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.codec.binary.Base64;
import org.bitcoinj.core.AddressFormatException;
import org.bitcoinj.core.Base58;
import org.bitcoinj.core.ECKey;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.core.Transaction;
import org.bitcoinj.core.TransactionInput;
import org.bitcoinj.core.TransactionOutput;
import org.bitcoinj.core.Wallet;
import org.bitcoinj.wallet.DecryptingKeyBag;
import org.bitcoinj.wallet.KeyBag;
import org.h2.security.SHA256;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.crypto.params.KeyParameter;
import org.spongycastle.util.encoders.Hex;

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

/**
 * Moves shared data between the blockchain and the "keeper" storage service.
 *
 * <p>Transactions carry a key (Base58) that points at a file held by the keeper.
 * Two transaction types are visible here:
 * <ul>
 *   <li>{@code CLEARTEXT_STORE} — the keeper file is the payload itself;</li>
 *   <li>{@code ENCRYPTED_SHARE} — the keeper file is an <em>intermediate</em>
 *       (permission) file, encrypted with an ECDH shared secret, which names
 *       the real file's hash and its AES decryption key.</li>
 * </ul>
 */
public class StoragePipe {

    private static final Logger logger = LoggerFactory.getLogger(StoragePipe.class);

    public static final Charset FILE_ENCODING = CharsetUtil.UTF_8;

    private final Wallet wallet;
    private final NetworkParameters params;
    private final JsonParser jsonParser;

    public StoragePipe(Wallet wallet) {
        this.wallet = wallet;
        jsonParser = new JsonParser();
        params = wallet.getNetworkParameters();
    }

    /**
     * Fetches the payload referenced by a single transaction.
     *
     * @return the key/value pair, or null if the transaction carries no data
     */
    public KeyValue receiveData(Transaction tx) {
        TransactionData data = TransactionUtils.getDataFromTransaction(tx);
        if (data == null)
            return null;

        return receiveData(tx, data);
    }

    /**
     * Batch variant of {@link #receiveData(Transaction)}: fetches all
     * intermediate files in one keeper round-trip, resolves decryption keys,
     * then fetches the referenced encrypted files in a second round-trip.
     *
     * @return key/value pairs in transaction order, or null if there was
     *         nothing to fetch or the keeper batch lookup failed
     */
    public List<KeyValue> receiveData(List<Transaction> txs) {
        int numTxs = txs.size();
        List<TransactionData> tData = new ArrayList<TransactionData>();
        List<String> hashes = new ArrayList<String>();

        // Pass 1: build the comma-separated list of keys to batch-query the keeper.
        StringBuilder intermediateKeys = new StringBuilder();
        for (Transaction t : txs) {
            // NOTE(review): getDataFromTransaction may return null for a data-less
            // transaction (the single-tx path checks); here a null td would NPE.
            // Preserved as-is to keep index alignment with txs — confirm upstream
            // guarantees every tx in the batch carries data.
            TransactionData td = TransactionUtils.getDataFromTransaction(t);
            tData.add(td);
            String keyString = keyToString(td.data());
            hashes.add(keyString);
            intermediateKeys.append(keyString);
            intermediateKeys.append(",");
        }

        if (intermediateKeys.length() == 0)
            return null;

        JsonArray iFiles = fetchFiles(intermediateKeys.substring(0, intermediateKeys.length() - 1));
        if (iFiles == null)
            return null; // keeper lookup failed; nothing we can resolve

        // Pass 2: for encrypted shares, decode the intermediate (permission) file
        // to learn the real file's hash and its decryption key.
        List<String> decryptionKeys = new ArrayList<String>();
        for (int i = 0; i < numTxs; i++) {
            String iFile = tryGetAsString(iFiles, i);
            String dKey = null;
            if (iFile != null) {
                Transaction t = txs.get(i);
                if (tData.get(i).type() == TransactionDataType.ENCRYPTED_SHARE) {
                    PermissionFileData iData = getIntermediateFileData(t, iFile);
                    hashes.set(i, iData.fileHash());
                    dKey = iData.decryptionKey();
                }
            }

            decryptionKeys.add(dKey);
        }

        // Pass 3: batch-fetch the real files that the intermediate files point to.
        StringBuilder keys = new StringBuilder();
        for (int i = 0; i < numTxs; i++) {
            String dKey = decryptionKeys.get(i);
            if (dKey != null) {
                keys.append(hashes.get(i));
                keys.append(",");
            }
        }

        JsonArray files = null;
        if (keys.length() != 0)
            files = fetchFiles(keys.substring(0, keys.length() - 1));

        // Merge cleartext-stored payloads with decrypted shared files,
        // preserving transaction order.
        //
        // FIX: the cursor into 'files' previously started at files.size() (and
        // NPE'd when files == null), and the two branches were swapped so the
        // cleartext path tried to decrypt with a null key (Hex.decode(null)
        // crash) while the encrypted path never decrypted at all. The
        // single-transaction path below shows the intended semantics.
        List<KeyValue> data = new ArrayList<KeyValue>();
        int j = 0; // cursor into 'files'; entries were appended in the same order as 'keys'
        for (int i = 0; i < numTxs; i++) {
            String dKey = decryptionKeys.get(i);
            String file;
            if (dKey == null) {
                // Cleartext store: the fetched intermediate file IS the payload.
                file = tryGetAsString(iFiles, i);
            } else {
                // Encrypted share: take the next fetched file and decrypt it.
                file = tryGetAsString(files, j++);
                if (file != null)
                    file = decryptFile(file, dKey);
            }

            data.add(new KeyValue(hashes.get(i), file));
        }

        return data;
    }

    /** Null-safe element access: returns null if the array or element is missing. */
    private static String tryGetAsString(JsonArray arr, int idx) {
        if (arr == null)
            return null;

        JsonElement j = arr.get(idx);
        if (j == null)
            return null;

        return j.getAsString();
    }

    /**
     * Resolves a single transaction's payload given its already-parsed data.
     *
     * @return the key/value pair, or null if the keeper has no file for the key
     */
    public KeyValue receiveData(Transaction tx, TransactionData data) {
        String intermediateFile = fetchFile(data.data());
        if (intermediateFile == null)
            return null;

        if (data.type() == TransactionDataType.CLEARTEXT_STORE)
            return new KeyValue(keyToString(data.data()), intermediateFile);

        PermissionFileData iData = getIntermediateFileData(tx, intermediateFile);
        String dKey = iData.decryptionKey();
        byte[] fileHash = keyToBytes(iData.fileHash());
        String file = fetchFile(fileHash);
        if (file == null)
            return null;

        file = decryptFile(file, dKey);
        return new KeyValue(iData.fileHash(), file);
    }

    /**
     * Decrypts the intermediate (permission) file with the ECDH shared secret
     * derived from the transaction's sender key and our receiving key.
     */
    private PermissionFileData getIntermediateFileData(Transaction tx, String encryptedIntermediateFile) {
        byte[] secret = getSharedSecret(tx);
        KeyParameter key = new KeyParameter(secret);
        String decryptedIntermediateFile = decryptFile(encryptedIntermediateFile, key);
        // was System.out.println; route debug output through the class logger
        logger.debug("Decrypted intermediate file: {}", decryptedIntermediateFile);
        return Gsons.ugly().fromJson(decryptedIntermediateFile, PermissionFileData.class);
    }

    private JsonElement parseJson(String file) {
        return jsonParser.parse(file);
    }

    /**
     * Fetches one file from the keeper by its hash.
     *
     * @return the file body, or null if the keeper reports an error status
     */
    private String fetchFile(byte[] hash) {
        String hashString = TransactionUtils.transactionDataToString(hash);
        HttpResponseData response = queryKeeper("key", hashString);
        if (response.code() > 399) {
            logger.error("Hash not found in storage: " + hashString);
            return null;
        }

        return response.response();
    }

    /**
     * Batch-fetches files from the keeper.
     *
     * @param keysCsv comma-separated Base58 keys
     * @return a JSON array aligned with the requested keys, or null on error status
     */
    private JsonArray fetchFiles(String keysCsv) {
        HttpResponseData response = queryKeeper("keys", keysCsv);
        if (response.code() > 399) {
            logger.error("Hashes not found in storage: " + keysCsv);
            return null;
        }

        // reuse the instance parser instead of constructing a new JsonParser
        return (JsonArray) parseJson(response.response());
    }

    /**
     * Performs a GET against the first configured keeper with a single query
     * parameter. Extracted from the three call sites that duplicated this code.
     *
     * @throws StorageException if the keeper URL cannot be formed into a URI
     */
    private HttpResponseData queryKeeper(String param, String value) {
        Config config = Joe.JOE.config();
        QueryStringEncoder qs = new QueryStringEncoder(config.keepers().get(0).toString());
        qs.addParam(param, value);
        try {
            return Utils.get(qs.toUri());
        } catch (URISyntaxException e) {
            logger.error("Constructed bad URI: " + qs, e);
            throw new StorageException("constructed bad URI for fetching file from keeper", e);
        }
    }

    private String decryptFile(String file, String decryptionKey) {
        return decryptFile(file, new KeyParameter(encryptionKeyToBytes(decryptionKey)));
    }

    /**
     * AES-decrypts a Base64 payload; a null key means "not encrypted" and the
     * input is returned untouched.
     */
    private String decryptFile(String encryptedFile, KeyParameter decryptionKey) {
        if (decryptionKey == null)
            return encryptedFile;

        byte[] encrypted = ciphertextToBytes(encryptedFile);
        byte[] decrypted = null;
        try {
            decrypted = AESUtils.decrypt(encrypted, decryptionKey);
        } catch (Exception e) {
            throw new IllegalArgumentException(
                    "Failed to decrypt data with provided decryption key: " + encryptedFile, e);
        }

        return fileDataToString(decrypted);
    }

    public static byte[] encryptFile(String file, KeyParameter key) {
        return AESUtils.encrypt(fileStringToBytes(file), key);
    }

    /**
     * Derives the ECDH shared secret for this transaction: the sender's public
     * key (from the first input's scriptSig) combined with the private key of
     * whichever of our addresses received the sharing output.
     */
    private byte[] getSharedSecret(Transaction tx) {
        TransactionInput in = tx.getInput(0);
        List<TransactionOutput> toMe = TransactionUtils.getReceived(wallet, tx);
        TransactionOutput out = null;
        for (TransactionOutput o : toMe) {
            // Prefer the output carrying the canonical sharing amount.
            if (o.getValue().equals(ShareRequest.SHARING_COST)) {
                out = o;
                break;
            }
        }

        if (out == null)
            out = toMe.get(0);

        byte[] theirPubKey = in.getScriptSig().getPubKey();
        KeyBag keyBag = new DecryptingKeyBag(wallet, null);
        // NOTE(review): findKeyFromPubHash may return null if the output's key
        // is not in our wallet — confirm callers only pass transactions we received.
        ECKey myKey = keyBag.findKeyFromPubHash(out.getAddressFromP2PKHScript(params).getHash160());
        BigInteger myPrivKey = myKey.getPrivKey();
        return ECUtils.getSharedSecret(ECKey.fromPublicOnly(theirPubKey).getPubKeyPoint(), myPrivKey);
    }

    /**
     * Raw keeper lookup by string key.
     *
     * @return the UTF-8 bytes of the stored value, or null on any failure
     */
    public byte[] getData(String key) {
        QueryStringEncoder qs = new QueryStringEncoder(Joe.JOE.config().keepers().get(0).toString());
        qs.addParam("key", key);
        HttpResponseData response = null;
        try {
            response = Utils.get(qs.toUri());
        } catch (URISyntaxException e) {
            // Unlike fetchFile(), this public lookup is best-effort: log and
            // return null instead of throwing.
            logger.error("Constructed bad URI: " + qs, e);
            return null;
        }

        if (response.code() > 399) {
            logger.error("Hash not found in storage: " + key);
            return null;
        }

        // was System.out.println; route debug output through the class logger
        logger.debug("Hash found in storage! Key: {} Value: {}", key, response.response());
        return response.response().getBytes(CharsetUtil.UTF_8);
    }

    /** Stores a key/value pair on the first configured keeper. */
    public static HttpResponseData store(String key, String value) {
        Config.AddressConfig keeper = Joe.JOE.config().keepers().get(0);
        QueryStringEncoder url = new QueryStringEncoder(keeper.toString());
        url.addParam("key", key);
        url.addParam("val", value);
        try {
            return io.tradle.joe.utils.Utils.get(url.toUri());
        } catch (URISyntaxException e) {
            // should never happen...
            throw new IllegalArgumentException("invalid keeper url", e);
        }
    }

    public static String getStorageKeyStringFor(String file) {
        return keyToString(getStorageKeyFor(fileStringToBytes(file)));
    }

    public static String getStorageKeyStringFor(byte[] data) {
        return keyToString(getStorageKeyFor(data));
    }

    /** Storage key = SHA-256 of the raw file bytes. */
    public static byte[] getStorageKeyFor(byte[] data) {
        return SHA256.getHash(data, false);
    }

    public static byte[] keyToBytes(String key) {
        try {
            return Base58.decode(key);
        } catch (AddressFormatException e) {
            throw new IllegalArgumentException("Provided key was not in Base58 encoding", e);
        }
    }

    public static String keyToString(byte[] key) {
        return Base58.encode(key);
    }

    public static String encryptionKeyToString(byte[] key) {
        return new String(Hex.encode(key));
    }

    public static byte[] encryptionKeyToBytes(String key) {
        return Hex.decode(key);
    }

    public static String ciphertextBytesToString(byte[] ciphertextBytes) {
        return Base64.encodeBase64String(ciphertextBytes);
    }

    public static byte[] ciphertextToBytes(String ciphertext) {
        return Base64.decodeBase64(ciphertext);
    }

    public static String fileDataToString(byte[] fileData) {
        return new String(fileData, FILE_ENCODING);
    }

    public static byte[] fileStringToBytes(String file) {
        return file.getBytes(FILE_ENCODING);
    }
}
/*L
 * Copyright Duke Comprehensive Cancer Center
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/catrip/LICENSE.txt for details.
 */

/*
 * Created on Apr 28, 2006
 */
package gov.nih.nci.cagrid.ant;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;

import net.sourceforge.cobertura.instrument.Main;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;

/**
 * Ant task that instruments jar artifacts with Cobertura: each artifact jar is
 * copied into a temp dir, exploded, instrumented in place, re-jarred, and
 * copied back over the original.
 */
public class Instrument extends Task {

    // typed List<> replaces the previous raw List; behavior unchanged
    private List<Artifact> artifactList;
    private File tempDir;
    private File datafile;

    public Instrument() {
        super();
        this.artifactList = new ArrayList<Artifact>();
    }

    public File getTempDir() {
        return tempDir;
    }

    public void setTempDir(File tempDir) {
        this.tempDir = tempDir;
    }

    public File getDatafile() {
        return datafile;
    }

    public void setDatafile(File datafile) {
        this.datafile = datafile;
    }

    public void addConfiguredArtifact(Artifact artifact) {
        this.artifactList.add(artifact);
    }

    /**
     * Instruments every file of every configured artifact.
     *
     * @throws BuildException wrapping any I/O or instrumentation failure
     */
    public void execute() throws BuildException {
        super.execute();
        for (int i = 0; i < artifactList.size(); i++) {
            Artifact artifact = artifactList.get(i);
            File[] artifactFiles = Utils.getFiles(getProject(), artifact.getFileSetList());
            for (int j = 0; j < artifactFiles.length; j++) {
                File artifactFile = artifactFiles[j];
                // per-artifact-file scratch area
                File tempDir = getTempDir(this.tempDir, artifact, artifactFile);
                tempDir.mkdirs();
                // utility dirs: exploded classes, pristine copy, instrumented jar
                File classDir = new File(tempDir, "intstrumented-classes");
                File libDir = new File(tempDir, "lib");
                File ilibDir = new File(tempDir, "instrumented-lib");
                classDir.mkdir();
                libDir.mkdir();
                ilibDir.mkdir();
                File copyFile = new File(libDir, artifactFile.getName());
                File instrumentedFile = new File(ilibDir, artifactFile.getName());
                // copy -> explode -> instrument -> re-jar -> replace original
                try {
                    Utils.copy(artifactFile, copyFile);
                    unjar(copyFile, classDir);
                    instrument(classDir);
                    jar(classDir, instrumentedFile);
                    Utils.copy(instrumentedFile, artifactFile);
                } catch (Exception e) {
                    // cause is preserved inside the BuildException
                    throw new BuildException(e);
                }
                System.out.println("instrumented " + artifactFile.getName());
            }
        }
    }

    /** Scratch directory for one artifact file: {root}/{artifactId}/{file}-dir */
    public static File getTempDir(File rootTempDir, Artifact artifact, File artifactFile) {
        return new File(rootTempDir,
            artifact.getIdentifer() + File.separator + artifactFile.getName() + "-dir");
    }

    /**
     * Explodes a jar/zip into destDir.
     *
     * <p>FIX: the ZipFile (and, on error, the entry streams) previously leaked;
     * all three are now closed in finally blocks.
     */
    private void unjar(File jarFile, File destDir) throws ZipException, IOException {
        ZipFile zipfile = new ZipFile(jarFile);
        try {
            Enumeration e = zipfile.entries();
            while (e.hasMoreElements()) {
                ZipEntry entry = (ZipEntry) e.nextElement();
                File outFile = new File(destDir, entry.getName());
                if (entry.isDirectory()) {
                    outFile.mkdir();
                    continue;
                }
                outFile.getParentFile().mkdirs();
                BufferedInputStream is = new BufferedInputStream(zipfile.getInputStream(entry));
                try {
                    BufferedOutputStream os =
                        new BufferedOutputStream(new FileOutputStream(outFile));
                    try {
                        byte[] data = new byte[10240];
                        int count;
                        while ((count = is.read(data)) != -1)
                            os.write(data, 0, count);
                        os.flush();
                    } finally {
                        os.close();
                    }
                } finally {
                    is.close();
                }
            }
        } finally {
            zipfile.close();
        }
    }

    /** Runs the Cobertura instrumenter over the exploded class directory. */
    private void instrument(File classDir) {
        System.out.println(classDir);
        Main.main(new String[]{
            "--datafile", datafile.toString(),
            classDir.toString(),
        });
    }

    /**
     * Re-jars an exploded directory by invoking the JDK's jar tool.
     *
     * <p>FIX: the command previously passed a literal {@code *}, which
     * Runtime.exec does not glob-expand, so the jar tool saw a nonexistent
     * file named "*". {@code -C dir .} packs the whole directory. The exit
     * status (previously ignored) is now checked.
     */
    private void jar(File dir, File destFile) throws IOException, InterruptedException {
        String jar = System.getenv("JAVA_HOME") + File.separator + "bin" + File.separator + "jar";
        String[] cmd = new String[]{
            jar, "cf", destFile.getAbsolutePath(),
            "-C", dir.getAbsolutePath(), ".",
        };
        System.out.println(flatten(cmd));
        Process p = Runtime.getRuntime().exec(cmd, null, dir);
        int exit = p.waitFor();
        if (exit != 0) {
            throw new IOException("jar command failed (exit " + exit + "): " + flatten(cmd));
        }
    }

    /** Joins a command array into one space-separated string for logging. */
    private String flatten(String[] cmd) {
        StringBuilder buf = new StringBuilder();
        for (int i = 0; i < cmd.length; i++) {
            if (i > 0)
                buf.append(' ');
            buf.append(cmd[i]);
        }
        return buf.toString();
    }

    /**
     * Recursively writes a directory tree into an already-open JarOutputStream,
     * skipping manifest files (the stream supplies its own manifest).
     *
     * <p>FIX: the file-copy block was a bare {@code {}} block, not attached by
     * {@code else}, so after recursing into a directory the code also tried to
     * open a FileInputStream on the directory itself and threw.
     */
    private void jar(File baseDir, File file, JarOutputStream os) throws IOException {
        if (file.isDirectory()) {
            File[] files = file.listFiles();
            for (int i = 0; i < files.length; i++) {
                jar(baseDir, files[i], os);
            }
        } else if (file.getName().startsWith("Manifest")) {
            return;
        } else {
            BufferedInputStream is = new BufferedInputStream(new FileInputStream(file));
            try {
                String jarName = getJarName(baseDir, file);
                JarEntry entry = new JarEntry(jarName);
                os.putNextEntry(entry);
                byte[] data = new byte[10240];
                int count;
                while ((count = is.read(data)) != -1)
                    os.write(data, 0, count);
                os.closeEntry();
            } finally {
                is.close();
            }
        }
    }

    /**
     * Entry name = path relative to baseDir.
     * NOTE(review): the result keeps a leading File.separator and uses
     * platform separators — jar entries conventionally use '/' with no leading
     * slash; confirm whether downstream consumers depend on the current form.
     */
    private String getJarName(File baseDir, File file) {
        return file.getAbsolutePath().substring(baseDir.getAbsolutePath().length());
    }
}
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.siddhi.extension.table.rdbms; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.wso2.siddhi.core.ExecutionPlanRuntime; import org.wso2.siddhi.core.SiddhiManager; import org.wso2.siddhi.core.event.Event; import org.wso2.siddhi.core.query.output.callback.QueryCallback; import org.wso2.siddhi.core.stream.input.InputHandler; import org.wso2.siddhi.core.util.EventPrinter; import org.wso2.siddhi.query.api.exception.DuplicateDefinitionException; import java.sql.Connection; import java.sql.SQLException; import javax.sql.DataSource; public class UpdateOrInsertTableTestCase { private static final Logger log = Logger.getLogger(UpdateOrInsertTableTestCase.class); private int inEventCount; private int removeEventCount; private boolean eventArrived; private DataSource dataSource = new BasicDataSource(); @Before public void init() { inEventCount = 0; removeEventCount = 0; eventArrived = false; } @Test public void updateOrInsertTableTest1() throws InterruptedException { log.info("updateOrInsertTableTest1"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(org.wso2.siddhi.extension.table.rdbms.RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if 
(connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream UpdateStockStream (symbol string, price float, volume long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "update or insert into StockTable " + " on StockTable.symbol=='IBM' ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 75.6f, 100l}); stockStream.send(new Object[]{"WSO2", 57.6f, 100l}); updateStockStream.send(new Object[]{"GOOG", 10.6f, 100l}); Thread.sleep(500); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest2() throws InterruptedException { log.info("updateOrInsertTableTest2"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "@store(type = 
'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query2') " + "from StockStream " + "update or insert into StockTable " + " on StockTable.symbol==symbol ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 75.6f, 100l}); stockStream.send(new Object[]{"WSO2", 57.6f, 100l}); stockStream.send(new Object[]{"WSO2", 10f, 100l}); Thread.sleep(500); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest3() throws InterruptedException { log.info("updateOrInsertTableTest3"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long); " + "define stream UpdateStockStream (symbol string, price float, volume long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + 
"update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume==StockTable.volume) in " + "StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; case 3: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; default: Assert.assertSame(3, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 55.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); updateStockStream.send(new Object[]{"IBM", 77.6f, 200l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); Thread.sleep(500); Assert.assertEquals("Number of success events", 3, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch 
(SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest4() throws InterruptedException { log.info("updateOrInsertTableTest4"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query2') " + "from StockStream " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume==StockTable.volume) in " + "StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; case 3: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; default: Assert.assertSame(3, inEventCount); } } eventArrived = true; } if (removeEvents != null) { 
removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 55.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); stockStream.send(new Object[]{"IBM", 77.6f, 200l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); Thread.sleep(500); Assert.assertEquals("Number of success events", 3, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Ignore @Test(expected = DuplicateDefinitionException.class) public void updateOrInsertTableTest5() throws InterruptedException { log.info("updateOrInsertTableTest5"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long); " + "define stream UpdateStockStream (comp string, vol long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from 
StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "select comp as symbol, vol as volume " + "update or insert into StockTable " + " on StockTable.symbol==symbol;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 55.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); updateStockStream.send(new Object[]{"FB", 300l}); checkStockStream.send(new Object[]{"FB", 300l}); checkStockStream.send(new Object[]{"WSO2", 100l}); Thread.sleep(500); Assert.assertEquals("Number of success events", 0, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", false, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest6() throws InterruptedException { log.info("updateOrInsertTableTest6"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); 
String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long); " + "define stream UpdateStockStream (comp string, vol long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "' , bloom.filters = 'enable') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "select comp as symbol, 0f as price, vol as volume " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume==StockTable.volume) in " + "StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; case 3: Assert.assertArrayEquals(new Object[]{"WSO2", 100l}, event.getData()); break; default: Assert.assertSame(3, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); 
InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 55.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); updateStockStream.send(new Object[]{"IBM", 200l}); updateStockStream.send(new Object[]{"FB", 300l}); checkStockStream.send(new Object[]{"IBM", 100l}); checkStockStream.send(new Object[]{"WSO2", 100l}); Thread.sleep(500); Assert.assertEquals("Number of success events", 3, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest7() throws InterruptedException { log.info("updateOrInsertTableTest7"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long, price float); " + "define stream UpdateStockStream (comp string, vol long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "select comp as symbol, 5f as price, vol as volume " + 
"update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume==StockTable.volume and price < " + "StockTable.price) in StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l, 56.6f}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"IBM", 200l, 0f}, event.getData()); break; default: Assert.assertSame(2, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 155.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l, 56.6f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); updateStockStream.send(new Object[]{"IBM", 200l}); checkStockStream.send(new Object[]{"IBM", 200l, 0f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); Thread.sleep(2000); Assert.assertEquals("Number of success events", 2, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { 
log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest8() throws InterruptedException { log.info("updateOrInsertTableTest8"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long, price float); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query2') " + "from StockStream " + "select symbol, price, volume " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume==StockTable.volume and price < " + "StockTable.price) in StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l, 55.6f}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"IBM", 200l, 55.6f}, event.getData()); break; default: Assert.assertSame(2, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = 
removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 155.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l, 55.6f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); stockStream.send(new Object[]{"IBM", 155.6f, 200l}); checkStockStream.send(new Object[]{"IBM", 200l, 55.6f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); Thread.sleep(500); Assert.assertEquals("Number of success events", 2, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest9() throws InterruptedException { log.info("updateOrInsertTableTest9"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long, price float); " + "define stream UpdateStockStream (comp string, vol long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into 
StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream left outer join StockTable " + " on UpdateStockStream.comp == StockTable.symbol " + "select symbol, ifThenElse(price is null,0f,price) as price, vol as volume " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(CheckStockStream.symbol==StockTable.symbol and CheckStockStream" + ".volume==StockTable.volume and CheckStockStream.price < StockTable.price) in StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 100l, 55.6f}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"IBM", 200l, 55.6f}, event.getData()); break; default: Assert.assertSame(2, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 155.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l, 55.6f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); updateStockStream.send(new Object[]{"IBM", 200l}); checkStockStream.send(new Object[]{"IBM", 200l, 55.6f}); checkStockStream.send(new Object[]{"WSO2", 
100l, 155.6f}); Thread.sleep(1000); Assert.assertEquals("Number of success events", 2, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void updateOrInsertTableTest10() throws InterruptedException { log.info("updateOrInsertTableTest10"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try (Connection connection = dataSource.getConnection()) { if (connection != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream CheckStockStream (symbol string, volume long, price float); " + "define stream UpdateStockStream (comp string, vol long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream left outer join StockTable " + " on UpdateStockStream.comp == StockTable.symbol " + "select comp as symbol, ifThenElse(price is null,5f,price) as price, vol as volume " + "update or insert into StockTable " + " on StockTable.symbol==symbol;" + "" + "@info(name = 'query3') " + "from CheckStockStream[(symbol==StockTable.symbol and volume == StockTable.volume and price <" + " StockTable.price) in StockTable] " + "insert into OutStream;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); 
executionPlanRuntime.addCallback("query3", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); if (inEvents != null) { for (Event event : inEvents) { inEventCount++; switch (inEventCount) { case 1: Assert.assertArrayEquals(new Object[]{"IBM", 200l, 0f}, event.getData()); break; case 2: Assert.assertArrayEquals(new Object[]{"WSO2", 300l, 4.6f}, event.getData()); break; default: Assert.assertSame(2, inEventCount); } } eventArrived = true; } if (removeEvents != null) { removeEventCount = removeEventCount + removeEvents.length; } eventArrived = true; } }); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler checkStockStream = executionPlanRuntime.getInputHandler("CheckStockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); checkStockStream.send(new Object[]{"IBM", 100l, 155.6f}); checkStockStream.send(new Object[]{"WSO2", 100l, 155.6f}); updateStockStream.send(new Object[]{"IBM", 200l}); updateStockStream.send(new Object[]{"WSO2", 300l}); checkStockStream.send(new Object[]{"IBM", 200l, 0f}); checkStockStream.send(new Object[]{"WSO2", 300l, 4.6f}); Thread.sleep(1000); Assert.assertEquals("Number of success events", 2, inEventCount); Assert.assertEquals("Number of remove events", 0, removeEventCount); Assert.assertEquals("Event arrived", true, eventArrived); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void insertOverwriteTableTest11() throws InterruptedException { log.info("insertOverwriteTableTest11"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try { if (dataSource.getConnection() != null) { 
DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream UpdateStockStream (symbol string, price float, volume long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "update or insert into StockTable " + " on StockTable.volume==volume ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 75.6f, 100l}); stockStream.send(new Object[]{"WSO2", 57.6f, 100l}); updateStockStream.send(new Object[]{"GOOG", 10.6f, 100l}); long totalRowsInTable = DBConnectionHelper.getDBConnectionHelperInstance().getRowsInTable(dataSource); Assert.assertEquals("Update failed", 3, totalRowsInTable); Thread.sleep(500); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } @Test public void insertOverwriteTableTest12() throws InterruptedException { log.info("insertOverwriteTableTest12"); SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setDataSource(RDBMSTestConstants.DATA_SOURCE_NAME, dataSource); try { if (dataSource.getConnection() != null) { DBConnectionHelper.getDBConnectionHelperInstance().clearDatabaseTable(dataSource, RDBMSTestConstants .TABLE_NAME); 
String streams = "" + "define stream StockStream (symbol string, price float, volume long); " + "define stream UpdateStockStream (symbol string, price float, volume long); " + "@store(type = 'rdbms' , datasource.name = '" + RDBMSTestConstants.DATA_SOURCE_NAME + "'" + " , table.name = '" + RDBMSTestConstants.TABLE_NAME + "') " + "define table StockTable (symbol string, price float, volume long); "; String query = "" + "@info(name = 'query1') " + "from StockStream " + "insert into StockTable ;" + "" + "@info(name = 'query2') " + "from UpdateStockStream " + "update or insert into StockTable " + " on StockTable.volume == volume ;"; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(streams + query); InputHandler stockStream = executionPlanRuntime.getInputHandler("StockStream"); InputHandler updateStockStream = executionPlanRuntime.getInputHandler("UpdateStockStream"); executionPlanRuntime.start(); stockStream.send(new Object[]{"WSO2", 55.6f, 100l}); stockStream.send(new Object[]{"IBM", 75.6f, 100l}); stockStream.send(new Object[]{"WSO2", 57.6f, 100l}); updateStockStream.send(new Object[]{"GOOG", 10.6f, 200l}); long totalRowsInTable = DBConnectionHelper.getDBConnectionHelperInstance().getRowsInTable(dataSource); Assert.assertEquals("Update failed", 4, totalRowsInTable); Thread.sleep(500); executionPlanRuntime.shutdown(); } } catch (SQLException e) { log.info("Test case ignored due to DB connection unavailability"); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.hbase;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.hbase.put.PutColumn;
import org.apache.nifi.hbase.put.PutFlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

@EventDriven
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"hadoop", "hbase", "put", "json"})
@CapabilityDescription("Adds rows to HBase based on the contents of incoming JSON documents. Each FlowFile must contain a single " +
        "UTF-8 encoded JSON document, and any FlowFiles where the root element is not a single document will be routed to failure. " +
        "Each JSON field name and value will become a column qualifier and value of the HBase row. Any fields with a null value " +
        "will be skipped, and fields with a complex value will be handled according to the Complex Field Strategy. " +
        "The row id can be specified either directly on the processor through the Row Identifier property, or can be extracted from the JSON " +
        "document by specifying the Row Identifier Field Name property. This processor will hold the contents of all FlowFiles for the given batch " +
        "in memory at one time.")
public class PutHBaseJSON extends AbstractPutHBase {

    // Alternative to ROW_ID: pull the row id out of the JSON document itself.
    protected static final PropertyDescriptor ROW_FIELD_NAME = new PropertyDescriptor.Builder()
            .name("Row Identifier Field Name")
            .description("Specifies the name of a JSON element whose value should be used as the row id for the given JSON document.")
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    protected static final String FAIL_VALUE = "Fail";
    protected static final String WARN_VALUE = "Warn";
    protected static final String IGNORE_VALUE = "Ignore";
    protected static final String TEXT_VALUE = "Text";

    protected static final AllowableValue COMPLEX_FIELD_FAIL = new AllowableValue(FAIL_VALUE, FAIL_VALUE,
            "Route entire FlowFile to failure if any elements contain complex values.");
    protected static final AllowableValue COMPLEX_FIELD_WARN = new AllowableValue(WARN_VALUE, WARN_VALUE,
            "Provide a warning and do not include field in row sent to HBase.");
    protected static final AllowableValue COMPLEX_FIELD_IGNORE = new AllowableValue(IGNORE_VALUE, IGNORE_VALUE,
            "Silently ignore and do not include in row sent to HBase.");
    protected static final AllowableValue COMPLEX_FIELD_TEXT = new AllowableValue(TEXT_VALUE, TEXT_VALUE,
            "Use the string representation of the complex field as the value of the given column.");

    protected static final PropertyDescriptor COMPLEX_FIELD_STRATEGY = new PropertyDescriptor.Builder()
            .name("Complex Field Strategy")
            .description("Indicates how to handle complex fields, i.e. fields that do not have a single text value.")
            .expressionLanguageSupported(false)
            .required(true)
            .allowableValues(COMPLEX_FIELD_FAIL, COMPLEX_FIELD_WARN, COMPLEX_FIELD_IGNORE, COMPLEX_FIELD_TEXT)
            .defaultValue(COMPLEX_FIELD_TEXT.getValue())
            .build();

    protected static final AllowableValue FIELD_ENCODING_STRING = new AllowableValue(STRING_ENCODING_VALUE, STRING_ENCODING_VALUE,
            "Stores the value of each field as a UTF-8 String.");
    protected static final AllowableValue FIELD_ENCODING_BYTES = new AllowableValue(BYTES_ENCODING_VALUE, BYTES_ENCODING_VALUE,
            "Stores the value of each field as the byte representation of the type derived from the JSON.");

    protected static final PropertyDescriptor FIELD_ENCODING_STRATEGY = new PropertyDescriptor.Builder()
            .name("Field Encoding Strategy")
            .description(("Indicates how to store the value of each field in HBase. The default behavior is to convert each value from the " +
                    "JSON to a String, and store the UTF-8 bytes. Choosing Bytes will interpret the type of each field from " +
                    "the JSON, and convert the value to the byte representation of that type, meaning an integer will be stored as the " +
                    "byte representation of that integer."))
            .required(true)
            .allowableValues(FIELD_ENCODING_STRING, FIELD_ENCODING_BYTES)
            .defaultValue(FIELD_ENCODING_STRING.getValue())
            .build();

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.add(HBASE_CLIENT_SERVICE);
        properties.add(TABLE_NAME);
        properties.add(ROW_ID);
        properties.add(ROW_FIELD_NAME);
        properties.add(ROW_ID_ENCODING_STRATEGY);
        properties.add(COLUMN_FAMILY);
        properties.add(TIMESTAMP);
        properties.add(BATCH_SIZE);
        properties.add(COMPLEX_FIELD_STRATEGY);
        properties.add(FIELD_ENCODING_STRATEGY);
        return properties;
    }

    @Override
    public Set<Relationship> getRelationships() {
        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        rels.add(REL_FAILURE);
        return rels;
    }

    /**
     * Row Identifier and Row Identifier Field Name are mutually exclusive, and
     * exactly one of them must be set.
     */
    @Override
    protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
        final Collection<ValidationResult> results = new ArrayList<>();

        final String rowId = validationContext.getProperty(ROW_ID).getValue();
        final String rowFieldName = validationContext.getProperty(ROW_FIELD_NAME).getValue();

        if (StringUtils.isBlank(rowId) && StringUtils.isBlank(rowFieldName)) {
            results.add(new ValidationResult.Builder()
                    .subject(this.getClass().getSimpleName())
                    .explanation("Row Identifier or Row Identifier Field Name is required")
                    .valid(false)
                    .build());
        } else if (!StringUtils.isBlank(rowId) && !StringUtils.isBlank(rowFieldName)) {
            results.add(new ValidationResult.Builder()
                    .subject(this.getClass().getSimpleName())
                    .explanation("Row Identifier and Row Identifier Field Name can not be used together")
                    .valid(false)
                    .build());
        }

        return results;
    }

    /**
     * Parses the FlowFile content as a single JSON document and converts each
     * field into a column of a {@link PutFlowFile}. Returns null to route the
     * FlowFile to failure (unparsable JSON, array root, complex field under the
     * Fail strategy, invalid timestamp, or missing row-id field).
     */
    @Override
    protected PutFlowFile createPut(final ProcessSession session, final ProcessContext context, final FlowFile flowFile) {
        final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String rowId = context.getProperty(ROW_ID).evaluateAttributeExpressions(flowFile).getValue();
        final String rowFieldName = context.getProperty(ROW_FIELD_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String columnFamily = context.getProperty(COLUMN_FAMILY).evaluateAttributeExpressions(flowFile).getValue();
        final String timestampValue = context.getProperty(TIMESTAMP).evaluateAttributeExpressions(flowFile).getValue();

        final boolean extractRowId = !StringUtils.isBlank(rowFieldName);
        final String complexFieldStrategy = context.getProperty(COMPLEX_FIELD_STRATEGY).getValue();
        final String fieldEncodingStrategy = context.getProperty(FIELD_ENCODING_STRATEGY).getValue();
        final String rowIdEncodingStrategy = context.getProperty(ROW_ID_ENCODING_STRATEGY).getValue();

        final Long timestamp;
        if (!StringUtils.isBlank(timestampValue)) {
            try {
                timestamp = Long.valueOf(timestampValue);
            } catch (final NumberFormatException e) {
                // narrowed from catch (Exception): Long.valueOf only throws NumberFormatException
                getLogger().error("Invalid timestamp value: " + timestampValue, e);
                return null;
            }
        } else {
            timestamp = null;
        }

        // Parse the JSON document
        final ObjectMapper mapper = new ObjectMapper();
        final AtomicReference<JsonNode> rootNodeRef = new AtomicReference<>(null);
        try {
            session.read(flowFile, new InputStreamCallback() {
                @Override
                public void process(final InputStream in) throws IOException {
                    try (final InputStream bufferedIn = new BufferedInputStream(in)) {
                        rootNodeRef.set(mapper.readTree(bufferedIn));
                    }
                }
            });
        } catch (final ProcessException pe) {
            getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[]{flowFile, pe.toString()}, pe);
            return null;
        }

        final JsonNode rootNode = rootNodeRef.get();

        // readTree can return null for empty content; guard so we route to failure instead of throwing NPE
        if (rootNode == null) {
            getLogger().error("Unable to parse content of {} as a JSON document; routing to failure", new Object[]{flowFile});
            return null;
        }

        if (rootNode.isArray()) {
            getLogger().error("Root node of JSON must be a single document, found array for {}; routing to failure", new Object[]{flowFile});
            return null;
        }

        final Collection<PutColumn> columns = new ArrayList<>();
        final AtomicReference<String> rowIdHolder = new AtomicReference<>(null);

        // convert each field/value to a column for the put, skip over nulls and arrays
        final Iterator<String> fieldNames = rootNode.fieldNames();
        while (fieldNames.hasNext()) {
            final String fieldName = fieldNames.next();
            final AtomicReference<byte[]> fieldValueHolder = new AtomicReference<>(null);

            final JsonNode fieldNode = rootNode.get(fieldName);
            if (fieldNode.isNull()) {
                getLogger().debug("Skipping {} because value was null", new Object[]{fieldName});
            } else if (fieldNode.isValueNode()) {
                // for a value node we need to determine if we are storing the bytes of a string, or the bytes of actual types
                if (STRING_ENCODING_VALUE.equals(fieldEncodingStrategy)) {
                    final byte[] valueBytes = clientService.toBytes(fieldNode.asText());
                    fieldValueHolder.set(valueBytes);
                } else {
                    fieldValueHolder.set(extractJNodeValue(fieldNode));
                }
            } else {
                // for non-null, non-value nodes, determine what to do based on the handling strategy
                switch (complexFieldStrategy) {
                    case FAIL_VALUE:
                        getLogger().error("Complex value found for {}; routing to failure", new Object[]{fieldName});
                        return null;
                    case WARN_VALUE:
                        getLogger().warn("Complex value found for {}; skipping", new Object[]{fieldName});
                        break;
                    case TEXT_VALUE:
                        // use toString() here because asText() is only guaranteed to be supported on value nodes
                        // some other types of nodes, like ArrayNode, provide toString implementations
                        fieldValueHolder.set(clientService.toBytes(fieldNode.toString()));
                        break;
                    case IGNORE_VALUE:
                        // silently skip
                        break;
                    default:
                        break;
                }
            }

            // if we have a field value, then see if this is the row id field, if so store the value for later
            // otherwise add a new column where the fieldName and fieldValue are the column qualifier and value
            if (fieldValueHolder.get() != null) {
                if (extractRowId && fieldName.equals(rowFieldName)) {
                    rowIdHolder.set(fieldNode.asText());
                } else {
                    final byte[] colFamBytes = columnFamily.getBytes(StandardCharsets.UTF_8);
                    final byte[] colQualBytes = fieldName.getBytes(StandardCharsets.UTF_8);
                    final byte[] colValBytes = fieldValueHolder.get();
                    columns.add(new PutColumn(colFamBytes, colQualBytes, colValBytes, timestamp));
                }
            }
        }

        // if we are expecting a field name to use for the row id and the incoming document doesn't have it
        // log an error message so the user can see what the field names were and return null so it gets routed to failure
        if (extractRowId && rowIdHolder.get() == null) {
            final String fieldNameStr = StringUtils.join(rootNode.fieldNames(), ",");
            getLogger().error("Row ID field named '{}' not found in field names '{}'; routing to failure",
                    new Object[]{rowFieldName, fieldNameStr});
            return null;
        }

        final String putRowId = (extractRowId ? rowIdHolder.get() : rowId);
        byte[] rowKeyBytes = getRow(putRowId, rowIdEncodingStrategy);
        return new PutFlowFile(tableName, rowKeyBytes, columns, flowFile);
    }

    /**
     * Converts a JSON value node into the byte representation of its underlying type,
     * as expected by the column values: booleans as boolean bytes, integral numbers
     * as long bytes, other numbers as double bytes, and everything else as text.
     */
    private byte[] extractJNodeValue(final JsonNode n) {
        if (n.isBoolean()) {
            // boolean
            return clientService.toBytes(n.asBoolean());
        } else if (n.isNumber()) {
            if (n.isIntegralNumber()) {
                // interpret as Long
                return clientService.toBytes(n.asLong());
            } else {
                // interpret as Double
                return clientService.toBytes(n.asDouble());
            }
        } else {
            // if all else fails, interpret as String
            return clientService.toBytes(n.asText());
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.net; import java.io.*; import java.lang.management.ManagementFactory; import java.net.*; import java.nio.channels.AsynchronousCloseException; import java.nio.channels.ClosedChannelException; import java.nio.channels.ServerSocketChannel; import java.util.*; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import javax.management.MBeanServer; import javax.management.ObjectName; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.cliffc.high_scale_lib.NonBlockingHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.ScheduledExecutors; import org.apache.cassandra.concurrent.Stage; import org.apache.cassandra.concurrent.StageManager; import org.apache.cassandra.concurrent.TracingAwareExecutorService; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions; import org.apache.cassandra.db.*; 
import org.apache.cassandra.batchlog.Batch; import org.apache.cassandra.dht.AbstractBounds; import org.apache.cassandra.dht.BootStrapper; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.gms.EchoMessage; import org.apache.cassandra.gms.GossipDigestAck; import org.apache.cassandra.gms.GossipDigestAck2; import org.apache.cassandra.gms.GossipDigestSyn; import org.apache.cassandra.hints.HintMessage; import org.apache.cassandra.hints.HintResponse; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.DataOutputPlus; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.locator.ILatencySubscriber; import org.apache.cassandra.metrics.ConnectionMetrics; import org.apache.cassandra.metrics.DroppedMessageMetrics; import org.apache.cassandra.repair.messages.RepairMessage; import org.apache.cassandra.security.SSLFactory; import org.apache.cassandra.service.*; import org.apache.cassandra.service.paxos.Commit; import org.apache.cassandra.service.paxos.PrepareResponse; import org.apache.cassandra.tracing.TraceState; import org.apache.cassandra.tracing.Tracing; import org.apache.cassandra.utils.*; import org.apache.cassandra.utils.concurrent.SimpleCondition; public final class MessagingService implements MessagingServiceMBean { public static final String MBEAN_NAME = "org.apache.cassandra.net:type=MessagingService"; // 8 bits version, so don't waste versions public static final int VERSION_12 = 6; public static final int VERSION_20 = 7; public static final int VERSION_21 = 8; public static final int VERSION_22 = 9; public static final int VERSION_30 = 10; public static final int current_version = VERSION_30; public static final String FAILURE_CALLBACK_PARAM = "CAL_BAC"; public static final byte[] ONE_BYTE = new byte[1]; public static final String FAILURE_RESPONSE_PARAM = "FAIL"; /** * we 
preface every message with this number so the recipient can validate the sender is sane */ public static final int PROTOCOL_MAGIC = 0xCA552DFA; private boolean allNodesAtLeast22 = true; private boolean allNodesAtLeast30 = true; /* All verb handler identifiers */ public enum Verb { MUTATION, HINT, READ_REPAIR, READ, REQUEST_RESPONSE, // client-initiated reads and writes BATCH_STORE, // was @Deprecated STREAM_INITIATE, BATCH_REMOVE, // was @Deprecated STREAM_INITIATE_DONE, @Deprecated STREAM_REPLY, @Deprecated STREAM_REQUEST, RANGE_SLICE, @Deprecated BOOTSTRAP_TOKEN, @Deprecated TREE_REQUEST, @Deprecated TREE_RESPONSE, @Deprecated JOIN, GOSSIP_DIGEST_SYN, GOSSIP_DIGEST_ACK, GOSSIP_DIGEST_ACK2, @Deprecated DEFINITIONS_ANNOUNCE, DEFINITIONS_UPDATE, TRUNCATE, SCHEMA_CHECK, @Deprecated INDEX_SCAN, REPLICATION_FINISHED, INTERNAL_RESPONSE, // responses to internal calls COUNTER_MUTATION, @Deprecated STREAMING_REPAIR_REQUEST, @Deprecated STREAMING_REPAIR_RESPONSE, SNAPSHOT, // Similar to nt snapshot MIGRATION_REQUEST, GOSSIP_SHUTDOWN, _TRACE, // dummy verb so we can use MS.droppedMessagesMap ECHO, REPAIR_MESSAGE, PAXOS_PREPARE, PAXOS_PROPOSE, PAXOS_COMMIT, @Deprecated PAGED_RANGE, // remember to add new verbs at the end, since we serialize by ordinal UNUSED_1, UNUSED_2, UNUSED_3, UNUSED_4, UNUSED_5, ; } public static final EnumMap<MessagingService.Verb, Stage> verbStages = new EnumMap<MessagingService.Verb, Stage>(MessagingService.Verb.class) {{ put(Verb.MUTATION, Stage.MUTATION); put(Verb.COUNTER_MUTATION, Stage.COUNTER_MUTATION); put(Verb.READ_REPAIR, Stage.MUTATION); put(Verb.HINT, Stage.MUTATION); put(Verb.TRUNCATE, Stage.MUTATION); put(Verb.PAXOS_PREPARE, Stage.MUTATION); put(Verb.PAXOS_PROPOSE, Stage.MUTATION); put(Verb.PAXOS_COMMIT, Stage.MUTATION); put(Verb.BATCH_STORE, Stage.MUTATION); put(Verb.BATCH_REMOVE, Stage.MUTATION); put(Verb.READ, Stage.READ); put(Verb.RANGE_SLICE, Stage.READ); put(Verb.INDEX_SCAN, Stage.READ); put(Verb.PAGED_RANGE, Stage.READ); 
put(Verb.REQUEST_RESPONSE, Stage.REQUEST_RESPONSE); put(Verb.INTERNAL_RESPONSE, Stage.INTERNAL_RESPONSE); put(Verb.STREAM_REPLY, Stage.MISC); // actually handled by FileStreamTask and streamExecutors put(Verb.STREAM_REQUEST, Stage.MISC); put(Verb.REPLICATION_FINISHED, Stage.MISC); put(Verb.SNAPSHOT, Stage.MISC); put(Verb.TREE_REQUEST, Stage.ANTI_ENTROPY); put(Verb.TREE_RESPONSE, Stage.ANTI_ENTROPY); put(Verb.STREAMING_REPAIR_REQUEST, Stage.ANTI_ENTROPY); put(Verb.STREAMING_REPAIR_RESPONSE, Stage.ANTI_ENTROPY); put(Verb.REPAIR_MESSAGE, Stage.ANTI_ENTROPY); put(Verb.GOSSIP_DIGEST_ACK, Stage.GOSSIP); put(Verb.GOSSIP_DIGEST_ACK2, Stage.GOSSIP); put(Verb.GOSSIP_DIGEST_SYN, Stage.GOSSIP); put(Verb.GOSSIP_SHUTDOWN, Stage.GOSSIP); put(Verb.DEFINITIONS_UPDATE, Stage.MIGRATION); put(Verb.SCHEMA_CHECK, Stage.MIGRATION); put(Verb.MIGRATION_REQUEST, Stage.MIGRATION); put(Verb.INDEX_SCAN, Stage.READ); put(Verb.REPLICATION_FINISHED, Stage.MISC); put(Verb.COUNTER_MUTATION, Stage.MUTATION); put(Verb.SNAPSHOT, Stage.MISC); put(Verb.ECHO, Stage.GOSSIP); put(Verb.UNUSED_1, Stage.INTERNAL_RESPONSE); put(Verb.UNUSED_2, Stage.INTERNAL_RESPONSE); put(Verb.UNUSED_3, Stage.INTERNAL_RESPONSE); }}; /** * Messages we receive in IncomingTcpConnection have a Verb that tells us what kind of message it is. * Most of the time, this is enough to determine how to deserialize the message payload. * The exception is the REQUEST_RESPONSE verb, which just means "a reply to something you told me to do." * Traditionally, this was fine since each VerbHandler knew what type of payload it expected, and * handled the deserialization itself. Now that we do that in ITC, to avoid the extra copy to an * intermediary byte[] (See CASSANDRA-3716), we need to wire that up to the CallbackInfo object * (see below). 
*/ public static final EnumMap<Verb, IVersionedSerializer<?>> verbSerializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class) {{ put(Verb.REQUEST_RESPONSE, CallbackDeterminedSerializer.instance); put(Verb.INTERNAL_RESPONSE, CallbackDeterminedSerializer.instance); put(Verb.MUTATION, Mutation.serializer); put(Verb.READ_REPAIR, Mutation.serializer); put(Verb.READ, ReadCommand.serializer); put(Verb.RANGE_SLICE, ReadCommand.rangeSliceSerializer); put(Verb.PAGED_RANGE, ReadCommand.legacyPagedRangeCommandSerializer); put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance); put(Verb.REPAIR_MESSAGE, RepairMessage.serializer); put(Verb.GOSSIP_DIGEST_ACK, GossipDigestAck.serializer); put(Verb.GOSSIP_DIGEST_ACK2, GossipDigestAck2.serializer); put(Verb.GOSSIP_DIGEST_SYN, GossipDigestSyn.serializer); put(Verb.DEFINITIONS_UPDATE, MigrationManager.MigrationsSerializer.instance); put(Verb.TRUNCATE, Truncation.serializer); put(Verb.REPLICATION_FINISHED, null); put(Verb.COUNTER_MUTATION, CounterMutation.serializer); put(Verb.SNAPSHOT, SnapshotCommand.serializer); put(Verb.ECHO, EchoMessage.serializer); put(Verb.PAXOS_PREPARE, Commit.serializer); put(Verb.PAXOS_PROPOSE, Commit.serializer); put(Verb.PAXOS_COMMIT, Commit.serializer); put(Verb.HINT, HintMessage.serializer); put(Verb.BATCH_STORE, Batch.serializer); put(Verb.BATCH_REMOVE, UUIDSerializer.serializer); }}; /** * A Map of what kind of serializer to wire up to a REQUEST_RESPONSE callback, based on outbound Verb. 
*/ public static final EnumMap<Verb, IVersionedSerializer<?>> callbackDeserializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class) {{ put(Verb.MUTATION, WriteResponse.serializer); put(Verb.HINT, HintResponse.serializer); put(Verb.READ_REPAIR, WriteResponse.serializer); put(Verb.COUNTER_MUTATION, WriteResponse.serializer); put(Verb.RANGE_SLICE, ReadResponse.rangeSliceSerializer); put(Verb.PAGED_RANGE, ReadResponse.legacyRangeSliceReplySerializer); put(Verb.READ, ReadResponse.serializer); put(Verb.TRUNCATE, TruncateResponse.serializer); put(Verb.SNAPSHOT, null); put(Verb.MIGRATION_REQUEST, MigrationManager.MigrationsSerializer.instance); put(Verb.SCHEMA_CHECK, UUIDSerializer.serializer); put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance); put(Verb.REPLICATION_FINISHED, null); put(Verb.PAXOS_PREPARE, PrepareResponse.serializer); put(Verb.PAXOS_PROPOSE, BooleanSerializer.serializer); put(Verb.BATCH_STORE, WriteResponse.serializer); put(Verb.BATCH_REMOVE, WriteResponse.serializer); }}; /* This records all the results mapped by message Id */ private final ExpiringMap<Integer, CallbackInfo> callbacks; /** * a placeholder class that means "deserialize using the callback." We can't implement this without * special-case code in InboundTcpConnection because there is no way to pass the message id to IVersionedSerializer. */ static class CallbackDeterminedSerializer implements IVersionedSerializer<Object> { public static final CallbackDeterminedSerializer instance = new CallbackDeterminedSerializer(); public Object deserialize(DataInputPlus in, int version) throws IOException { throw new UnsupportedOperationException(); } public void serialize(Object o, DataOutputPlus out, int version) throws IOException { throw new UnsupportedOperationException(); } public long serializedSize(Object o, int version) { throw new UnsupportedOperationException(); } } /* Lookup table for registering message handlers based on the verb. 
*/ private final Map<Verb, IVerbHandler> verbHandlers; private final ConcurrentMap<InetAddress, OutboundTcpConnectionPool> connectionManagers = new NonBlockingHashMap<>(); private static final Logger logger = LoggerFactory.getLogger(MessagingService.class); private static final int LOG_DROPPED_INTERVAL_IN_MS = 5000; private final List<SocketThread> socketThreads = Lists.newArrayList(); private final SimpleCondition listenGate; /** * Verbs it's okay to drop if the request has been queued longer than the request timeout. These * all correspond to client requests or something triggered by them; we don't want to * drop internal messages like bootstrap or repair notifications. */ public static final EnumSet<Verb> DROPPABLE_VERBS = EnumSet.of(Verb._TRACE, Verb.MUTATION, Verb.COUNTER_MUTATION, Verb.HINT, Verb.READ_REPAIR, Verb.READ, Verb.RANGE_SLICE, Verb.PAGED_RANGE, Verb.REQUEST_RESPONSE, Verb.BATCH_STORE, Verb.BATCH_REMOVE); private static final class DroppedMessages { final DroppedMessageMetrics metrics; final AtomicInteger droppedInternalTimeout; final AtomicInteger droppedCrossNodeTimeout; DroppedMessages(Verb verb) { this.metrics = new DroppedMessageMetrics(verb); this.droppedInternalTimeout = new AtomicInteger(0); this.droppedCrossNodeTimeout = new AtomicInteger(0); } } // total dropped message counts for server lifetime private final Map<Verb, DroppedMessages> droppedMessagesMap = new EnumMap<>(Verb.class); private final List<ILatencySubscriber> subscribers = new ArrayList<ILatencySubscriber>(); // protocol versions of the other nodes in the cluster private final ConcurrentMap<InetAddress, Integer> versions = new NonBlockingHashMap<InetAddress, Integer>(); // message sinks are a testing hook private final Set<IMessageSink> messageSinks = new CopyOnWriteArraySet<>(); public void addMessageSink(IMessageSink sink) { messageSinks.add(sink); } public void clearMessageSinks() { messageSinks.clear(); } private static class MSHandle { public static final MessagingService 
instance = new MessagingService(false); } public static MessagingService instance() { return MSHandle.instance; } private static class MSTestHandle { public static final MessagingService instance = new MessagingService(true); } static MessagingService test() { return MSTestHandle.instance; } private MessagingService(boolean testOnly) { for (Verb verb : DROPPABLE_VERBS) droppedMessagesMap.put(verb, new DroppedMessages(verb)); listenGate = new SimpleCondition(); verbHandlers = new EnumMap<>(Verb.class); if (!testOnly) { Runnable logDropped = new Runnable() { public void run() { logDroppedMessages(); } }; ScheduledExecutors.scheduledTasks.scheduleWithFixedDelay(logDropped, LOG_DROPPED_INTERVAL_IN_MS, LOG_DROPPED_INTERVAL_IN_MS, TimeUnit.MILLISECONDS); } Function<Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>>, ?> timeoutReporter = new Function<Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>>, Object>() { public Object apply(Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>> pair) { final CallbackInfo expiredCallbackInfo = pair.right.value; maybeAddLatency(expiredCallbackInfo.callback, expiredCallbackInfo.target, pair.right.timeout); ConnectionMetrics.totalTimeouts.mark(); getConnectionPool(expiredCallbackInfo.target).incrementTimeout(); if (expiredCallbackInfo.isFailureCallback()) { StageManager.getStage(Stage.INTERNAL_RESPONSE).submit(new Runnable() { @Override public void run() { ((IAsyncCallbackWithFailure)expiredCallbackInfo.callback).onFailure(expiredCallbackInfo.target); } }); } if (expiredCallbackInfo.shouldHint()) { Mutation mutation = ((WriteCallbackInfo) expiredCallbackInfo).mutation(); return StorageProxy.submitHint(mutation, expiredCallbackInfo.target, null); } return null; } }; callbacks = new ExpiringMap<>(DatabaseDescriptor.getMinRpcTimeout(), timeoutReporter); if (!testOnly) { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.registerMBean(this, new ObjectName(MBEAN_NAME)); } catch (Exception e) { throw new 
RuntimeException(e); } } } /** * Track latency information for the dynamic snitch * * @param cb the callback associated with this message -- this lets us know if it's a message type we're interested in * @param address the host that replied to the message * @param latency */ public void maybeAddLatency(IAsyncCallback cb, InetAddress address, long latency) { if (cb.isLatencyForSnitch()) addLatency(address, latency); } public void addLatency(InetAddress address, long latency) { for (ILatencySubscriber subscriber : subscribers) subscriber.receiveTiming(address, latency); } /** * called from gossiper when it notices a node is not responding. */ public void convict(InetAddress ep) { logger.debug("Resetting pool for {}", ep); getConnectionPool(ep).reset(); } /** * Listen on the specified port. * * @param localEp InetAddress whose port to listen on. */ public void listen(InetAddress localEp) throws ConfigurationException { callbacks.reset(); // hack to allow tests to stop/restart MS for (ServerSocket ss : getServerSockets(localEp)) { SocketThread th = new SocketThread(ss, "ACCEPT-" + localEp); th.start(); socketThreads.add(th); } listenGate.signalAll(); } @SuppressWarnings("resource") private List<ServerSocket> getServerSockets(InetAddress localEp) throws ConfigurationException { final List<ServerSocket> ss = new ArrayList<ServerSocket>(2); if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.none) { try { ss.add(SSLFactory.getServerSocket(DatabaseDescriptor.getServerEncryptionOptions(), localEp, DatabaseDescriptor.getSSLStoragePort())); } catch (IOException e) { throw new ConfigurationException("Unable to create ssl socket", e); } // setReuseAddress happens in the factory. 
logger.info("Starting Encrypted Messaging Service on SSL port {}", DatabaseDescriptor.getSSLStoragePort()); } if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.all) { ServerSocketChannel serverChannel = null; try { serverChannel = ServerSocketChannel.open(); } catch (IOException e) { throw new RuntimeException(e); } ServerSocket socket = serverChannel.socket(); try { socket.setReuseAddress(true); } catch (SocketException e) { FileUtils.closeQuietly(socket); throw new ConfigurationException("Insufficient permissions to setReuseAddress", e); } InetSocketAddress address = new InetSocketAddress(localEp, DatabaseDescriptor.getStoragePort()); try { socket.bind(address,500); } catch (BindException e) { FileUtils.closeQuietly(socket); if (e.getMessage().contains("in use")) throw new ConfigurationException(address + " is in use by another process. Change listen_address:storage_port in cassandra.yaml to values that do not conflict with other services"); else if (e.getMessage().contains("Cannot assign requested address")) throw new ConfigurationException("Unable to bind to address " + address + ". 
Set listen_address in cassandra.yaml to an interface you can bind to, e.g., your private IP address on EC2"); else throw new RuntimeException(e); } catch (IOException e) { FileUtils.closeQuietly(socket); throw new RuntimeException(e); } logger.info("Starting Messaging Service on port {}", DatabaseDescriptor.getStoragePort()); ss.add(socket); } return ss; } public void waitUntilListening() { try { listenGate.await(); } catch (InterruptedException ie) { logger.debug("await interrupted"); } } public boolean isListening() { return listenGate.isSignaled(); } public void destroyConnectionPool(InetAddress to) { OutboundTcpConnectionPool cp = connectionManagers.get(to); if (cp == null) return; cp.close(); connectionManagers.remove(to); } public OutboundTcpConnectionPool getConnectionPool(InetAddress to) { OutboundTcpConnectionPool cp = connectionManagers.get(to); if (cp == null) { cp = new OutboundTcpConnectionPool(to); OutboundTcpConnectionPool existingPool = connectionManagers.putIfAbsent(to, cp); if (existingPool != null) cp = existingPool; else cp.start(); } cp.waitForStarted(); return cp; } public OutboundTcpConnection getConnection(InetAddress to, MessageOut msg) { return getConnectionPool(to).getConnection(msg); } /** * Register a verb and the corresponding verb handler with the * Messaging Service. * * @param verb * @param verbHandler handler for the specified verb */ public void registerVerbHandlers(Verb verb, IVerbHandler verbHandler) { assert !verbHandlers.containsKey(verb); verbHandlers.put(verb, verbHandler); } /** * This method returns the verb handler associated with the registered * verb. If no handler has been registered then null is returned. 
* * @param type for which the verb handler is sought * @return a reference to IVerbHandler which is the handler for the specified verb */ public IVerbHandler getVerbHandler(Verb type) { return verbHandlers.get(type); } public int addCallback(IAsyncCallback cb, MessageOut message, InetAddress to, long timeout, boolean failureCallback) { assert message.verb != Verb.MUTATION; // mutations need to call the overload with a ConsistencyLevel int messageId = nextId(); CallbackInfo previous = callbacks.put(messageId, new CallbackInfo(to, cb, callbackDeserializers.get(message.verb), failureCallback), timeout); assert previous == null : String.format("Callback already exists for id %d! (%s)", messageId, previous); return messageId; } public int addCallback(IAsyncCallback cb, MessageOut<?> message, InetAddress to, long timeout, ConsistencyLevel consistencyLevel, boolean allowHints) { assert message.verb == Verb.MUTATION || message.verb == Verb.COUNTER_MUTATION || message.verb == Verb.PAXOS_COMMIT; int messageId = nextId(); CallbackInfo previous = callbacks.put(messageId, new WriteCallbackInfo(to, cb, message, callbackDeserializers.get(message.verb), consistencyLevel, allowHints), timeout); assert previous == null : String.format("Callback already exists for id %d! (%s)", messageId, previous); return messageId; } private static final AtomicInteger idGen = new AtomicInteger(0); private static int nextId() { return idGen.incrementAndGet(); } public int sendRR(MessageOut message, InetAddress to, IAsyncCallback cb) { return sendRR(message, to, cb, message.getTimeout(), false); } public int sendRRWithFailure(MessageOut message, InetAddress to, IAsyncCallbackWithFailure cb) { return sendRR(message, to, cb, message.getTimeout(), true); } /** * Send a non-mutation message to a given endpoint. This method specifies a callback * which is invoked with the actual response. * * @param message message to be sent. 
* @param to endpoint to which the message needs to be sent * @param cb callback interface which is used to pass the responses or * suggest that a timeout occurred to the invoker of the send(). * @param timeout the timeout used for expiration * @return an reference to message id used to match with the result */ public int sendRR(MessageOut message, InetAddress to, IAsyncCallback cb, long timeout, boolean failureCallback) { int id = addCallback(cb, message, to, timeout, failureCallback); sendOneWay(failureCallback ? message.withParameter(FAILURE_CALLBACK_PARAM, ONE_BYTE) : message, id, to); return id; } /** * Send a mutation message or a Paxos Commit to a given endpoint. This method specifies a callback * which is invoked with the actual response. * Also holds the message (only mutation messages) to determine if it * needs to trigger a hint (uses StorageProxy for that). * * @param message message to be sent. * @param to endpoint to which the message needs to be sent * @param handler callback interface which is used to pass the responses or * suggest that a timeout occurred to the invoker of the send(). * @return an reference to message id used to match with the result */ public int sendRR(MessageOut<?> message, InetAddress to, AbstractWriteResponseHandler<?> handler, boolean allowHints) { int id = addCallback(handler, message, to, message.getTimeout(), handler.consistencyLevel, allowHints); sendOneWay(message.withParameter(FAILURE_CALLBACK_PARAM, ONE_BYTE), id, to); return id; } public void sendOneWay(MessageOut message, InetAddress to) { sendOneWay(message, nextId(), to); } public void sendReply(MessageOut message, int id, InetAddress to) { sendOneWay(message, id, to); } /** * Send a message to a given endpoint. This method adheres to the fire and forget * style messaging. * * @param message messages to be sent. 
* @param to endpoint to which the message needs to be sent */ public void sendOneWay(MessageOut message, int id, InetAddress to) { if (logger.isTraceEnabled()) logger.trace("{} sending {} to {}@{}", FBUtilities.getBroadcastAddress(), message.verb, id, to); if (to.equals(FBUtilities.getBroadcastAddress())) logger.trace("Message-to-self {} going over MessagingService", message); // message sinks are a testing hook for (IMessageSink ms : messageSinks) if (!ms.allowOutgoingMessage(message, id, to)) return; // get pooled connection (really, connection queue) OutboundTcpConnection connection = getConnection(to, message); // write it connection.enqueue(message, id); } public <T> AsyncOneResponse<T> sendRR(MessageOut message, InetAddress to) { AsyncOneResponse<T> iar = new AsyncOneResponse<T>(); sendRR(message, to, iar); return iar; } public void register(ILatencySubscriber subcriber) { subscribers.add(subcriber); } public void clearCallbacksUnsafe() { callbacks.reset(); } /** * Wait for callbacks and don't allow any more to be created (since they could require writing hints) */ public void shutdown() { logger.info("Waiting for messaging service to quiesce"); // We may need to schedule hints on the mutation stage, so it's erroneous to shut down the mutation stage first assert !StageManager.getStage(Stage.MUTATION).isShutdown(); // the important part callbacks.shutdownBlocking(); // attempt to humor tests that try to stop and restart MS try { for (SocketThread th : socketThreads) th.close(); } catch (IOException e) { throw new IOError(e); } } public void receive(MessageIn message, int id, long timestamp, boolean isCrossNodeTimestamp) { TraceState state = Tracing.instance.initializeFromMessage(message); if (state != null) state.trace("{} message received from {}", message.verb, message.from); // message sinks are a testing hook for (IMessageSink ms : messageSinks) if (!ms.allowIncomingMessage(message, id)) return; Runnable runnable = new MessageDeliveryTask(message, id, 
timestamp, isCrossNodeTimestamp); TracingAwareExecutorService stage = StageManager.getStage(message.getMessageType()); assert stage != null : "No stage for message type " + message.verb; stage.execute(runnable, state); } public void setCallbackForTests(int messageId, CallbackInfo callback) { callbacks.put(messageId, callback); } public CallbackInfo getRegisteredCallback(int messageId) { return callbacks.get(messageId); } public CallbackInfo removeRegisteredCallback(int messageId) { return callbacks.remove(messageId); } /** * @return System.nanoTime() when callback was created. */ public long getRegisteredCallbackAge(int messageId) { return callbacks.getAge(messageId); } public static void validateMagic(int magic) throws IOException { if (magic != PROTOCOL_MAGIC) throw new IOException("invalid protocol header"); } public static int getBits(int packed, int start, int count) { return packed >>> (start + 1) - count & ~(-1 << count); } public boolean areAllNodesAtLeast22() { return allNodesAtLeast22; } public boolean areAllNodesAtLeast30() { return allNodesAtLeast30; } /** * @return the last version associated with address, or @param version if this is the first such version */ public int setVersion(InetAddress endpoint, int version) { logger.debug("Setting version {} for {}", version, endpoint); if (version < VERSION_22) allNodesAtLeast22 = false; if (version < VERSION_30) allNodesAtLeast30 = false; Integer v = versions.put(endpoint, version); // if the version was increased to 2.2 or later see if the min version across the cluster has changed if (v != null && (v < VERSION_30 && version >= VERSION_22)) refreshAllNodeMinVersions(); return v == null ? 
version : v; } public void resetVersion(InetAddress endpoint) { logger.debug("Resetting version for {}", endpoint); Integer removed = versions.remove(endpoint); if (removed != null && removed <= VERSION_30) refreshAllNodeMinVersions(); } private void refreshAllNodeMinVersions() { boolean anyNodeLowerThan30 = false; for (Integer version : versions.values()) { if (version < MessagingService.VERSION_30) { anyNodeLowerThan30 = true; allNodesAtLeast30 = false; } if (version < MessagingService.VERSION_22) { allNodesAtLeast22 = false; return; } } allNodesAtLeast22 = true; allNodesAtLeast30 = !anyNodeLowerThan30; } public int getVersion(InetAddress endpoint) { Integer v = versions.get(endpoint); if (v == null) { // we don't know the version. assume current. we'll know soon enough if that was incorrect. logger.trace("Assuming current protocol version for {}", endpoint); return MessagingService.current_version; } else return Math.min(v, MessagingService.current_version); } public int getVersion(String endpoint) throws UnknownHostException { return getVersion(InetAddress.getByName(endpoint)); } public int getRawVersion(InetAddress endpoint) { Integer v = versions.get(endpoint); if (v == null) throw new IllegalStateException("getRawVersion() was called without checking knowsVersion() result first"); return v; } public boolean knowsVersion(InetAddress endpoint) { return versions.containsKey(endpoint); } public void incrementDroppedMessages(Verb verb) { incrementDroppedMessages(verb, false); } public void incrementDroppedMessages(Verb verb, boolean isCrossNodeTimeout) { assert DROPPABLE_VERBS.contains(verb) : "Verb " + verb + " should not legally be dropped"; incrementDroppedMessages(droppedMessagesMap.get(verb), isCrossNodeTimeout); } private void incrementDroppedMessages(DroppedMessages droppedMessages, boolean isCrossNodeTimeout) { droppedMessages.metrics.dropped.mark(); if (isCrossNodeTimeout) droppedMessages.droppedCrossNodeTimeout.incrementAndGet(); else 
droppedMessages.droppedInternalTimeout.incrementAndGet(); } private void logDroppedMessages() { List<String> logs = getDroppedMessagesLogs(); for (String log : logs) logger.info(log); if (logs.size() > 0) StatusLogger.log(); } @VisibleForTesting List<String> getDroppedMessagesLogs() { List<String> ret = new ArrayList<>(); for (Map.Entry<Verb, DroppedMessages> entry : droppedMessagesMap.entrySet()) { Verb verb = entry.getKey(); DroppedMessages droppedMessages = entry.getValue(); int droppedInternalTimeout = droppedMessages.droppedInternalTimeout.getAndSet(0); int droppedCrossNodeTimeout = droppedMessages.droppedCrossNodeTimeout.getAndSet(0); if (droppedInternalTimeout > 0 || droppedCrossNodeTimeout > 0) { ret.add(String.format("%s messages were dropped in last %d ms: %d for internal timeout and %d for cross node timeout", verb, LOG_DROPPED_INTERVAL_IN_MS, droppedInternalTimeout, droppedCrossNodeTimeout)); } } return ret; } private static class SocketThread extends Thread { private final ServerSocket server; private final Set<Closeable> connections = Sets.newConcurrentHashSet(); SocketThread(ServerSocket server, String name) { super(name); this.server = server; } @SuppressWarnings("resource") public void run() { while (!server.isClosed()) { Socket socket = null; try { socket = server.accept(); if (!authenticate(socket)) { logger.debug("remote failed to authenticate"); socket.close(); continue; } socket.setKeepAlive(true); socket.setSoTimeout(2 * OutboundTcpConnection.WAIT_FOR_VERSION_MAX_TIME); // determine the connection type to decide whether to buffer DataInputStream in = new DataInputStream(socket.getInputStream()); MessagingService.validateMagic(in.readInt()); int header = in.readInt(); boolean isStream = MessagingService.getBits(header, 3, 1) == 1; int version = MessagingService.getBits(header, 15, 8); logger.debug("Connection version {} from {}", version, socket.getInetAddress()); socket.setSoTimeout(0); Thread thread = isStream ? 
new IncomingStreamingConnection(version, socket, connections) : new IncomingTcpConnection(version, MessagingService.getBits(header, 2, 1) == 1, socket, connections); thread.start(); connections.add((Closeable) thread); } catch (AsynchronousCloseException e) { // this happens when another thread calls close(). logger.debug("Asynchronous close seen by server thread"); break; } catch (ClosedChannelException e) { logger.debug("MessagingService server thread already closed"); break; } catch (IOException e) { logger.debug("Error reading the socket " + socket, e); FileUtils.closeQuietly(socket); } } logger.info("MessagingService has terminated the accept() thread"); } void close() throws IOException { logger.debug("Closing accept() thread"); try { server.close(); } catch (IOException e) { // dirty hack for clean shutdown on OSX w/ Java >= 1.8.0_20 // see https://issues.apache.org/jira/browse/CASSANDRA-8220 // see https://bugs.openjdk.java.net/browse/JDK-8050499 if (!"Unknown error: 316".equals(e.getMessage()) || !"Mac OS X".equals(System.getProperty("os.name"))) throw e; } for (Closeable connection : connections) { connection.close(); } } private boolean authenticate(Socket socket) { return DatabaseDescriptor.getInternodeAuthenticator().authenticate(socket.getInetAddress(), socket.getPort()); } } public Map<String, Integer> getLargeMessagePendingTasks() { Map<String, Integer> pendingTasks = new HashMap<String, Integer>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().largeMessages.getPendingMessages()); return pendingTasks; } public int getLargeMessagePendingTasks(InetAddress address) { OutboundTcpConnectionPool connection = connectionManagers.get(address); return connection == null ? 
0 : connection.largeMessages.getPendingMessages(); } public Map<String, Long> getLargeMessageCompletedTasks() { Map<String, Long> completedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().largeMessages.getCompletedMesssages()); return completedTasks; } public Map<String, Long> getLargeMessageDroppedTasks() { Map<String, Long> droppedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) droppedTasks.put(entry.getKey().getHostAddress(), entry.getValue().largeMessages.getDroppedMessages()); return droppedTasks; } public Map<String, Integer> getSmallMessagePendingTasks() { Map<String, Integer> pendingTasks = new HashMap<String, Integer>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().smallMessages.getPendingMessages()); return pendingTasks; } public Map<String, Long> getSmallMessageCompletedTasks() { Map<String, Long> completedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().smallMessages.getCompletedMesssages()); return completedTasks; } public Map<String, Long> getSmallMessageDroppedTasks() { Map<String, Long> droppedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) droppedTasks.put(entry.getKey().getHostAddress(), entry.getValue().smallMessages.getDroppedMessages()); return droppedTasks; } public Map<String, Integer> getGossipMessagePendingTasks() { Map<String, Integer> 
pendingTasks = new HashMap<String, Integer>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().gossipMessages.getPendingMessages()); return pendingTasks; } public Map<String, Long> getGossipMessageCompletedTasks() { Map<String, Long> completedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().gossipMessages.getCompletedMesssages()); return completedTasks; } public Map<String, Long> getGossipMessageDroppedTasks() { Map<String, Long> droppedTasks = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet()) droppedTasks.put(entry.getKey().getHostAddress(), entry.getValue().gossipMessages.getDroppedMessages()); return droppedTasks; } public Map<String, Integer> getDroppedMessages() { Map<String, Integer> map = new HashMap<>(droppedMessagesMap.size()); for (Map.Entry<Verb, DroppedMessages> entry : droppedMessagesMap.entrySet()) map.put(entry.getKey().toString(), (int) entry.getValue().metrics.dropped.getCount()); return map; } public long getTotalTimeouts() { return ConnectionMetrics.totalTimeouts.getCount(); } public Map<String, Long> getTimeoutsPerHost() { Map<String, Long> result = new HashMap<String, Long>(connectionManagers.size()); for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry: connectionManagers.entrySet()) { String ip = entry.getKey().getHostAddress(); long recent = entry.getValue().getTimeouts(); result.put(ip, recent); } return result; } public static IPartitioner globalPartitioner() { return StorageService.instance.getTokenMetadata().partitioner; } public static void validatePartitioner(Collection<? 
extends AbstractBounds<?>> allBounds) { for (AbstractBounds<?> bounds : allBounds) validatePartitioner(bounds); } public static void validatePartitioner(AbstractBounds<?> bounds) { if (globalPartitioner() != bounds.left.getPartitioner()) throw new AssertionError(String.format("Partitioner in bounds serialization. Expected %s, was %s.", globalPartitioner().getClass().getName(), bounds.left.getPartitioner().getClass().getName())); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.groupby;

import com.google.common.base.Function;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import org.apache.druid.collections.NonBlockingPool;
import org.apache.druid.collections.ResourceHolder;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.guice.annotations.Global;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.guava.BaseSequence;
import org.apache.druid.java.util.common.guava.CloseQuietly;
import org.apache.druid.java.util.common.guava.FunctionalIterator;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.DimensionDictionarySelector;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.data.IndexedInts;
import org.apache.druid.segment.filter.Filters;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import java.io.Closeable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.TreeMap;

/**
 * The "v1" groupBy query engine. Aggregates per-segment results into a single
 * off-heap {@link ByteBuffer} taken from a shared pool: each distinct
 * combination of dimension dictionary ids becomes a key, and each key is
 * assigned a fixed-width slice of the buffer where the
 * {@link BufferAggregator}s keep their intermediate state.
 *
 * NOTE(review): only supports STRING output dimensions and selectors with a
 * known value cardinality (enforced in RowIterator's constructor).
 */
public class GroupByQueryEngine
{
  // Sentinel dictionary id written into a key when a row has no value for a
  // dimension; recognized again when the key is decoded back into an event.
  private static final int MISSING_VALUE = -1;

  private final Supplier<GroupByQueryConfig> config;
  private final NonBlockingPool<ByteBuffer> intermediateResultsBufferPool;

  @Inject
  public GroupByQueryEngine(
      Supplier<GroupByQueryConfig> config,
      @Global NonBlockingPool<ByteBuffer> intermediateResultsBufferPool
  )
  {
    this.config = config;
    this.intermediateResultsBufferPool = intermediateResultsBufferPool;
  }

  /**
   * Runs the query against a single storage adapter and returns a lazy
   * sequence of result rows.
   *
   * The intermediate-results buffer is taken from the pool up front and
   * released via the sequence's baggage when the sequence is fully consumed
   * or closed, so callers must close/consume the returned sequence.
   */
  public Sequence<Row> process(final GroupByQuery query, final StorageAdapter storageAdapter)
  {
    if (storageAdapter == null) {
      throw new ISE(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
      );
    }

    final List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    if (intervals.size() != 1) {
      throw new IAE("Should only have one interval, got[%s]", intervals);
    }

    Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimFilter()));

    final Sequence<Cursor> cursors = storageAdapter.makeCursors(
        filter,
        intervals.get(0),
        query.getVirtualColumns(),
        query.getGranularity(),
        false,
        null
    );

    // One pooled buffer is shared by the RowIterators of every cursor; the
    // holder is closed by the Closeable baggage below.
    final ResourceHolder<ByteBuffer> bufferHolder = intermediateResultsBufferPool.take();

    return Sequences.concat(
        Sequences.withBaggage(
            Sequences.map(
                cursors,
                new Function<Cursor, Sequence<Row>>()
                {
                  @Override
                  public Sequence<Row> apply(final Cursor cursor)
                  {
                    return new BaseSequence<>(
                        new BaseSequence.IteratorMaker<Row, RowIterator>()
                        {
                          @Override
                          public RowIterator make()
                          {
                            return new RowIterator(query, cursor, bufferHolder.get(), config.get());
                          }

                          @Override
                          public void cleanup(RowIterator iterFromMake)
                          {
                            CloseQuietly.close(iterFromMake);
                          }
                        }
                    );
                  }
                }
            ),
            new Closeable()
            {
              @Override
              public void close()
              {
                // Returns the intermediate buffer to the pool.
                CloseQuietly.close(bufferHolder);
              }
            }
        )
    );
  }

  /**
   * Maintains the mapping from a grouping key (a ByteBuffer of concatenated
   * int dictionary ids, one per dimension) to the offset of that key's
   * aggregation slot inside {@code metricValues}.
   */
  private static class RowUpdater
  {
    private final ByteBuffer metricValues;
    private final BufferAggregator[] aggregators;
    private final PositionMaintainer positionMaintainer;
    // TreeMap so getPositions() iterates keys in sorted (byte-lexicographic) order.
    private final Map<ByteBuffer, Integer> positions = new TreeMap<>();
    // GroupBy queries tend to do a lot of reads from this. We co-store a hash map to make those reads go faster.
    private final Map<ByteBuffer, Integer> positionsHash = new HashMap<>();

    public RowUpdater(
        ByteBuffer metricValues,
        BufferAggregator[] aggregators,
        PositionMaintainer positionMaintainer
    )
    {
      this.metricValues = metricValues;
      this.aggregators = aggregators;
      this.positionMaintainer = positionMaintainer;
    }

    public int getNumRows()
    {
      return positions.size();
    }

    public Map<ByteBuffer, Integer> getPositions()
    {
      return positions;
    }

    /**
     * Recursively expands the current cursor row across the remaining
     * dimensions (multi-valued dimensions fan out into one key per value),
     * then aggregates into the slot for each completed key.
     *
     * @param key  partially-built grouping key; each recursion level appends
     *             one int dictionary id
     * @param dims dimensions not yet encoded into {@code key}
     * @return null when everything was aggregated; otherwise the list of
     *         completed keys that could NOT be aggregated because the buffer
     *         ran out of room (callers must retry them on a fresh pass)
     */
    @Nullable
    private List<ByteBuffer> updateValues(ByteBuffer key, List<DimensionSelector> dims)
    {
      if (dims.size() > 0) {
        final DimensionSelector dimSelector = dims.get(0);
        final IndexedInts row = dimSelector.getRow();
        final int rowSize = row.size();
        if (rowSize == 0) {
          // No value for this dimension in this row: encode the sentinel.
          ByteBuffer newKey = key.duplicate();
          newKey.putInt(MISSING_VALUE);
          return updateValues(newKey, dims.subList(1, dims.size()));
        } else {
          List<ByteBuffer> retVal = null;
          for (int i = 0; i < rowSize; i++) {
            // duplicate() shares the underlying bytes but gives each branch
            // its own position, so sibling values don't clobber each other.
            ByteBuffer newKey = key.duplicate();
            int dimValue = row.get(i);
            newKey.putInt(dimValue);
            List<ByteBuffer> unaggregatedBuffers = updateValues(newKey, dims.subList(1, dims.size()));
            if (unaggregatedBuffers != null) {
              if (retVal == null) {
                retVal = new ArrayList<>();
              }
              retVal.addAll(unaggregatedBuffers);
            }
          }
          return retVal;
        }
      } else {
        // Key is complete: find (or allocate) its aggregation slot.
        key.clear();
        Integer position = positionsHash.get(key);
        int[] increments = positionMaintainer.getIncrements();
        int thePosition;

        if (position == null) {
          // First time we see this key: snapshot it into its own array so the
          // map key is stable, then claim the next slot and init aggregators.
          ByteBuffer keyCopy = ByteBuffer.allocate(key.limit());
          keyCopy.put(key.asReadOnlyBuffer());
          keyCopy.clear();

          position = positionMaintainer.getNext();
          if (position == null) {
            // Buffer exhausted — hand the key back to be retried later.
            return Collections.singletonList(keyCopy);
          }

          positions.put(keyCopy, position);
          positionsHash.put(keyCopy, position);
          thePosition = position;
          for (int i = 0; i < aggregators.length; ++i) {
            aggregators[i].init(metricValues, thePosition);
            thePosition += increments[i];
          }
        }

        // Aggregate the current cursor row into each aggregator's sub-slot.
        thePosition = position;
        for (int i = 0; i < aggregators.length; ++i) {
          aggregators[i].aggregate(metricValues, thePosition);
          thePosition += increments[i];
        }
        return null;
      }
    }
  }

  /**
   * Hands out fixed-width buffer offsets, one per grouping key. Each slot is
   * {@code increment} bytes wide (the sum of every aggregator's intermediate
   * size); {@link #getNext()} returns null once the buffer is full.
   */
  private static class PositionMaintainer
  {
    private final int[] increments;
    private final int increment;
    private final int max;

    // long so that nextVal can step past Integer.MAX_VALUE without wrapping
    // before the > max check fires.
    private long nextVal;

    public PositionMaintainer(
        int start,
        int[] increments,
        int max
    )
    {
      this.nextVal = (long) start;
      this.increments = increments;

      int theIncrement = 0;
      for (int inc : increments) {
        theIncrement += inc;
      }
      increment = theIncrement;

      this.max = max - increment; // Make sure there is enough room for one more increment
    }

    @Nullable
    public Integer getNext()
    {
      if (nextVal > max) {
        return null;
      } else {
        int retVal = (int) nextVal;
        nextVal += increment;
        return retVal;
      }
    }

    public int getIncrement()
    {
      return increment;
    }

    public int[] getIncrements()
    {
      return increments;
    }
  }

  /**
   * Iterates the cursor in batches of at most {@code maxIntermediateRows}
   * distinct keys; each call to {@link #next()} may aggregate a whole batch
   * and then streams the batch's rows out through {@code delegate}.
   */
  private static class RowIterator implements CloseableIterator<Row>
  {
    private final GroupByQuery query;
    private final Cursor cursor;
    private final ByteBuffer metricsBuffer;
    private final int maxIntermediateRows;

    private final List<DimensionSelector> dimensions;
    private final ArrayList<String> dimNames;
    private final BufferAggregator[] aggregators;
    private final String[] metricNames;
    private final int[] sizesRequired;

    // Keys from the row being processed when the buffer filled up; they are
    // re-aggregated at the start of the next batch before advancing.
    @Nullable
    private List<ByteBuffer> unprocessedKeys;
    private Iterator<Row> delegate;

    public RowIterator(GroupByQuery query, final Cursor cursor, ByteBuffer metricsBuffer, GroupByQueryConfig config)
    {
      final GroupByQueryConfig querySpecificConfig = config.withOverrides(query);

      this.query = query;
      this.cursor = cursor;
      this.metricsBuffer = metricsBuffer;
      this.maxIntermediateRows = querySpecificConfig.getMaxIntermediateRows();

      unprocessedKeys = null;
      delegate = Collections.emptyIterator();

      List<DimensionSpec> dimensionSpecs = query.getDimensions();
      dimensions = Lists.newArrayListWithExpectedSize(dimensionSpecs.size());
      dimNames = Lists.newArrayListWithExpectedSize(dimensionSpecs.size());

      for (final DimensionSpec dimSpec : dimensionSpecs) {
        if (dimSpec.getOutputType() != ValueType.STRING) {
          throw new UnsupportedOperationException(
              "GroupBy v1 only supports dimensions with an outputType of STRING."
          );
        }

        final DimensionSelector selector = cursor.getColumnSelectorFactory().makeDimensionSelector(dimSpec);
        if (selector.getValueCardinality() == DimensionDictionarySelector.CARDINALITY_UNKNOWN) {
          throw new UnsupportedOperationException(
              "GroupBy v1 does not support dimension selectors with unknown cardinality.");
        }
        dimensions.add(selector);
        dimNames.add(dimSpec.getOutputName());
      }

      List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
      aggregators = new BufferAggregator[aggregatorSpecs.size()];
      metricNames = new String[aggregatorSpecs.size()];
      sizesRequired = new int[aggregatorSpecs.size()];
      for (int i = 0; i < aggregatorSpecs.size(); ++i) {
        AggregatorFactory aggregatorSpec = aggregatorSpecs.get(i);
        aggregators[i] = aggregatorSpec.factorizeBuffered(cursor.getColumnSelectorFactory());
        metricNames[i] = aggregatorSpec.getName();
        sizesRequired[i] = aggregatorSpec.getMaxIntermediateSizeWithNulls();
      }
    }

    @Override
    public boolean hasNext()
    {
      // More rows exist if the current batch isn't drained or the cursor
      // still has data to aggregate into a new batch.
      return delegate.hasNext() || !cursor.isDone();
    }

    @Override
    public Row next()
    {
      if (delegate.hasNext()) {
        return delegate.next();
      }

      if (unprocessedKeys == null && cursor.isDone()) {
        throw new NoSuchElementException();
      }

      // Start a fresh batch: positions/aggregation state are rebuilt from
      // the beginning of the (reused) metrics buffer.
      final PositionMaintainer positionMaintainer = new PositionMaintainer(0, sizesRequired, metricsBuffer.remaining());
      final RowUpdater rowUpdater = new RowUpdater(metricsBuffer, aggregators, positionMaintainer);
      if (unprocessedKeys != null) {
        // Re-aggregate the keys that overflowed the previous batch, then
        // advance past the row they came from.
        for (ByteBuffer key : unprocessedKeys) {
          final List<ByteBuffer> unprocUnproc = rowUpdater.updateValues(key, ImmutableList.of());
          if (unprocUnproc != null) {
            throw new ISE("Not enough memory to process the request.");
          }
        }
        cursor.advance();
      }
      while (!cursor.isDone() && rowUpdater.getNumRows() < maxIntermediateRows) {
        ByteBuffer key = ByteBuffer.allocate(dimensions.size() * Integer.BYTES);

        unprocessedKeys = rowUpdater.updateValues(key, dimensions);
        if (unprocessedKeys != null) {
          // Buffer full mid-row: stop here and emit what we have; the
          // leftover keys are retried on the next batch.
          break;
        }

        cursor.advance();
      }

      if (rowUpdater.getPositions().isEmpty() && unprocessedKeys != null) {
        throw new ISE(
            "Not enough memory to process even a single item.  Required [%,d] memory, but only have[%,d]",
            positionMaintainer.getIncrement(), metricsBuffer.remaining()
        );
      }

      delegate = FunctionalIterator
          .create(rowUpdater.getPositions().entrySet().iterator())
          .transform(
              new Function<Map.Entry<ByteBuffer, Integer>, Row>()
              {
                private final DateTime timestamp = cursor.getTime();
                private final int[] increments = positionMaintainer.getIncrements();

                @Override
                public Row apply(@Nullable Map.Entry<ByteBuffer, Integer> input)
                {
                  // Decode the key back into dimension values, then read each
                  // aggregator's final value out of its buffer sub-slot.
                  Map<String, Object> theEvent = Maps.newLinkedHashMap();

                  ByteBuffer keyBuffer = input.getKey().duplicate();
                  for (int i = 0; i < dimensions.size(); ++i) {
                    final DimensionSelector dimSelector = dimensions.get(i);
                    final int dimVal = keyBuffer.getInt();
                    if (MISSING_VALUE != dimVal) {
                      theEvent.put(dimNames.get(i), dimSelector.lookupName(dimVal));
                    }
                  }

                  int position = input.getValue();
                  for (int i = 0; i < aggregators.length; ++i) {
                    theEvent.put(metricNames[i], aggregators[i].get(metricsBuffer, position));
                    position += increments[i];
                  }

                  for (PostAggregator postAggregator : query.getPostAggregatorSpecs()) {
                    theEvent.put(postAggregator.getName(), postAggregator.compute(theEvent));
                  }

                  return new MapBasedRow(timestamp, theEvent);
                }
              }
          );

      return delegate.next();
    }

    @Override
    public void remove()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public void close()
    {
      // cleanup
      for (BufferAggregator agg : aggregators) {
        agg.close();
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.creadur.whisker.scan;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;

/**
 * Scans directories for resources, within a file system.
 */
public class FromFileSystem {

    /**
     * Base constructor.
     */
    public FromFileSystem() {
        super();
    }

    /**
     * Builds a description based on the given directory.
     *
     * @param base names the base directory, not null
     * @return collected directories within the base, not null
     * @throws IOException when the scanning fails
     */
    public Collection<Directory> withBase(final String base)
            throws IOException {
        return new Builder(base).build();
    }

    /**
     * Builds a description of a file system by walking a work queue of
     * directories breadth-first, starting at the base.
     */
    private static final class Builder {

        /** Initial capacity for the backing array. */
        private static final int DEFAULT_INITIAL_CAPACITY = 64;

        /** Directory scanning base. */
        private final File base;
        /** Directories scanned, ordered by Directory's natural ordering. */
        private final Set<Directory> directories;
        /** Queues work not yet complete. */
        private final Queue<Work> workInProgress;
        /** Stores work done, used to avoid re-scanning a directory. */
        private final Collection<Work> workDone;

        /**
         * Constructs a builder with given base
         * (and default backing array).
         * @param base not null
         */
        public Builder(final String base) {
            this(base, DEFAULT_INITIAL_CAPACITY);
        }

        /**
         * Constructs a builder.
         * @param base not null
         * @param initialCapacity initial capacity for backing array
         */
        public Builder(final String base, final int initialCapacity) {
            super();
            this.base = new File(base);
            directories = new TreeSet<Directory>();
            workInProgress = new LinkedList<Work>();
            workDone = new ArrayList<Work>(initialCapacity);
        }

        /**
         * Builds directories.
         * @return not null
         * @throws IOException when scanning fails
         */
        public Collection<Directory> build() throws IOException {
            put(base).andWork().untilDone();
            return directories;
        }

        /**
         * Waits until work is done.
         * Intentionally a no-op: {@link #andWork()} drains the queue
         * synchronously, so by the time this runs everything is finished.
         */
        private void untilDone() {
        }

        /**
         * Adds file work to the queue.
         * @param file not null
         * @return this, not null
         */
        private Builder put(final File file) {
            return put(new Work(file));
        }

        /**
         * Queues work, unless it is null or has already been done.
         * (Fixed: previously, already-done work was reported on stdout;
         * a library should not write debug noise to System.out.)
         * @param work possibly null; null work is silently skipped
         * @return this, not null
         */
        private Builder put(final Work work) {
            if (work != null && !workDone.contains(work)) {
                this.workInProgress.add(work);
            }
            return this;
        }

        /**
         * Drains the queue, scanning each directory in turn.
         * Work discovered while scanning is appended to the queue.
         * @return this, not null
         */
        private Builder andWork() {
            while (!workInProgress.isEmpty()) {
                workDone.add(workOn(workInProgress.poll()));
            }
            return this;
        }

        /**
         * Performs work: queues any subdirectories and records the
         * directory's description.
         * @param next not null
         * @return the work done, not null
         */
        private Work workOn(final Work next) {
            for (final String name : next.contents()) {
                put(next.whenDirectory(name));
            }
            directories.add(next.build());
            return next;
        }

        /**
         * Computes the contents of a directory.
         */
        private static final class Work {

            /** Represents the base directory. */
            private static final String BASE_DIRECTORY = ".";

            /** Names the directory, as a path relative to the base. */
            private final String name;
            /** The directory worked on. */
            private final File file;
            /**
             * Cached directory listing; lazily populated so the directory is
             * read from disk once. (Fixed: the listing was previously
             * recomputed on every call, doing double I/O per directory and
             * risking an inconsistent snapshot if the directory changed
             * between calls.)
             */
            private String[] contents;

            /**
             * Constructs work for the base directory.
             * @param file not null
             */
            public Work(final File file) {
                this(BASE_DIRECTORY, file);
            }

            /**
             * Constructs work.
             * @param name not null
             * @param file not null, must exist and be a directory
             */
            public Work(final String name, final File file) {
                if (!file.exists()) {
                    throw new IllegalArgumentException(
                            "Expected '" + file.getAbsolutePath()
                                + "' to exist");
                }
                if (!file.isDirectory()) {
                    throw new IllegalArgumentException(
                            "Expected '" + file.getAbsolutePath()
                                + "' to be a directory");
                }
                this.name = name;
                this.file = file;
            }

            /**
             * Gets the contents of the work directory, listing the
             * file system only on first call.
             * @return not null
             */
            public String[] contents() {
                if (contents == null) {
                    contents = file.list();
                    if (contents == null) {
                        throw new IllegalArgumentException(
                                "Cannot list content of " + file);
                    }
                }
                return contents;
            }

            /**
             * Builds a description of this directory, adding every
             * non-directory entry as a resource.
             * @return not null
             */
            public Directory build() {
                final Directory result = new Directory().setName(name);
                for (final String entry : contents()) {
                    if (isResource(entry)) {
                        result.addResource(entry);
                    }
                }
                return result;
            }

            /**
             * Is the named file a resource?
             * @param name not null
             * @return true when the named file is a resource,
             * false otherwise
             */
            private boolean isResource(final String name) {
                return !isDirectory(name);
            }

            /**
             * Is the named file a directory?
             * @param name not null
             * @return true when the named file is a directory,
             * false otherwise
             */
            private boolean isDirectory(final String name) {
                return file(name).isDirectory();
            }

            /**
             * Creates new work.
             * @param name not null
             * @return work for the named directory,
             * or null when the resource named is not a directory
             */
            public Work whenDirectory(final String name) {
                final File child = file(name);
                if (child.isDirectory()) {
                    return new Work(path(name), child);
                }
                return null;
            }

            /**
             * Converts a name to a path relative to base.
             * @param name not null
             * @return not null
             */
            private String path(final String name) {
                if (isBaseDirectory()) {
                    return name;
                }
                return this.name + "/" + name;
            }

            /**
             * Is this the work done in the base directory?
             * @return true when this is the base, false otherwise.
             */
            private boolean isBaseDirectory() {
                return BASE_DIRECTORY.equals(this.name);
            }

            /**
             * Creates a file within this directory.
             * @param name not null
             * @return file with given name
             */
            private File file(final String name) {
                return new File(this.file, name);
            }

            /**
             * Computes a hash consistent with {@link #equals(Object)},
             * based on name and file.
             * @return a hash code
             * @see java.lang.Object#hashCode()
             */
            @Override
            public int hashCode() {
                final int prime = 31;
                int result = 1;
                result = prime * result
                        + ((file == null) ? 0 : file.hashCode());
                result = prime * result
                        + ((name == null) ? 0 : name.hashCode());
                return result;
            }

            /**
             * Equal when both name and file are equal.
             * @param obj possibly null
             * @return true when equal, false otherwise
             * @see java.lang.Object#equals(java.lang.Object)
             */
            @Override
            public boolean equals(final Object obj) {
                if (this == obj) {
                    return true;
                }
                if (obj == null || getClass() != obj.getClass()) {
                    return false;
                }
                final Work other = (Work) obj;
                if (file == null) {
                    if (other.file != null) {
                        return false;
                    }
                } else if (!file.equals(other.file)) {
                    return false;
                }
                if (name == null) {
                    if (other.name != null) {
                        return false;
                    }
                } else if (!name.equals(other.name)) {
                    return false;
                }
                return true;
            }

            /**
             * Something suitable for logging.
             * @return not null
             * @see java.lang.Object#toString()
             */
            @Override
            public String toString() {
                return "Work [name=" + name + ", file=" + file + "]";
            }
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.openapi.wm.impl;

import com.intellij.diagnostic.LoadingState;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfoRt;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.ui.BalloonLayout;
import com.intellij.util.ui.EdtInvocationManager;
import com.intellij.util.ui.JBInsets;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.accessibility.AccessibleContext;
import javax.swing.*;
import java.awt.*;
import java.util.Objects;

/**
 * The top-level project frame. Most behavior is delegated to an optional
 * {@link FrameHelper}; when no helper is installed, the plain JFrame behavior
 * applies.
 */
@ApiStatus.Internal
public final class IdeFrameImpl extends JFrame implements IdeFrame, DataProvider {
  public static final String NORMAL_STATE_BOUNDS = "normalBounds";

  // When Boolean.TRUE is set under this key, 'resizing' events must be ignored
  // so the stored 'normal bounds' of the frame are not spoiled.
  public static final String TOGGLING_FULL_SCREEN_IN_PROGRESS = "togglingFullScreenInProgress";

  @Nullable
  private FrameHelper frameHelper;
  @Nullable
  private FrameDecorator frameDecorator;

  @Nullable
  @Override
  public Object getData(@NotNull String dataId) {
    FrameHelper helper = frameHelper;
    if (helper == null) {
      return null;
    }
    return helper.getData(dataId);
  }

  @Nullable
  FrameHelper getFrameHelper() {
    return frameHelper;
  }

  /** Delegate supplying frame behavior (title, project, disposal, data). */
  interface FrameHelper extends DataProvider {
    @Nls
    String getAccessibleName();

    void dispose();

    void setTitle(@Nullable String title);

    void updateView();

    @Nullable
    Project getProject();

    @NotNull
    IdeFrame getHelper();
  }

  /** Optional hooks for platform-specific frame decoration. */
  interface FrameDecorator {
    boolean isInFullScreen();

    default void frameInit() {
    }

    default void frameShow() {
    }
  }

  @Override
  public void addNotify() {
    super.addNotify();
    FrameDecorator decorator = frameDecorator;
    if (decorator != null) {
      decorator.frameInit();
    }
  }

  // expose setRootPane
  @Override
  public void setRootPane(JRootPane root) {
    super.setRootPane(root);
  }

  void setFrameHelper(@Nullable FrameHelper frameHelper, @Nullable FrameDecorator frameDecorator) {
    this.frameHelper = frameHelper;
    this.frameDecorator = frameDecorator;
  }

  @Override
  public AccessibleContext getAccessibleContext() {
    // Lazily create and cache the accessible context.
    if (accessibleContext == null) {
      accessibleContext = new AccessibleIdeFrameImpl();
    }
    return accessibleContext;
  }

  @Override
  public void setTitle(@Nullable String title) {
    FrameHelper helper = frameHelper;
    if (helper != null) {
      helper.setTitle(title);
    }
    else {
      super.setTitle(title);
    }
  }

  // Direct access to JFrame's title, bypassing the helper.
  void doSetTitle(String value) {
    super.setTitle(value);
  }

  @Override
  public void setExtendedState(int state) {
    // do not load FrameInfoHelper class
    boolean normalToMaximized = LoadingState.COMPONENTS_REGISTERED.isOccurred()
                                && getExtendedState() == Frame.NORMAL
                                && FrameInfoHelper.isMaximized(state);
    if (normalToMaximized) {
      // Remember pre-maximize bounds so they can be restored later.
      getRootPane().putClientProperty(NORMAL_STATE_BOUNDS, getBounds());
    }
    super.setExtendedState(state);
  }

  @Override
  public void paint(@NotNull Graphics g) {
    if (LoadingState.COMPONENTS_REGISTERED.isOccurred()) {
      UISettings.setupAntialiasing(g);
    }
    super.paint(g);
  }

  @Override
  @SuppressWarnings({"SSBasedInspection", "deprecation"})
  public void show() {
    super.show();
    SwingUtilities.invokeLater(() -> {
      setFocusableWindowState(true);
      FrameDecorator decorator = frameDecorator;
      if (decorator != null) {
        decorator.frameShow();
      }
    });
  }

  @NotNull
  @Override
  public Insets getInsets() {
    // In macOS full screen the frame has no real insets.
    if (SystemInfoRt.isMac && isInFullScreen()) {
      return JBInsets.emptyInsets();
    }
    return super.getInsets();
  }

  @Override
  public boolean isInFullScreen() {
    FrameDecorator decorator = frameDecorator;
    return decorator != null && decorator.isInFullScreen();
  }

  @Override
  public void dispose() {
    FrameHelper helper = frameHelper;
    if (helper != null) {
      helper.dispose();
    }
    else {
      doDispose();
    }
  }

  void doDispose() {
    EdtInvocationManager.invokeLaterIfNeeded(() -> super.dispose());
  }

  protected final class AccessibleIdeFrameImpl extends AccessibleJFrame {
    @Override
    public String getAccessibleName() {
      FrameHelper helper = frameHelper;
      if (helper == null) {
        return super.getAccessibleName();
      }
      return helper.getAccessibleName();
    }
  }

  @Nullable
  public static Window getActiveFrame() {
    for (Frame each : Frame.getFrames()) {
      if (each.isActive()) {
        return each;
      }
    }
    return null;
  }

  /**
   * @deprecated Use {@link ProjectFrameHelper#getProject()} instead.
   */
  @Override
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2020.1")
  public Project getProject() {
    FrameHelper helper = frameHelper;
    if (helper == null) {
      return null;
    }
    return helper.getProject();
  }

  // deprecated stuff - as IdeFrame must be implemented (a lot of instanceof checks for JFrame)

  @Nullable
  @Override
  public StatusBar getStatusBar() {
    FrameHelper helper = frameHelper;
    if (helper == null) {
      return null;
    }
    return helper.getHelper().getStatusBar();
  }

  @NotNull
  @Override
  public Rectangle suggestChildFrameBounds() {
    return Objects.requireNonNull(frameHelper).getHelper().suggestChildFrameBounds();
  }

  @Override
  public void setFrameTitle(String title) {
    FrameHelper helper = frameHelper;
    if (helper != null) {
      helper.getHelper().setFrameTitle(title);
    }
  }

  @Override
  public JComponent getComponent() {
    return getRootPane();
  }

  @Nullable
  @Override
  public BalloonLayout getBalloonLayout() {
    FrameHelper helper = frameHelper;
    if (helper == null) {
      return null;
    }
    return helper.getHelper().getBalloonLayout();
  }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.rds.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for adding metadata tags to an Amazon RDS resource.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/AddTagsToResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AddTagsToResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The Amazon RDS resource the tags will be added to, expressed as an Amazon Resource Name (ARN). For information
     * about creating an ARN, see <a href=
     * "http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Tagging.ARN.html#USER_Tagging.ARN.Constructing">
     * Constructing an RDS Amazon Resource Name (ARN)</a>.
     */
    private String resourceName;

    /**
     * The tags to be assigned to the Amazon RDS resource; lazily initialized by {@link #getTags()}.
     */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * Sets the ARN of the Amazon RDS resource the tags will be added to.
     *
     * @param resourceName
     *        The Amazon RDS resource the tags will be added to. This value is an Amazon Resource Name (ARN). For
     *        information about creating an ARN, see <a href=
     *        "http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Tagging.ARN.html#USER_Tagging.ARN.Constructing"
     *        > Constructing an RDS Amazon Resource Name (ARN)</a>.
     */
    public void setResourceName(String resourceName) {
        this.resourceName = resourceName;
    }

    /**
     * Gets the ARN of the Amazon RDS resource the tags will be added to.
     *
     * @return The Amazon RDS resource the tags will be added to. This value is an Amazon Resource Name (ARN). For
     *         information about creating an ARN, see <a href=
     *         "http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Tagging.ARN.html#USER_Tagging.ARN.Constructing"
     *         > Constructing an RDS Amazon Resource Name (ARN)</a>.
     */
    public String getResourceName() {
        return this.resourceName;
    }

    /**
     * Fluent variant of {@link #setResourceName(String)}.
     *
     * @param resourceName
     *        The Amazon RDS resource the tags will be added to. This value is an Amazon Resource Name (ARN).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AddTagsToResourceRequest withResourceName(String resourceName) {
        setResourceName(resourceName);
        return this;
    }

    /**
     * Gets the tags to be assigned to the Amazon RDS resource, creating the backing list on first access.
     *
     * @return The tags to be assigned to the Amazon RDS resource.
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * Replaces the tags to be assigned to the Amazon RDS resource.
     *
     * @param tags
     *        The tags to be assigned to the Amazon RDS resource.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends tags to the existing list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        The tags to be assigned to the Amazon RDS resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AddTagsToResourceRequest withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag tag : tags) {
            this.tags.add(tag);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)}; replaces the existing list.
     *
     * @param tags
     *        The tags to be assigned to the Amazon RDS resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AddTagsToResourceRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("{");
        if (getResourceName() != null) {
            builder.append("ResourceName: ").append(getResourceName()).append(",");
        }
        if (getTags() != null) {
            builder.append("Tags: ").append(getTags());
        }
        builder.append("}");
        return builder.toString();
    }

    /**
     * Equal when the other object is an AddTagsToResourceRequest whose resource name and tags both match.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AddTagsToResourceRequest)) {
            return false;
        }
        AddTagsToResourceRequest that = (AddTagsToResourceRequest) obj;
        String thisName = getResourceName();
        String thatName = that.getResourceName();
        if (thisName == null ? thatName != null : !thisName.equals(thatName)) {
            return false;
        }
        java.util.List<Tag> thisTags = getTags();
        java.util.List<Tag> thatTags = that.getTags();
        if (thisTags == null ? thatTags != null : !thisTags.equals(thatTags)) {
            return false;
        }
        return true;
    }

    /**
     * Hash computed from resource name and tags, consistent with {@link #equals(Object)}.
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getResourceName() == null) ? 0 : getResourceName().hashCode());
        result = prime * result + ((getTags() == null) ? 0 : getTags().hashCode());
        return result;
    }

    @Override
    public AddTagsToResourceRequest clone() {
        return (AddTagsToResourceRequest) super.clone();
    }

}
/* * #%L * ===================================================== * _____ _ ____ _ _ _ _ * |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | | * | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| | * | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ | * |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_| * \____/ * * ===================================================== * * Hochschule Hannover * (University of Applied Sciences and Arts, Hannover) * Faculty IV, Dept. of Computer Science * Ricklinger Stadtweg 118, 30459 Hannover, Germany * * Email: trust@f4-i.fh-hannover.de * Website: http://trust.f4.hs-hannover.de/ * * This file is part of irondetect, version 0.0.8, * implemented by the Trust@HsH research group at the Hochschule Hannover. * %% * Copyright (C) 2010 - 2015 Trust@HsH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package de.hshannover.f4.trust.procedure; import static org.junit.Assert.assertEquals; import org.junit.Test; import de.hshannover.f4.trust.irondetect.model.ProcedureResult; import de.hshannover.f4.trust.irondetect.procedure.ProcedureResultMapper; import de.hshannover.f4.trust.irondetect.procedure.ProcedureResultMapper.Boundary; import de.hshannover.f4.trust.irondetect.procedure.ProcedureResultMapper.DistanceType; public class ProcedureResultMapperTest { private double precision = 0.001; @Test public void testPercentLow() { ProcedureResult pr; pr = ProcedureResultMapper.map(91, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(89, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(49, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(110, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(200, 100, DistanceType.percent, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); } @Test public void testPercentHigh() { ProcedureResult pr; pr = ProcedureResultMapper.map(110, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(111, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(151, 100, DistanceType.percent, 
Boundary.high, 10, 50); assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(90, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(0, 100, DistanceType.percent, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); } @Test public void testPercentBoth() { ProcedureResult pr; pr = ProcedureResultMapper.map(110, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(111, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(151, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(90, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(0, 100, DistanceType.percent, Boundary.both, 10, 50); assertEquals(1.0, pr.getValue(), precision); } @Test public void testAbsoluteLow() { ProcedureResult pr; pr = ProcedureResultMapper.map(90, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(89, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(49, 100, DistanceType.absolute, Boundary.low, 10, 50); 
assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(110, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(200, 100, DistanceType.absolute, Boundary.low, 10, 50); assertEquals(-1.0, pr.getValue(), precision); } @Test public void testAbsoluteHigh() { ProcedureResult pr; pr = ProcedureResultMapper.map(110, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(111, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(151, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(90, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(0, 100, DistanceType.absolute, Boundary.high, 10, 50); assertEquals(-1.0, pr.getValue(), precision); } @Test public void testAbsoluteBoth() { ProcedureResult pr; pr = ProcedureResultMapper.map(110, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(111, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(150, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(151, 100, DistanceType.absolute, Boundary.both, 10, 50); 
assertEquals(1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(90, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(-1.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(50, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(0.0, pr.getValue(), precision); pr = ProcedureResultMapper.map(0, 100, DistanceType.absolute, Boundary.both, 10, 50); assertEquals(1.0, pr.getValue(), precision); } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options.colors; import com.intellij.application.options.schemes.AbstractSchemeActions; import com.intellij.application.options.schemes.AbstractSchemesPanel; import com.intellij.application.options.schemes.SchemeNameGenerator; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.colors.impl.AbstractColorsScheme; import com.intellij.openapi.editor.colors.impl.DefaultColorsScheme; import com.intellij.openapi.editor.colors.impl.EditorColorsSchemeImpl; import com.intellij.openapi.editor.colors.impl.EmptyColorScheme; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.options.*; import com.intellij.openapi.project.DefaultProjectFactory; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.components.JBList; import com.intellij.util.ui.JBUI; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import 
java.util.List;

/**
 * Scheme-management actions (import, export, reset, duplicate) for editor color schemes.
 * Import is dispatched either to an {@link ImportHandler} extension matching the importer
 * name, or to a registered {@link SchemeImporter}; {@code .jar} sources are unpacked and
 * the user chooses which contained schemes to import.
 */
public abstract class ColorSchemeActions extends AbstractSchemeActions<EditorColorsScheme> {

  protected ColorSchemeActions(@NotNull AbstractSchemesPanel<EditorColorsScheme, ?> schemesPanel) {
    super(schemesPanel);
  }

  @Override
  protected Collection<String> getSchemeImportersNames() {
    // ImportHandler extension titles come first, then the SchemeImporter-based names.
    List<String> importersNames = new ArrayList<>();
    for (ImportHandler importHandler : Extensions.getExtensions(ImportHandler.EP_NAME)) {
      importersNames.add(importHandler.getTitle());
    }
    importersNames.addAll(super.getSchemeImportersNames());
    return importersNames;
  }

  @Override
  protected void importScheme(@NotNull String importerName) {
    // An ImportHandler extension with a matching title takes precedence.
    if (tryImportWithImportHandler(importerName)) {
      return;
    }
    final SchemeImporter<EditorColorsScheme> importer = SchemeImporterEP.getImporter(importerName, EditorColorsScheme.class);
    if (importer != null) {
      VirtualFile importSource =
        SchemeImportUtil.selectImportSource(importer.getSourceExtensions(), getSchemesPanel(), null, "Choose " + importerName);
      if (importSource != null) {
        if ("jar".equals(importSource.getExtension())) {
          importFromJar(getSchemesPanel().getToolbar(), importer, importSource);
        }
        else {
          doImport(importer, importSource);
        }
      }
    }
  }

  /** Imports a single scheme file and reports success/failure in the panel's status area. */
  private void doImport(@NotNull SchemeImporter<EditorColorsScheme> importer, @NotNull VirtualFile importSource) {
    try {
      EditorColorsScheme imported =
        importer.importScheme(DefaultProjectFactory.getInstance().getDefaultProject(), importSource,
                              getOptions().getSelectedScheme(),
                              name -> {
                                // Build a uniquely named empty scheme for the importer to fill in.
                                String newName = SchemeNameGenerator.getUniqueName(
                                  name != null ? name : "Unnamed",
                                  candidate -> getSchemesPanel().getModel().containsScheme(candidate, false));
                                AbstractColorsScheme newScheme = new EditorColorsSchemeImpl(EmptyColorScheme.INSTANCE);
                                newScheme.setName(newName);
                                newScheme.setDefaultMetaInfo(EmptyColorScheme.INSTANCE);
                                return newScheme;
                              });
      if (imported != null) {
        getOptions().addImportedScheme(imported);
        getSchemesPanel().showStatus(
          ApplicationBundle.message("settings.editor.scheme.import.success", importSource.getPresentableUrl(), imported.getName()),
          MessageType.INFO);
      }
    }
    catch (SchemeImportException e) {
      handleError(e, importSource);
    }
  }

  /** Shows an import failure message, appending exception details when available. */
  private void handleError(@NotNull SchemeImportException e, @NotNull VirtualFile importSource) {
    String details = e.getMessage();
    getSchemesPanel().showStatus(
      ApplicationBundle.message("settings.editor.scheme.import.failure", importSource.getPresentableUrl())
      + (StringUtil.isEmpty(details) ? "" : "\n" + details),
      MessageType.ERROR);
  }

  /**
   * Imports schemes from a {@code .jar} file. A single contained scheme (or unit-test mode)
   * is imported directly; otherwise the user picks the schemes from a chooser dialog.
   */
  private void importFromJar(@NotNull Component componentAbove,
                             @NotNull SchemeImporter<EditorColorsScheme> importer,
                             @NotNull VirtualFile jarFile) {
    try {
      List<VirtualFile> schemeFiles = getSchemeFiles(jarFile);
      if (schemeFiles.size() == 1 || ApplicationManager.getApplication().isUnitTestMode()) {
        doImport(importer, schemeFiles.iterator().next());
        return;
      }
      List<ColorSchemeItem> fileList = new ArrayList<>(schemeFiles.size());
      for (VirtualFile file : schemeFiles) {
        Element root = SchemeImportUtil.loadSchemeDom(file);
        String name = StringUtil.trimStart(ColorSchemeImporter.getSchemeName(root), SchemeManager.EDITABLE_COPY_PREFIX);
        fileList.add(new ColorSchemeItem(name, file));
      }
      ImportSchemeChooserDialog dialog = new ImportSchemeChooserDialog(mySchemesPanel, componentAbove, fileList);
      if (dialog.showAndGet()) {
        List<ColorSchemeItem> selectedItems = dialog.getSelectedItems();
        for (ColorSchemeItem item : selectedItems) {
          doImport(importer, item.getFile());
        }
      }
    }
    catch (SchemeImportException e) {
      handleError(e, jarFile);
    }
  }

  /**
   * Collects {@code .icls}/{@code .xml} scheme files from the jar's {@code colors/} directory.
   *
   * @throws SchemeImportException if the jar contains no scheme files
   */
  private static List<VirtualFile> getSchemeFiles(@NotNull VirtualFile jarFile) throws SchemeImportException {
    List<VirtualFile> schemeFiles = new ArrayList<>();
    for (VirtualFile file : jarFile.getChildren()) {
      if (file.isDirectory() && "colors".equals(file.getName())) {
        for (VirtualFile schemeFile : file.getChildren()) {
          String ext = schemeFile.getExtension();
          if ("icls".equals(ext) || "xml".equals(ext)) {
            schemeFiles.add(schemeFile);
          }
        }
        break;
      }
    }
    if (schemeFiles.isEmpty()) {
      // Fixed user-visible typo: was "The are no color schemes in the chosen file."
      throw new SchemeImportException("There are no color schemes in the chosen file.");
    }
    return schemeFiles;
  }

  /** @return true if an {@link ImportHandler} with a matching title handled the import. */
  private boolean tryImportWithImportHandler(@NotNull String importerName) {
    for (ImportHandler importHandler : Extensions.getExtensions(ImportHandler.EP_NAME)) {
      if (importerName.equals(importHandler.getTitle())) {
        importHandler.performImport(getSchemesPanel().getToolbar(), scheme -> {
          if (scheme != null) getOptions().addImportedScheme(scheme);
        });
        return true;
      }
    }
    return false;
  }

  @Override
  protected void resetScheme(@NotNull EditorColorsScheme scheme) {
    // Destructive operation: always confirm with the user first.
    if (Messages.showOkCancelDialog(ApplicationBundle.message("color.scheme.reset.message"),
                                    ApplicationBundle.message("color.scheme.reset.title"),
                                    Messages.getQuestionIcon()) == Messages.OK) {
      getOptions().resetSchemeToOriginal(scheme.getName());
    }
  }

  @Override
  protected void duplicateScheme(@NotNull EditorColorsScheme scheme, @NotNull String newName) {
    getOptions().saveSchemeAs(scheme, newName);
  }

  @Override
  protected void exportScheme(@NotNull EditorColorsScheme scheme, @NotNull String exporterName) {
    EditorColorsScheme schemeToExport = scheme;
    // Export the parent scheme unless the parent is a bundled (read-only) default scheme.
    if (scheme instanceof AbstractColorsScheme) {
      EditorColorsScheme parent = ((AbstractColorsScheme)scheme).getParentScheme();
      if (!(parent instanceof DefaultColorsScheme)) {
        schemeToExport = parent;
      }
    }
    // Strip the internal "editable copy" prefix from the exported scheme's name.
    if (schemeToExport.getName().startsWith(SchemeManager.EDITABLE_COPY_PREFIX)) {
      schemeToExport = (EditorColorsScheme)schemeToExport.clone();
      schemeToExport.setName(SchemeManager.getDisplayName(schemeToExport));
    }
    super.exportScheme(schemeToExport, exporterName);
  }

  @Override
  protected Class<EditorColorsScheme> getSchemeType() {
    return EditorColorsScheme.class;
  }

  @NotNull
  protected abstract ColorAndFontOptions getOptions();

  /** Modal chooser listing the schemes found inside an imported jar. */
  private static class ImportSchemeChooserDialog extends DialogWrapper {
    private Component myComponentAbove;
    private List<ColorSchemeItem> mySchemeItems;
    private JBList<ColorSchemeItem> mySchemeList;

    protected ImportSchemeChooserDialog(@NotNull Component parent,
                                        @NotNull Component componentAbove,
                                        @NotNull List<ColorSchemeItem> schemeItems) {
      super(parent, false);
      setTitle(ApplicationBundle.message("settings.editor.scheme.import.chooser.title"));
      setOKButtonText(ApplicationBundle.message("settings.editor.scheme.import.chooser.button"));
      myComponentAbove = componentAbove;
      mySchemeItems = schemeItems;
      init();
    }

    @Nullable
    @Override
    public Point getInitialLocation() {
      // Place the dialog just below the anchor component.
      Point location = myComponentAbove.getLocationOnScreen();
      location.translate(0, myComponentAbove.getHeight() + JBUI.scale(20));
      return location;
    }

    @Override
    protected void createDefaultActions() {
      super.createDefaultActions();
    }

    @Nullable
    @Override
    protected JComponent createCenterPanel() {
      JPanel schemesPanel = new JPanel(new BorderLayout());
      mySchemeList = new JBList<>(mySchemeItems);
      schemesPanel.add(mySchemeList, BorderLayout.CENTER);
      return schemesPanel;
    }

    public List<ColorSchemeItem> getSelectedItems() {
      int minIndex = mySchemeList.getMinSelectionIndex();
      int maxIndex = mySchemeList.getMaxSelectionIndex();
      if (minIndex >= 0 && maxIndex >= minIndex) {
        // NOTE(review): assumes a contiguous selection — a gapped multi-selection would
        // also include the unselected items between min and max. Verify against the
        // list's selection mode.
        return mySchemeItems.subList(minIndex, maxIndex + 1);
      }
      return Collections.emptyList();
    }
  }

  /** Pairs a scheme's display name with its backing file; toString() feeds the JBList renderer. */
  private static class ColorSchemeItem {
    private String myName;
    private VirtualFile myFile;

    public ColorSchemeItem(String name, VirtualFile file) {
      myName = name;
      myFile = file;
    }

    public String getName() {
      return myName;
    }

    public VirtualFile getFile() {
      return myFile;
    }

    @Override
    public String toString() {
      return myName;
    }
  }
}
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.auraframework.test.perf; import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.auraframework.test.SauceUtil; import org.auraframework.test.perf.rdp.RDPNotification; import org.auraframework.util.AuraUITestingUtil; import org.auraframework.util.json.JsonReader; import org.json.JSONException; import org.json.JSONObject; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.logging.LogEntry; import org.openqa.selenium.logging.LogType; import org.openqa.selenium.logging.LoggingPreferences; import org.openqa.selenium.remote.CapabilityType; import org.openqa.selenium.remote.DesiredCapabilities; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Utility WebDriver methods related to performance */ public final class PerfWebDriverUtil { private static final Logger LOG = Logger.getLogger(PerfWebDriverUtil.class.getSimpleName()); private static final LoggingPreferences PERFORMANCE_LOGGING_PREFS; static { // NOTE: need to create single LoggingPreferences object to be reused as LoggingPreferences // doesn't implement 
hashCode()/equals() correctly PERFORMANCE_LOGGING_PREFS = new LoggingPreferences(); PERFORMANCE_LOGGING_PREFS.enable(LogType.PERFORMANCE, Level.INFO); // logPrefs.enable(LogType.BROWSER, Level.ALL); // Level.FINE for LogType.DRIVER shows all dev tools requests and responses // logPrefs.enable(LogType.DRIVER, Level.WARNING); // N/A in chromedriver: logPrefs.enable(LogType.PROFILER, Level.ALL); // N/A in chromedriver: logPrefs.enable(LogType.CLIENT, Level.ALL); // N/A in chromedriver: logPrefs.enable(LogType.SERVER, Level.ALL); } /** * Adds capabilites to request collecting WebDriver performance data */ public static void addLoggingCapabilities(DesiredCapabilities capabilities) { capabilities.setCapability(CapabilityType.LOGGING_PREFS, PERFORMANCE_LOGGING_PREFS); } /** * Pretty-prints the data from the Resource Timing API */ public static void showResourceTimingData(List<Map<String, Object>> data) { for (Map<String, Object> entry : data) { try { System.out.println("entry: " + new JSONObject(entry).toString(2)); } catch (JSONException e) { throw new RuntimeException(String.valueOf(entry), e); } } } // instance: private final WebDriver driver; private final AuraUITestingUtil auraUITestingUtil; public PerfWebDriverUtil(WebDriver driver, AuraUITestingUtil auraUITestingUtil) { this.driver = driver; this.auraUITestingUtil = auraUITestingUtil; } /** * @return new RDPNotifications since the last call to this method */ public List<RDPNotification> getRDPNotifications() { List<LogEntry> logEntries = getLogEntries(LogType.PERFORMANCE); List<RDPNotification> events = Lists.newArrayList(); for (LogEntry logEntry : logEntries) { if (LOG.isLoggable(Level.FINE)) { LOG.fine("LOG_ENTRY: " + logEntry); } String message = logEntry.getMessage(); // logMessage is: {"message":{"method":"Timeline.eventRecorded","params":{... 
try { JSONObject json = new JSONObject(message); JSONObject event = json.getJSONObject("message"); String webview = json.getString("webview"); events.add(new RDPNotification(event, webview)); } catch (JSONException e) { LOG.log(Level.WARNING, message, e); } } return events; } public void addTimelineTimeStamp(String label) { ((JavascriptExecutor) driver).executeScript("console.timeStamp('" + label + "')"); } // /** * @param type one of the LogTypes, i.e. LogType.PERFORMANCE * @return log entries accumulated since the last time this method was called */ private List<LogEntry> getLogEntries(String type) { try { return driver.manage().logs().get(type).getAll(); } catch (WebDriverException ignore) { // i.e. log type 'profiler' not found } catch (Exception e) { LOG.log(Level.WARNING, type, e); } return NO_ENTRIES; } private static final List<LogEntry> NO_ENTRIES = ImmutableList.of(); // window.performance /** * See https://developers.google.com/chrome-developer-tools/docs/network and http://www.w3.org/TR/resource-timing * * @return Resource Timing API performance */ public List<Map<String, Object>> getResourceTimingData() { List<Map<String, Object>> entries = (List<Map<String, Object>>) ((JavascriptExecutor) driver) .executeScript("return window.performance.getEntries()"); return entries; } // UIPerfStats: public void clearUIPerfStats() { auraUITestingUtil.getEval("$A.Perf.removeStats()"); } public Map<String, String> getUIPerfStats(String stage, List<String> transactionsToGather) { Map<String, String> stats = Maps.newHashMap(); String json = auraUITestingUtil.getEval("return $A.util.json.encode($A.Perf.toJson())").toString(); json = json.substring(1, json.length() - 1); json = json.replace("\\\"", "\""); StringReader in = new StringReader(json); Map<?, ?> message = (Map<?, ?>) new JsonReader().read(in); @SuppressWarnings("unchecked") ArrayList<HashMap<?, ?>> measures = (ArrayList<HashMap<?, ?>>) message .get("measures"); for (HashMap<?, ?> marks : measures) { if 
(!transactionsToGather.isEmpty()) { if (!transactionsToGather.contains(marks.get("measure")) && // IE10 list of measures was not in the same order // as expected in transactionsToGather so need to // make sure measure and transactionsToGather are // similar !isSimilar( (String) marks.get("measure"), transactionsToGather.get(0))) { continue; } } String measureName = marks.get("measure").toString() + (stage != null ? ("_" + stage) : ""); stats.put(measureName, marks.get("et").toString()); } return stats; } private static boolean isSimilar(String str1, String str2) { char[] str1Arr = str1.toCharArray(); char[] str2Arr = str1.toCharArray(); Arrays.sort(str1Arr); Arrays.sort(str2Arr); str1 = new String(str1Arr); str2 = new String(str2Arr); return str1.equals(str2); } // JS heap snapshot /** * See https://code.google.com/p/chromedriver/issues/detail?id=519<br/> * Note: slow, each call takes a couple of seconds * * @return JS heap snapshot */ public Map<String, ?> takeHeapSnapshot() { if (SauceUtil.areTestsRunningOnSauce()) { throw new UnsupportedOperationException("required 2.10 chromedriver still not available in SauceLabs"); } return (Map<String, ?>) ((JavascriptExecutor) driver).executeScript(":takeHeapSnapshot"); } /** * Analyzes the data in the snapshot and returns summary data */ public static JSONObject analyzeHeapSnapshot(Map<String, ?> data) { Map<String, ?> metadata = (Map<String, ?>) data.get("snapshot"); int nodeCount = ((Number) metadata.get("node_count")).intValue(); // "node_fields": ["type","name","id","self_size","edge_count"] List<Number> nodes = (List<Number>) data.get("nodes"); int totalSize = 0; for (int i = 0; i < nodeCount; i++) { totalSize += nodes.get(5 * i + 3).intValue(); } JSONObject json = new JSONObject(); try { json.put("node_count", nodeCount); json.put("total_size", totalSize); } catch (JSONException e) { throw new RuntimeException(e); } return json; } }
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.graph.impl;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.util.containers.FList;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.graph.Graph;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NotNull;

import java.util.*;

/**
 * Algorithm to search k shortest paths between two vertices in unweighted directed graph.
 * Based on article "Finding the k shortest paths" by D. Eppstein, 1997.
 *
 * @author nik
 */
public class KShortestPathsFinder<Node> {
  private static final Logger LOG = Logger.getInstance("#com.intellij.util.graph.impl.KShortestPathsFinder");
  private final Graph<Node> myGraph;
  private final Node myStart;
  private final Node myFinish;
  private final ProgressIndicator myProgressIndicator;
  // Edges that are NOT part of the shortest-path tree toward myFinish, keyed by
  // their start node; GraphEdge delta = extra path length incurred by taking the edge.
  private MultiMap<Node, GraphEdge<Node>> myNonTreeEdges;
  // Nodes in BFS order from myFinish (non-decreasing distance to the target).
  private List<Node> mySortedNodes;
  // Shortest-path tree: next hop from each node toward myFinish.
  private Map<Node, Node> myNextNodes;
  // Root of the per-node heap of outgoing non-tree edges (cheapest delta first).
  private Map<Node, HeapNode<Node>> myOutRoots;
  // Persistent heap of all sidetrack edges usable from each node.
  private Map<Node,Heap<Node>> myHeaps;

  public KShortestPathsFinder(@NotNull Graph<Node> graph, @NotNull Node start, @NotNull Node finish,
                              @NotNull ProgressIndicator progressIndicator) {
    myGraph = graph;
    myStart = start;
    myFinish = finish;
    myProgressIndicator = progressIndicator;
  }

  /**
   * BFS backwards from myFinish over incoming edges; fills mySortedNodes,
   * myNextNodes (shortest-path tree) and myNonTreeEdges (sidetrack edges with
   * their extra-cost delta). Unweighted graph, so BFS gives shortest distances.
   */
  private void computeDistancesToTarget() {
    myNonTreeEdges = new MultiMap<>();
    mySortedNodes = new ArrayList<>();
    myNextNodes = new HashMap<>();
    TObjectIntHashMap<Node> distances = new TObjectIntHashMap<>();
    Deque<Node> nodes = new ArrayDeque<>();
    nodes.addLast(myFinish);
    distances.put(myFinish, 0);
    while (!nodes.isEmpty()) {
      myProgressIndicator.checkCanceled();
      Node node = nodes.removeFirst();
      mySortedNodes.add(node);
      int d = distances.get(node) + 1;
      Iterator<Node> iterator = myGraph.getIn(node);
      while (iterator.hasNext()) {
        Node prev = iterator.next();
        if (distances.containsKey(prev)) {
          // Already reached: edge prev->node is a sidetrack with extra cost d - dPrev.
          int dPrev = distances.get(prev);
          myNonTreeEdges.putValue(prev, new GraphEdge<>(prev, node, d - dPrev));
          continue;
        }
        distances.put(prev, d);
        myNextNodes.put(prev, node);
        nodes.addLast(prev);
      }
    }
  }

  /**
   * For every node, arranges its own sidetrack edges into a small binary heap
   * (keyed by delta). The minimum element becomes the root stored in myOutRoots;
   * the remaining elements hang off the root's third child slot (myChildren[2]).
   */
  private void buildOutHeaps() {
    myOutRoots = new HashMap<>();
    for (Node node : mySortedNodes) {
      myProgressIndicator.checkCanceled();
      List<HeapNode<Node>> heapNodes = new ArrayList<>();
      Collection<GraphEdge<Node>> edges = myNonTreeEdges.get(node);
      if (edges.isEmpty()) continue;
      HeapNode<Node> root = null;
      for (GraphEdge<Node> edge : edges) {
        HeapNode<Node> heapNode = new HeapNode<>(edge);
        heapNodes.add(heapNode);
        if (root == null || root.myEdge.getDelta() > heapNode.myEdge.getDelta()) {
          root = heapNode;
        }
      }
      LOG.assertTrue(root != null);
      heapNodes.remove(root);
      myOutRoots.put(node, root);
      if (!heapNodes.isEmpty()) {
        // Link remaining nodes into an implicit binary-heap shape
        // (element j's parent is (j+1)/2 - 1, child slot (j+1) % 2) ...
        for (int j = 1; j < heapNodes.size(); j++) {
          HeapNode<Node> heapNode = heapNodes.get(j);
          HeapNode<Node> parent = heapNodes.get((j+1)/2 - 1);
          parent.myChildren[(j+1) % 2] = heapNode;
        }
        // ... then restore the heap property bottom-up.
        for (int j = heapNodes.size() / 2 - 1; j >= 0; j--) {
          heapify(heapNodes.get(j));
        }
        root.myChildren[2] = heapNodes.get(0);
      }
    }
  }

  /**
   * Builds the persistent heap H(v) for every node: all sidetracks available
   * along v's shortest path to the target. Heaps are shared between nodes
   * (insert is copy-on-write), which is what keeps the algorithm efficient.
   * Relies on mySortedNodes ordering so that H(next) exists before H(node).
   */
  private void buildMainHeaps() {
    myHeaps = new HashMap<>();
    for (Node node : mySortedNodes) {
      myProgressIndicator.checkCanceled();
      HeapNode<Node> outRoot = myOutRoots.get(node);
      Node next = myNextNodes.get(node);
      if (outRoot == null) {
        // No own sidetracks: share the successor's heap unchanged.
        if (next != null) {
          myHeaps.put(node, myHeaps.get(next));
        }
        continue;
      }
      final Heap<Node> nextHeap = myHeaps.get(next);
      if (nextHeap == null) {
        myHeaps.put(node, new Heap<>(outRoot));
        continue;
      }
      final Heap<Node> tHeap = nextHeap.insert(outRoot);
      myHeaps.put(node, tHeap);
    }
  }

  /** Sift-down over the two binary-heap child slots (slot 2 is not part of this heap). */
  private void heapify(HeapNode<Node> node) {
    while (true) {
      HeapNode<Node> min = node;
      for (int i = 0; i < 2; i++) {
        HeapNode<Node> child = node.myChildren[i];
        if (child != null && child.myEdge.getDelta() < min.myEdge.getDelta()) {
          min = child;
        }
      }
      if (min != node) {
        // Swap payloads rather than relinking nodes, then continue from the child.
        GraphEdge<Node> t = min.myEdge;
        min.myEdge = node.myEdge;
        node.myEdge = t;
        node = min;
      }
      else {
        break;
      }
    }
  }

  /**
   * Returns up to {@code k} shortest paths from start to finish, shortest first.
   * Each candidate path is encoded as a list of "sidetrack" edges; the best-first
   * search over the heap structure enumerates them in order of total extra cost.
   * Returns an empty list if finish is unreachable or the search is cancelled.
   */
  public List<List<Node>> findShortestPaths(int k) {
    try {
      if (myStart.equals(myFinish)) {
        return Collections.singletonList(Collections.singletonList(myStart));
      }
      computeDistancesToTarget();
      if (!myNextNodes.containsKey(myStart)) {
        // finish not reachable from start
        return Collections.emptyList();
      }
      buildOutHeaps();
      buildMainHeaps();

      PriorityQueue<Sidetracks<Node>> queue = new PriorityQueue<>();
      List<FList<HeapNode<Node>>> sidetracks = new ArrayList<>();
      // The empty sidetrack list = the shortest path itself.
      sidetracks.add(FList.emptyList());

      final Heap<Node> heap = myHeaps.get(myStart);
      if (heap != null) {
        queue.add(new Sidetracks<>(0, FList.<HeapNode<Node>>emptyList().prepend(heap.getRoot())));
        for (int i = 2; i <= k; i++) {
          if (queue.isEmpty()) break;
          myProgressIndicator.checkCanceled();
          final Sidetracks<Node> current = queue.remove();
          sidetracks.add(current.myEdges);
          final HeapNode<Node> e = current.myEdges.getHead();
          // Extend with the best sidetrack available after taking e.
          final Heap<Node> next = myHeaps.get(e.myEdge.getFinish());
          if (next != null) {
            final HeapNode<Node> f = next.getRoot();
            queue.add(new Sidetracks<>(current.myLength + f.myEdge.getDelta(), current.myEdges.prepend(f)));
          }
          // Or replace e by one of its heap children (next-cheapest alternatives).
          for (HeapNode<Node> child : e.myChildren) {
            if (child != null) {
              queue.add(new Sidetracks<>(current.myLength - e.myEdge.getDelta() + child.myEdge.getDelta(),
                                         current.myEdges.getTail().prepend(child)));
            }
          }
        }
      }
      return computePathsBySidetracks(sidetracks);
    }
    catch (ProcessCanceledException e) {
      return Collections.emptyList();
    }
  }

  /**
   * Decodes each sidetrack list into an explicit node path: follow the
   * shortest-path tree (myNextNodes) except where the next pending sidetrack
   * edge starts at the current node, in which case take that edge instead.
   */
  private List<List<Node>> computePathsBySidetracks(List<FList<HeapNode<Node>>> sidetracks) {
    final List<List<Node>> result = new ArrayList<>();
    for (FList<HeapNode<Node>> sidetrack : sidetracks) {
      myProgressIndicator.checkCanceled();
      List<GraphEdge<Node>> edges = new ArrayList<>();
      while (!sidetrack.isEmpty()) {
        edges.add(sidetrack.getHead().myEdge);
        sidetrack = sidetrack.getTail();
      }
      Node current = myStart;
      final List<Node> path = new ArrayList<>();
      path.add(current);
      // edges were prepended, so iterate from the tail to recover path order.
      int i = edges.size() - 1;
      while (!current.equals(myFinish) || i >= 0) {
        if (i >= 0 && edges.get(i).getStart().equals(current)) {
          current = edges.get(i).getFinish();
          i--;
        }
        else {
          current = myNextNodes.get(current);
          LOG.assertTrue(current != null);
        }
        path.add(current);
      }
      result.add(path);
    }
    return result;
  }

  /** Priority-queue entry: a set of sidetrack edges plus their accumulated extra length. */
  private static class Sidetracks<Node> implements Comparable<Sidetracks> {
    private int myLength;
    private final FList<HeapNode<Node>> myEdges;

    private Sidetracks(int length, FList<HeapNode<Node>> edges) {
      myLength = length;
      myEdges = edges;
    }

    @Override
    public int compareTo(Sidetracks o) {
      // NOTE(review): int subtraction can overflow for huge deltas; lengths here are
      // small path-length differences, presumably safe — confirm if graphs get large.
      return myLength - o.myLength;
    }
  }

  /**
   * Persistent (copy-on-write) binary min-heap of HeapNodes keyed by edge delta.
   * insert() copies only the root-to-leaf path, so existing heaps shared by
   * other nodes are never mutated.
   */
  private static class Heap<Node> {
    private final int mySize;
    private HeapNode<Node> myRoot;

    public Heap(HeapNode<Node> root) {
      myRoot = root;
      mySize = 1;
    }

    private Heap(int size, HeapNode<Node> root) {
      mySize = size;
      myRoot = root;
    }

    public HeapNode<Node> getRoot() {
      return myRoot;
    }

    public Heap<Node> insert(HeapNode<Node> node) {
      // pos is the index (1-based) of the new leaf; pow isolates the bit that
      // selects left/right at each level while descending from the root.
      int pos = mySize + 1;
      int pow = 1;
      while (pos >= pow << 2) {
        pow <<= 1;
      }
      HeapNode<Node> newRoot = myRoot.copy();
      HeapNode<Node> place = newRoot;
      List<HeapNode<Node>> parents = new ArrayList<>();
      while (true) {
        parents.add(place);
        final int ind = (pos & pow) != 0 ? 1 : 0;
        if (pow == 1) {
          place.myChildren[ind] = node;
          break;
        }
        // Copy-on-write: duplicate every node along the insertion path.
        HeapNode<Node> copy = place.myChildren[ind].copy();
        place.myChildren[ind] = copy;
        place = copy;
        pow >>= 1;
      }
      // Bubble the new element up by swapping payloads (and the out-heap link
      // in slot 2) with ancestors that have a larger delta.
      for (int i = parents.size() - 1; i >= 0; i--) {
        HeapNode<Node> parent = parents.get(i);
        if (parent.myEdge.getDelta() < node.myEdge.getDelta()) {
          break;
        }
        final GraphEdge<Node> t = parent.myEdge;
        parent.myEdge = node.myEdge;
        node.myEdge = t;
        final HeapNode<Node> t2 = parent.myChildren[2];
        parent.myChildren[2] = node.myChildren[2];
        node.myChildren[2] = t2;
        node = parent;
      }
      return new Heap<>(mySize + 1, newRoot);
    }
  }

  /**
   * Heap node holding one sidetrack edge. Children 0 and 1 are binary-heap
   * children; child 2 links to the rest of the node's own out-heap.
   */
  private static class HeapNode<Node> {
    public HeapNode<Node>[] myChildren;
    public GraphEdge<Node> myEdge;

    private HeapNode(GraphEdge<Node> edge) {
      myEdge = edge;
      myChildren = new HeapNode[3];
    }

    // Shallow copy: shares child references, used by the persistent heap insert.
    public HeapNode(HeapNode<Node> node) {
      myEdge = node.myEdge;
      myChildren = node.myChildren.clone();
    }

    public HeapNode<Node> copy() {
      return new HeapNode<>(this);
    }
  }
}
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.widget.cts; import com.android.cts.stub.R; import android.app.Activity; import android.content.Context; import android.content.res.Resources; import android.cts.util.PollingCheck; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.drawable.Drawable; import android.test.ActivityInstrumentationTestCase2; import android.util.AttributeSet; import android.view.KeyEvent; import android.view.MotionEvent; import android.widget.AbsSeekBar; import android.widget.SeekBar; /** * Test {@link AbsSeekBar}. 
 */
public class AbsSeekBarTest extends ActivityInstrumentationTestCase2<ProgressBarStubActivity> {
    public AbsSeekBarTest() {
        super("com.android.cts.stub", ProgressBarStubActivity.class);
    }

    private Activity mActivity;
    private Resources mResources;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mActivity = getActivity();
        mResources = mActivity.getResources();
    }

    // Constructors must not throw for any of the three overloads.
    public void testConstructor() {
        new MyAbsSeekBar(mActivity);

        new MyAbsSeekBar(mActivity, null);

        new MyAbsSeekBar(mActivity, null, com.android.internal.R.attr.progressBarStyle);
    }

    // setThumbOffset/getThumbOffset round-trip for positive, zero and negative values.
    public void testAccessThumbOffset() {
        AbsSeekBar myAbsSeekBar = new MyAbsSeekBar(mActivity);
        final int positive = 5;
        final int negative = -5;
        final int zero = 0;

        myAbsSeekBar.setThumbOffset(positive);
        assertEquals(positive, myAbsSeekBar.getThumbOffset());

        myAbsSeekBar.setThumbOffset(zero);
        assertEquals(zero, myAbsSeekBar.getThumbOffset());

        myAbsSeekBar.setThumbOffset(negative);
        assertEquals(negative, myAbsSeekBar.getThumbOffset());
    }

    // setThumb makes exactly the current thumb drawable "verified" (owned by the view).
    public void testSetThumb() {
        MyAbsSeekBar myAbsSeekBar = new MyAbsSeekBar(mActivity);
        Drawable drawable1 = mResources.getDrawable(R.drawable.scenery);
        Drawable drawable2 = mResources.getDrawable(R.drawable.pass);

        assertFalse(myAbsSeekBar.verifyDrawable(drawable1));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable2));

        myAbsSeekBar.setThumb(drawable1);
        assertTrue(myAbsSeekBar.verifyDrawable(drawable1));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable2));

        // Replacing the thumb un-verifies the previous drawable.
        myAbsSeekBar.setThumb(drawable2);
        assertFalse(myAbsSeekBar.verifyDrawable(drawable1));
        assertTrue(myAbsSeekBar.verifyDrawable(drawable2));
    }

    // drawableStateChanged propagates enabled state to the progress drawable's alpha.
    public void testDrawableStateChanged() {
        MyAbsSeekBar myAbsSeekBar = new MyAbsSeekBar(mActivity);
        MockDrawable drawable = new MockDrawable();
        myAbsSeekBar.setProgressDrawable(drawable);

        // NOTE(review): asserts the disabled alpha is exactly 0 — this presumably
        // depends on the theme's disabledAlpha attribute; confirm against the stub theme.
        myAbsSeekBar.setEnabled(false);
        myAbsSeekBar.drawableStateChanged();
        assertEquals(0, drawable.getAlpha());

        myAbsSeekBar.setEnabled(true);
        myAbsSeekBar.drawableStateChanged();
        assertEquals(0xFF, drawable.getAlpha());
    }

    // verifyDrawable covers thumb, background, progress and indeterminate drawables.
    public void testVerifyDrawable() {
        MyAbsSeekBar myAbsSeekBar = new MyAbsSeekBar(mActivity);
        Drawable drawable1 = mResources.getDrawable(R.drawable.scenery);
        Drawable drawable2 = mResources.getDrawable(R.drawable.pass);
        Drawable drawable3 = mResources.getDrawable(R.drawable.blue);
        Drawable drawable4 = mResources.getDrawable(R.drawable.black);

        assertFalse(myAbsSeekBar.verifyDrawable(drawable1));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable2));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable3));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable4));
        // With no thumb set, null is considered verified.
        assertTrue(myAbsSeekBar.verifyDrawable(null));

        myAbsSeekBar.setThumb(drawable1);
        assertTrue(myAbsSeekBar.verifyDrawable(drawable1));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable2));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable3));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable4));
        assertTrue(myAbsSeekBar.verifyDrawable(null));

        myAbsSeekBar.setThumb(drawable2);
        assertFalse(myAbsSeekBar.verifyDrawable(drawable1));
        assertTrue(myAbsSeekBar.verifyDrawable(drawable2));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable3));
        assertFalse(myAbsSeekBar.verifyDrawable(drawable4));
        assertTrue(myAbsSeekBar.verifyDrawable(null));

        myAbsSeekBar.setBackgroundDrawable(drawable2);
        myAbsSeekBar.setProgressDrawable(drawable3);
        myAbsSeekBar.setIndeterminateDrawable(drawable4);
        assertFalse(myAbsSeekBar.verifyDrawable(drawable1));
        assertTrue(myAbsSeekBar.verifyDrawable(drawable2));
        assertTrue(myAbsSeekBar.verifyDrawable(drawable3));
        assertTrue(myAbsSeekBar.verifyDrawable(drawable4));
        assertFalse(myAbsSeekBar.verifyDrawable(null));
    }

    // DPAD keys move progress by the configured key increment.
    public void testAccessKeyProgressIncrement() throws Throwable {
        // AbsSeekBar is an abstract class, use its subclass: SeekBar to do this test.
        runTestOnUiThread(new Runnable() {
            public void run() {
                mActivity.setContentView(R.layout.seekbar);
            }
        });
        getInstrumentation().waitForIdleSync();

        final SeekBar seekBar = (SeekBar) mActivity.findViewById(R.id.seekBar);
        final int keyProgressIncrement = 2;
        runTestOnUiThread(new Runnable() {
            public void run() {
                seekBar.setKeyProgressIncrement(keyProgressIncrement);
                seekBar.setFocusable(true);
                seekBar.requestFocus();
            }
        });
        // Key events are only delivered once the seek bar's window has focus.
        new PollingCheck(1000) {
            @Override
            protected boolean check() {
                return seekBar.hasWindowFocus();
            }
        }.run();

        assertEquals(keyProgressIncrement, seekBar.getKeyProgressIncrement());

        int oldProgress = seekBar.getProgress();
        KeyEvent keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
        getInstrumentation().sendKeySync(keyEvent);
        assertEquals(oldProgress + keyProgressIncrement, seekBar.getProgress());

        oldProgress = seekBar.getProgress();
        keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT);
        getInstrumentation().sendKeySync(keyEvent);
        assertEquals(oldProgress - keyProgressIncrement, seekBar.getProgress());
    }

    // setMax clamps progress and recomputes the default key increment (~max/20).
    public void testSetMax() {
        MyAbsSeekBar myAbsSeekBar = new MyAbsSeekBar(mActivity, null, R.style.TestProgressBar);

        int progress = 10;
        myAbsSeekBar.setProgress(progress);

        int max = progress + 1;
        myAbsSeekBar.setMax(max);
        assertEquals(max, myAbsSeekBar.getMax());
        assertEquals(progress, myAbsSeekBar.getProgress());
        assertEquals(1, myAbsSeekBar.getKeyProgressIncrement());

        // Lowering max below the current progress clamps progress down to max.
        max = progress - 1;
        myAbsSeekBar.setMax(max);
        assertEquals(max, myAbsSeekBar.getMax());
        assertEquals(max, myAbsSeekBar.getProgress());
        assertEquals(1, myAbsSeekBar.getKeyProgressIncrement());

        // An explicitly set increment is kept until max/20 exceeds it.
        int keyProgressIncrement = 10;
        myAbsSeekBar.setKeyProgressIncrement(keyProgressIncrement);
        assertEquals(keyProgressIncrement, myAbsSeekBar.getKeyProgressIncrement());
        max = (keyProgressIncrement - 1) * 20;
        myAbsSeekBar.setMax(max);
        assertEquals(keyProgressIncrement, myAbsSeekBar.getKeyProgressIncrement());
        max = (keyProgressIncrement + 1) * 20;
        myAbsSeekBar.setMax(max);
        assertEquals(keyProgressIncrement + 1, myAbsSeekBar.getKeyProgressIncrement());
    }

    public void testFoo() {
        // Do not test these APIs. They are callbacks which:
        // 1. The callback mechanism has been tested in super class
        // 2. The functionality is implementation details, no need to test
    }

    // Concrete subclass exposing AbsSeekBar's protected members for testing.
    private static class MyAbsSeekBar extends AbsSeekBar {
        public MyAbsSeekBar(Context context) {
            super(context);
        }

        public MyAbsSeekBar(Context context, AttributeSet attrs) {
            super(context, attrs);
        }

        public MyAbsSeekBar(Context context, AttributeSet attrs, int defStyle) {
            super(context, attrs, defStyle);
        }

        @Override
        protected void drawableStateChanged() {
            super.drawableStateChanged();
        }

        @Override
        protected boolean verifyDrawable(Drawable who) {
            return super.verifyDrawable(who);
        }
    }

    // Minimal Drawable stub that records the alpha set on it and whether draw() ran.
    private static class MockDrawable extends Drawable {
        private int mAlpha;
        private boolean mCalledDraw = false;

        @Override
        public void draw(Canvas canvas) {
            mCalledDraw = true;
        }

        public boolean hasCalledDraw() {
            return mCalledDraw;
        }

        public void reset() {
            mCalledDraw = false;
        }

        @Override
        public int getOpacity() {
            return 0;
        }

        @Override
        public void setAlpha(int alpha) {
            mAlpha = alpha;
        }

        public int getAlpha() {
            return mAlpha;
        }

        @Override
        public void setColorFilter(ColorFilter cf) {
        }
    }
}
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.security.sasl.gssapi; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException; import org.apache.directory.api.ldap.model.ldif.LdifEntry; import org.apache.directory.api.ldap.model.ldif.LdifReader; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.server.core.api.CoreSession; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory; import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.factory.PartitionFactory; import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor; import org.apache.directory.server.kerberos.KerberosConfig; import org.apache.directory.server.kerberos.kdc.KdcServer; import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory; import org.apache.directory.server.kerberos.shared.keytab.Keytab; import 
org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.directory.server.protocol.shared.transport.Transport;
import org.apache.directory.server.protocol.shared.transport.UdpTransport;
import org.apache.directory.shared.kerberos.KerberosTime;
import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
import org.apache.directory.shared.kerberos.components.EncryptionKey;
import org.jboss.logging.Logger;

import javax.security.auth.kerberos.KerberosPrincipal;

/**
 * Utility class to wrap starting and stopping of the directory server and the KDC.
 *
 * @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
 */
public class TestKDC {

    public static final int LDAP_PORT = 11390;

    private static Logger log = Logger.getLogger(TestKDC.class);

    private File workingDir;               // scratch dir for ApacheDS partitions and keytabs
    private DirectoryService directoryService;
    private KdcServer kdcServer;
    private String originalConfig;         // previous java.security.krb5.conf value, restored on stop

    private boolean exposeLdapServer;      // when true, an LDAP listener is opened on LDAP_PORT
    private LdapServer ldapServer;

    public TestKDC() {
        this(false);
    }

    public TestKDC(boolean exposeLdapServer) {
        this.exposeLdapServer = exposeLdapServer;
    }

    /**
     * Starts an embedded ApacheDS instance, creates the "wildfly" partition
     * (dc=wildfly,dc=org), loads /KerberosTesting.ldif and optionally exposes
     * the directory over LDAP on localhost:LDAP_PORT.
     *
     * @throws IllegalStateException if already started or initialisation fails
     */
    public void startDirectoryService() {
        if (directoryService != null) {
            throw new IllegalStateException("DirectoryService already started");
        }
        createWorkingDir();
        try {
            DirectoryServiceFactory dsf = new DefaultDirectoryServiceFactory();
            DirectoryService ds = dsf.getDirectoryService();
            dsf.init("Test Service");
            ds.getChangeLog().setEnabled(false);
            // Required so Kerberos keys are derived from userPassword entries.
            ds.addLast(new KeyDerivationInterceptor());
            SchemaManager schemaManager = ds.getSchemaManager();
            createPartition(dsf, schemaManager, "wildfly", "dc=wildfly,dc=org", ds, workingDir, "uid", "krb5PrincipalName");
            CoreSession adminSession = ds.getAdminSession();
            processLdif(schemaManager, adminSession, "/KerberosTesting.ldif");
            directoryService = ds;
            if (exposeLdapServer) {
                ldapServer = new LdapServer();
                ldapServer.setServiceName("DefaultLDAP");
                Transport ldap = new TcpTransport("localhost", LDAP_PORT, 3, 5);
                ldapServer.addTransports(ldap);
                ldapServer.setDirectoryService(directoryService);
                ldapServer.start();
            }
        } catch (Exception e) {
            throw new IllegalStateException("Unable to initialise DirectoryService", e);
        }
    }

    // Creates, indexes and registers a partition with the given suffix.
    private static void createPartition(final DirectoryServiceFactory dsf, final SchemaManager schemaManager, final String id,
            final String suffix, final DirectoryService directoryService, final File workingDir,
            final String... indexAttributes) throws Exception {
        PartitionFactory pf = dsf.getPartitionFactory();
        Partition p = pf.createPartition(schemaManager, directoryService.getDnFactory(), id, suffix, 1000, workingDir);
        for (String current : indexAttributes) {
            pf.addIndex(p, current, 10);
        }
        p.initialize();
        directoryService.addPartition(p);
    }

    // Imports every entry from the named classpath LDIF resource via the admin session.
    private static void processLdif(final SchemaManager schemaManager, final CoreSession adminSession,
            final String ldifName) throws Exception {
        InputStream ldifInput = TestKDC.class.getResourceAsStream(ldifName);
        LdifReader ldifReader = new LdifReader(ldifInput);
        for (LdifEntry ldifEntry : ldifReader) {
            adminSession.add(new DefaultEntry(schemaManager, ldifEntry.getEntry()));
        }
        ldifReader.close();
        ldifInput.close();
    }

    private void stopDirectoryService() {
        if (directoryService == null) {
            return;
        }

        try {
            directoryService.shutdown();
            directoryService = null;
        } catch (Exception e) {
            throw new IllegalStateException("Error shutting down directory service", e);
        }
    }

    /**
     * Starts the KDC (UDP, localhost:6088) for realm WILDFLY.ORG, backed by the
     * already-started directory service, and points java.security.krb5.conf at
     * the bundled /krb5.conf (remembering any previous value).
     *
     * @throws IllegalStateException if the directory service is not running,
     *         the KDC is already running, or startup fails
     */
    public void startKDC() {
        if (directoryService == null) {
            throw new IllegalStateException("No DirectoryService Available for KDC");
        }
        if (kdcServer != null) {
            throw new IllegalStateException("KDCServer already started");
        }
        final URL configPath = TestKDC.class.getResource("/krb5.conf");
        originalConfig = System.setProperty("java.security.krb5.conf", configPath.getFile());

        KdcServer kdcServer = new KdcServer();
        kdcServer.setServiceName("TestKDCServer");
        kdcServer.setSearchBaseDn("dc=wildfly,dc=org");
        KerberosConfig config = kdcServer.getConfig();
        config.setServicePrincipal("krbtgt/WILDFLY.ORG@WILDFLY.ORG");
        config.setPrimaryRealm("WILDFLY.ORG");
        config.setMaximumTicketLifetime(60000 * 1440);
        config.setMaximumRenewableLifetime(60000 * 10080);

        config.setPaEncTimestampRequired(false);

        UdpTransport udp = new UdpTransport("localhost", 6088);
        kdcServer.addTransports(udp);

        kdcServer.setDirectoryService(directoryService);

        // Launch the server
        try {
            kdcServer.start();
            this.kdcServer = kdcServer;
        } catch (IOException | LdapInvalidDnException e) {
            throw new IllegalStateException("Unable to start KDC", e);
        }
    }

    private void stopKDC() {
        if (kdcServer == null) {
            return;
        }

        kdcServer.stop();
        kdcServer = null;
        // NOTE(review): if the property was previously unset, originalConfig is null
        // and the test's krb5.conf stays installed after stop — consider clearing it.
        if (originalConfig != null) {
            System.setProperty("java.security.krb5.conf", originalConfig);
        }
    }

    // Ensures ./target/apache-ds/working exists and is empty.
    private void createWorkingDir() {
        workingDir = new File("./target/apache-ds/working");
        if (workingDir.exists() == false) {
            if (workingDir.mkdirs() == false) {
                throw new IllegalStateException("Unable to create working dir.");
            }
        }
        emptyDir(workingDir);
    }

    private void cleanWorkingDir() {
        emptyDir(workingDir);
        workingDir = null;
    }

    // Deletes every file directly inside dir (non-recursive).
    // NOTE(review): dir.listFiles() returns null on I/O error — would NPE here; confirm acceptable for tests.
    private void emptyDir(final File dir) {
        for (File current : dir.listFiles()) {
            if (current.delete() == false) {
                try {
                    throw new IllegalStateException(String.format("Unable to delete file '%s' from working dir '%s'.",
                            current.getName(), workingDir.getCanonicalPath()));
                } catch (IOException e) {
                    throw new IllegalStateException(e);
                }
            }
        }
    }

    /** Stops the KDC first, then the directory service (working dir is intentionally kept). */
    public void stopAll() {
        stopKDC();
        stopDirectoryService();
        //cleanWorkingDir();
    }

    /**
     * Writes a keytab for the given principal/password into the working dir,
     * containing one entry per encryption type supported by the key factory.
     *
     * @return absolute path of the generated keytab file
     * @throws IllegalStateException if the keytab cannot be written
     */
    public String generateKeyTab(String keyTabFileName, String principal, String password) {
        log.debug("Generating keytab: " + keyTabFileName);
        List<KeytabEntry> entries = new ArrayList<>();
        KerberosTime ktm = new KerberosTime();

        for (Map.Entry<EncryptionType, EncryptionKey> keyEntry : KerberosKeyFactory.getKerberosKeys(principal, password)
                .entrySet()) {
            EncryptionKey key = keyEntry.getValue();
            log.debug("Adding key=" + key);
            entries.add(new KeytabEntry(principal, KerberosPrincipal.KRB_NT_PRINCIPAL, ktm, (byte) key.getKeyVersion(),
                    key));
        }

        Keytab keyTab = Keytab.getInstance();
        keyTab.setEntries(entries);
        try {
            File keyTabFile = new File(workingDir, keyTabFileName);
            keyTab.write(keyTabFile);
            return keyTabFile.getAbsolutePath();
        } catch (IOException e) {
            throw new IllegalStateException("Cannot create keytab: ", e);
        }
    }
}
package com.MisterBlock11.MagicCraft.models; import net.minecraft.client.model.ModelBase; import net.minecraft.client.model.ModelRenderer; import net.minecraft.entity.Entity; /** * Block Model - Undefined * Created using Tabula 5.0.0 * :D Thank You iChun! */ public class OrbModel extends ModelBase { public ModelRenderer shape2; public ModelRenderer shape4; public ModelRenderer shape5; public ModelRenderer shape6; public ModelRenderer shape7; public ModelRenderer shape8; public ModelRenderer shape9; public ModelRenderer shape10; public ModelRenderer shape11; public ModelRenderer shape2_1; public ModelRenderer shape4_1; public ModelRenderer shape5_1; public ModelRenderer shape6_1; public ModelRenderer shape7_1; public ModelRenderer shape8_1; public ModelRenderer shape9_1; public ModelRenderer shape10_1; public ModelRenderer shape11_1; public ModelRenderer shape2_2; public ModelRenderer shape4_2; public ModelRenderer shape5_2; public ModelRenderer shape6_2; public ModelRenderer shape7_2; public ModelRenderer shape8_2; public ModelRenderer shape9_2; public ModelRenderer shape10_2; public ModelRenderer shape11_2; public ModelRenderer shape2_3; public ModelRenderer shape4_3; public ModelRenderer shape5_3; public ModelRenderer shape6_3; public ModelRenderer shape7_3; public ModelRenderer shape8_3; public ModelRenderer shape9_3; public ModelRenderer shape10_3; public ModelRenderer shape11_3; public ModelRenderer shape2_4; public ModelRenderer shape4_4; public ModelRenderer shape5_4; public ModelRenderer shape6_4; public ModelRenderer shape7_4; public ModelRenderer shape8_4; public ModelRenderer shape9_4; public ModelRenderer shape10_4; public ModelRenderer shape11_4; public ModelRenderer shape2_5; public ModelRenderer shape4_5; public ModelRenderer shape5_5; public ModelRenderer shape6_5; public ModelRenderer shape7_5; public ModelRenderer shape8_5; public ModelRenderer shape9_5; public ModelRenderer shape10_5; public ModelRenderer shape11_5; public OrbModel() { 
this.textureWidth = 32; this.textureHeight = 32; this.shape6_3 = new ModelRenderer(this, 0, 0); this.shape6_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape6_3.addBox(-3.0F, 2.0F, -3.0F, 6, 1, 6, 0.0F); this.shape2 = new ModelRenderer(this, 0, 0); this.shape2.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape2.addBox(-1.0F, 4.0F, -3.0F, 2, 1, 6, 0.0F); this.setRotateAngle(shape2, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape11_4 = new ModelRenderer(this, 0, 0); this.shape11_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape11_4.addBox(-5.0F, 2.0F, -3.0F, 10, 1, 6, 0.0F); this.setRotateAngle(shape11_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape10 = new ModelRenderer(this, 0, 0); this.shape10.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape10.addBox(-3.0F, 2.0F, -5.0F, 6, 1, 10, 0.0F); this.setRotateAngle(shape10, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape10_5 = new ModelRenderer(this, 0, 0); this.shape10_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape10_5.addBox(-3.0F, 2.0F, -5.0F, 6, 1, 10, 0.0F); this.setRotateAngle(shape10_5, 3.141592653589793F, 0.0F, 0.0F); this.shape11_2 = new ModelRenderer(this, 0, 0); this.shape11_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape11_2.addBox(-5.0F, 2.0F, -3.0F, 10, 1, 6, 0.0F); this.setRotateAngle(shape11_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape8 = new ModelRenderer(this, 0, 0); this.shape8.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape8.addBox(-4.0F, 3.0F, -2.0F, 8, 1, 4, 0.0F); this.setRotateAngle(shape8, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape9_5 = new ModelRenderer(this, 0, 0); this.shape9_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape9_5.addBox(-4.0F, 2.0F, -4.0F, 8, 1, 8, 0.0F); this.setRotateAngle(shape9_5, 3.141592653589793F, 0.0F, 0.0F); this.shape6_4 = new ModelRenderer(this, 0, 0); this.shape6_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape6_4.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 6, 0.0F); this.setRotateAngle(shape6_4, 1.5707963267948966F, 
4.71238898038469F, 0.0F); this.shape10_3 = new ModelRenderer(this, 0, 0); this.shape10_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape10_3.addBox(-3.0F, 1.0F, -5.0F, 6, 1, 10, 0.0F); this.shape5_2 = new ModelRenderer(this, 0, 0); this.shape5_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape5_2.addBox(-2.0F, 4.0F, -2.0F, 4, 1, 4, 0.0F); this.setRotateAngle(shape5_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape7_4 = new ModelRenderer(this, 0, 0); this.shape7_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape7_4.addBox(-2.0F, 3.0F, -4.0F, 4, 1, 8, 0.0F); this.setRotateAngle(shape7_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape7_5 = new ModelRenderer(this, 0, 0); this.shape7_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape7_5.addBox(-2.0F, 3.0F, -4.0F, 4, 1, 8, 0.0F); this.setRotateAngle(shape7_5, 3.141592653589793F, 0.0F, 0.0F); this.shape11 = new ModelRenderer(this, 0, 0); this.shape11.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape11.addBox(-5.0F, 2.0F, -3.0F, 10, 1, 6, 0.0F); this.setRotateAngle(shape11, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape11_1 = new ModelRenderer(this, 0, 0); this.shape11_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape11_1.addBox(-5.0F, 2.0F, -3.0F, 10, 1, 6, 0.0F); this.setRotateAngle(shape11_1, 4.71238898038469F, 0.0F, 0.0F); this.shape11_5 = new ModelRenderer(this, 0, 0); this.shape11_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape11_5.addBox(-5.0F, 2.0F, -3.0F, 10, 1, 6, 0.0F); this.setRotateAngle(shape11_5, 3.141592653589793F, 0.0F, 0.0F); this.shape8_1 = new ModelRenderer(this, 0, 0); this.shape8_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape8_1.addBox(-4.0F, 3.0F, -2.0F, 8, 1, 4, 0.0F); this.setRotateAngle(shape8_1, 4.71238898038469F, 0.0F, 0.0F); this.shape6_1 = new ModelRenderer(this, 0, 0); this.shape6_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape6_1.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 6, 0.0F); this.setRotateAngle(shape6_1, 4.71238898038469F, 0.0F, 0.0F); this.shape6_2 = new 
ModelRenderer(this, 0, 0); this.shape6_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape6_2.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 6, 0.0F); this.setRotateAngle(shape6_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape8_4 = new ModelRenderer(this, 0, 0); this.shape8_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape8_4.addBox(-4.0F, 3.0F, -2.0F, 8, 1, 4, 0.0F); this.setRotateAngle(shape8_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape7_1 = new ModelRenderer(this, 0, 0); this.shape7_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape7_1.addBox(-2.0F, 3.0F, -4.0F, 4, 1, 8, 0.0F); this.setRotateAngle(shape7_1, 4.71238898038469F, 0.0F, 0.0F); this.shape4_1 = new ModelRenderer(this, 0, 0); this.shape4_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape4_1.addBox(-3.0F, 4.0F, -1.0F, 6, 1, 2, 0.0F); this.setRotateAngle(shape4_1, 4.71238898038469F, 0.0F, 0.0F); this.shape2_2 = new ModelRenderer(this, 0, 0); this.shape2_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape2_2.addBox(-1.0F, 4.0F, -3.0F, 2, 1, 6, 0.0F); this.setRotateAngle(shape2_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape8_2 = new ModelRenderer(this, 0, 0); this.shape8_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape8_2.addBox(-4.0F, 3.0F, -2.0F, 8, 1, 4, 0.0F); this.setRotateAngle(shape8_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape4_2 = new ModelRenderer(this, 0, 0); this.shape4_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape4_2.addBox(-3.0F, 4.0F, -1.0F, 6, 1, 2, 0.0F); this.setRotateAngle(shape4_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape11_3 = new ModelRenderer(this, 0, 0); this.shape11_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape11_3.addBox(-5.0F, 1.0F, -3.0F, 10, 1, 6, 0.0F); this.shape10_2 = new ModelRenderer(this, 0, 0); this.shape10_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape10_2.addBox(-3.0F, 2.0F, -5.0F, 6, 1, 10, 0.0F); this.setRotateAngle(shape10_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape9_3 = new ModelRenderer(this, 0, 0); this.shape9_3.setRotationPoint(0.0F, 19.0F, 
0.0F); this.shape9_3.addBox(-4.0F, 1.0F, -4.0F, 8, 1, 8, 0.0F); this.shape4_3 = new ModelRenderer(this, 0, 0); this.shape4_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape4_3.addBox(-3.0F, 3.0F, -1.0F, 6, 1, 2, 0.0F); this.shape2_3 = new ModelRenderer(this, 0, 0); this.shape2_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape2_3.addBox(-1.0F, 3.0F, -3.0F, 2, 1, 6, 0.0F); this.shape9 = new ModelRenderer(this, 0, 0); this.shape9.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape9.addBox(-4.0F, 2.0F, -4.0F, 8, 1, 8, 0.0F); this.setRotateAngle(shape9, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape8_3 = new ModelRenderer(this, 0, 0); this.shape8_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape8_3.addBox(-4.0F, 2.0F, -2.0F, 8, 1, 4, 0.0F); this.shape6_5 = new ModelRenderer(this, 0, 0); this.shape6_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape6_5.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 6, 0.0F); this.setRotateAngle(shape6_5, 3.141592653589793F, 0.0F, 0.0F); this.shape10_4 = new ModelRenderer(this, 0, 0); this.shape10_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape10_4.addBox(-3.0F, 2.0F, -5.0F, 6, 1, 10, 0.0F); this.setRotateAngle(shape10_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape4_5 = new ModelRenderer(this, 0, 0); this.shape4_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape4_5.addBox(-3.0F, 4.0F, -1.0F, 6, 1, 2, 0.0F); this.setRotateAngle(shape4_5, 3.141592653589793F, 0.0F, 0.0F); this.shape10_1 = new ModelRenderer(this, 0, 0); this.shape10_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape10_1.addBox(-3.0F, 2.0F, -5.0F, 6, 1, 10, 0.0F); this.setRotateAngle(shape10_1, 4.71238898038469F, 0.0F, 0.0F); this.shape5_3 = new ModelRenderer(this, 0, 0); this.shape5_3.setRotationPoint(0.0F, 19.0F, 0.0F); this.shape5_3.addBox(-2.0F, 3.0F, -2.0F, 4, 1, 4, 0.0F); this.shape2_4 = new ModelRenderer(this, 0, 0); this.shape2_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape2_4.addBox(-1.0F, 4.0F, -3.0F, 2, 1, 6, 0.0F); this.setRotateAngle(shape2_4, 
1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape5_4 = new ModelRenderer(this, 0, 0); this.shape5_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape5_4.addBox(-2.0F, 4.0F, -2.0F, 4, 1, 4, 0.0F); this.setRotateAngle(shape5_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape2_5 = new ModelRenderer(this, 0, 0); this.shape2_5.setRotationPoint(0.0F, 14.0F, 0.0F); this.shape2_5.addBox(-1.0F, 4.0F, -3.0F, 2, 1, 6, 0.0F); this.setRotateAngle(shape2_5, 3.141592653589793F, 0.0F, 0.0F); this.shape7_2 = new ModelRenderer(this, 0, 0); this.shape7_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape7_2.addBox(-2.0F, 3.0F, -4.0F, 4, 1, 8, 0.0F); this.setRotateAngle(shape7_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape6 = new ModelRenderer(this, 0, 0); this.shape6.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape6.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 6, 0.0F); this.setRotateAngle(shape6, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape4_4 = new ModelRenderer(this, 0, 0); this.shape4_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape4_4.addBox(-3.0F, 4.0F, -1.0F, 6, 1, 2, 0.0F); this.setRotateAngle(shape4_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape9_2 = new ModelRenderer(this, 0, 0); this.shape9_2.setRotationPoint(0.0F, 16.0F, 2.0F); this.shape9_2.addBox(-4.0F, 2.0F, -4.0F, 8, 1, 8, 0.0F); this.setRotateAngle(shape9_2, 1.5707963267948966F, 0.0F, 0.0F); this.shape4 = new ModelRenderer(this, 0, 0); this.shape4.setRotationPoint(2.0F, 16.0F, 0.0F); this.shape4.addBox(-3.0F, 4.0F, -1.0F, 6, 1, 2, 0.0F); this.setRotateAngle(shape4, 1.5707963267948966F, 1.5707963267948966F, 0.0F); this.shape9_4 = new ModelRenderer(this, 0, 0); this.shape9_4.setRotationPoint(-2.0F, 16.0F, 0.0F); this.shape9_4.addBox(-4.0F, 2.0F, -4.0F, 8, 1, 8, 0.0F); this.setRotateAngle(shape9_4, 1.5707963267948966F, 4.71238898038469F, 0.0F); this.shape2_1 = new ModelRenderer(this, 0, 0); this.shape2_1.setRotationPoint(0.0F, 16.0F, -2.0F); this.shape2_1.addBox(-1.0F, 4.0F, -3.0F, 2, 1, 
6, 0.0F); this.setRotateAngle(shape2_1, 4.71238898038469F, 0.0F, 0.0F);
// --- tail of the constructor: remaining box parts. Each part is created,
// positioned (setRotationPoint), given a box (addBox: offsets + integer
// dimensions + scale), and optionally rotated. Rotation constants are
// radians: 1.5707963... = pi/2, 3.1415926... = pi, 4.7123889... = 3*pi/2.
this.shape9_1 = new ModelRenderer(this, 0, 0);
this.shape9_1.setRotationPoint(0.0F, 16.0F, -2.0F);
this.shape9_1.addBox(-4.0F, 2.0F, -4.0F, 8, 1, 8, 0.0F);
this.setRotateAngle(shape9_1, 4.71238898038469F, 0.0F, 0.0F);
this.shape8_5 = new ModelRenderer(this, 0, 0);
this.shape8_5.setRotationPoint(0.0F, 14.0F, 0.0F);
this.shape8_5.addBox(-4.0F, 3.0F, -2.0F, 8, 1, 4, 0.0F);
this.setRotateAngle(shape8_5, 3.141592653589793F, 0.0F, 0.0F);
this.shape5_5 = new ModelRenderer(this, 0, 0);
this.shape5_5.setRotationPoint(0.0F, 14.0F, 0.0F);
this.shape5_5.addBox(-2.0F, 4.0F, -2.0F, 4, 1, 4, 0.0F);
this.setRotateAngle(shape5_5, 3.141592653589793F, 0.0F, 0.0F);
this.shape7 = new ModelRenderer(this, 0, 0);
this.shape7.setRotationPoint(2.0F, 16.0F, 0.0F);
this.shape7.addBox(-2.0F, 3.0F, -4.0F, 4, 1, 8, 0.0F);
this.setRotateAngle(shape7, 1.5707963267948966F, 1.5707963267948966F, 0.0F);
this.shape5 = new ModelRenderer(this, 0, 0);
this.shape5.setRotationPoint(2.0F, 16.0F, 0.0F);
this.shape5.addBox(-2.0F, 4.0F, -2.0F, 4, 1, 4, 0.0F);
this.setRotateAngle(shape5, 1.5707963267948966F, 1.5707963267948966F, 0.0F);
this.shape5_1 = new ModelRenderer(this, 0, 0);
this.shape5_1.setRotationPoint(0.0F, 16.0F, -2.0F);
this.shape5_1.addBox(-2.0F, 4.0F, -2.0F, 4, 1, 4, 0.0F);
this.setRotateAngle(shape5_1, 4.71238898038469F, 0.0F, 0.0F);
this.shape7_3 = new ModelRenderer(this, 0, 0);
this.shape7_3.setRotationPoint(0.0F, 19.0F, 0.0F);
this.shape7_3.addBox(-2.0F, 2.0F, -4.0F, 4, 1, 8, 0.0F);
}

/**
 * Renders every part of the model. Only f5 (the per-frame scale factor)
 * is used; the entity and the rotation/animation arguments (f..f4) are
 * ignored because this model has no animated parts.
 */
@Override
public void render(Entity entity, float f, float f1, float f2, float f3, float f4, float f5) {
    // Render order matches declaration order in the constructor; all parts
    // are drawn unconditionally.
    this.shape6_3.render(f5); this.shape2.render(f5); this.shape11_4.render(f5); this.shape10.render(f5); this.shape10_5.render(f5);
    this.shape11_2.render(f5); this.shape8.render(f5); this.shape9_5.render(f5); this.shape6_4.render(f5); this.shape10_3.render(f5);
    this.shape5_2.render(f5); this.shape7_4.render(f5); this.shape7_5.render(f5); this.shape11.render(f5); this.shape11_1.render(f5);
    this.shape11_5.render(f5); this.shape8_1.render(f5); this.shape6_1.render(f5); this.shape6_2.render(f5); this.shape8_4.render(f5);
    this.shape7_1.render(f5); this.shape4_1.render(f5); this.shape2_2.render(f5); this.shape8_2.render(f5); this.shape4_2.render(f5);
    this.shape11_3.render(f5); this.shape10_2.render(f5); this.shape9_3.render(f5); this.shape4_3.render(f5); this.shape2_3.render(f5);
    this.shape9.render(f5); this.shape8_3.render(f5); this.shape6_5.render(f5); this.shape10_4.render(f5); this.shape4_5.render(f5);
    this.shape10_1.render(f5); this.shape5_3.render(f5); this.shape2_4.render(f5); this.shape5_4.render(f5); this.shape2_5.render(f5);
    this.shape7_2.render(f5); this.shape6.render(f5); this.shape4_4.render(f5); this.shape9_2.render(f5); this.shape4.render(f5);
    this.shape9_4.render(f5); this.shape2_1.render(f5); this.shape9_1.render(f5); this.shape8_5.render(f5); this.shape5_5.render(f5);
    this.shape7.render(f5); this.shape5.render(f5); this.shape5_1.render(f5); this.shape7_3.render(f5);
}

/**
 * Renders the model without an Entity context, e.g. for item/GUI rendering.
 * Draws exactly the same parts in the same order as
 * render(Entity, float, ...) above; f5 is the scale factor.
 */
public void renderModel(float f5) {
    this.shape6_3.render(f5); this.shape2.render(f5); this.shape11_4.render(f5); this.shape10.render(f5); this.shape10_5.render(f5);
    this.shape11_2.render(f5); this.shape8.render(f5); this.shape9_5.render(f5); this.shape6_4.render(f5); this.shape10_3.render(f5);
    this.shape5_2.render(f5); this.shape7_4.render(f5); this.shape7_5.render(f5); this.shape11.render(f5); this.shape11_1.render(f5);
    this.shape11_5.render(f5); this.shape8_1.render(f5); this.shape6_1.render(f5); this.shape6_2.render(f5); this.shape8_4.render(f5);
    this.shape7_1.render(f5); this.shape4_1.render(f5); this.shape2_2.render(f5); this.shape8_2.render(f5); this.shape4_2.render(f5);
    this.shape11_3.render(f5); this.shape10_2.render(f5); this.shape9_3.render(f5); this.shape4_3.render(f5); this.shape2_3.render(f5);
    this.shape9.render(f5); this.shape8_3.render(f5); this.shape6_5.render(f5); this.shape10_4.render(f5); this.shape4_5.render(f5);
    this.shape10_1.render(f5); this.shape5_3.render(f5); this.shape2_4.render(f5); this.shape5_4.render(f5); this.shape2_5.render(f5);
    this.shape7_2.render(f5); this.shape6.render(f5); this.shape4_4.render(f5); this.shape9_2.render(f5); this.shape4.render(f5);
    this.shape9_4.render(f5); this.shape2_1.render(f5); this.shape9_1.render(f5); this.shape8_5.render(f5); this.shape5_5.render(f5);
    this.shape7.render(f5); this.shape5.render(f5); this.shape5_1.render(f5); this.shape7_3.render(f5);
}

/**
 * This is a helper function from Tabula to set the rotation of model parts.
 * Angles are in radians and are written straight into the part's public
 * rotateAngle fields.
 */
public void setRotateAngle(ModelRenderer modelRenderer, float x, float y, float z) {
    modelRenderer.rotateAngleX = x;
    modelRenderer.rotateAngleY = y;
    modelRenderer.rotateAngleZ = z;
}
}
package com.aaron.pseplanner.service.implementation; import android.util.Pair; import com.aaron.pseplanner.test.utils.UnitTestUtils; import com.aaron.pseplanner.bean.TickerDto; import com.aaron.pseplanner.response.phisix.ResponsePhisixStockWrapper; import com.aaron.pseplanner.service.PhisixService; import org.junit.Test; import org.mockito.InjectMocks; import org.mockito.Mock; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.List; import edu.emory.mathcs.backport.java.util.Collections; import io.reactivex.Single; import static org.mockito.Mockito.when; /** * Created by Aaron on 28/12/2017. */ public class PhisixHttpClientTest extends AbstractHttpClientTest { @Mock private PhisixService service; @InjectMocks private PhisixHttpClient client; @Test public void givenAStock_whenGetTickerWithTheStockSymbol_thenTheStockShouldBeReturned() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); String name = "Alsons Consolidated Resource"; String symbol = "ACR"; double percentChange = 13.89; long volume = 1_719_000; double amount = 1.35; ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(name, amount, percentChange, volume, symbol, dateUpdated); givenGetStockSymbolReturnMockedResponse(symbol, mockedResponseWrapper); Pair<TickerDto, Date> response = whenApiGetTickerRequest(symbol); Pair<TickerDto, Date> expectedResponse = thenTheResponseIs(symbol, name, volume, amount, percentChange, dateUpdated); thenResponseShouldMatchExpectedResponse(response, expectedResponse); } @Test public void givenAStock_whenGetTickerWithNullSymbol_thenIllegalArgumentExceptionIsThrownWithMessage() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); String name = "Alsons Consolidated Resource"; String symbol = "ACR"; double percentChange = 13.89; long volume = 1_719_000; double amount = 1.35; ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(name, amount, 
percentChange, volume, symbol, dateUpdated); givenGetStockSymbolReturnMockedResponse(symbol, mockedResponseWrapper); expectingExceptionWillBeThrownWithMessage(IllegalArgumentException.class, "Symbol must not be empty"); whenApiGetTickerRequest(null); } @Test public void givenAStock_whenGetTickerWithEmptySymbol_thenIllegalArgumentExceptionIsThrownWithMessage() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); String name = "Alsons Consolidated Resource"; String symbol = "ACR"; double percentChange = 13.89; long volume = 1_719_000; double amount = 1.35; ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(name, amount, percentChange, volume, symbol, dateUpdated); givenGetStockSymbolReturnMockedResponse(symbol, mockedResponseWrapper); expectingExceptionWillBeThrownWithMessage(IllegalArgumentException.class, "Symbol must not be empty"); whenApiGetTickerRequest(""); } @Test public void givenAListOfStocks_whenGetAllTickers_thenTheListOfStocksShouldBeReturned() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); List<String> nameList = Arrays.asList("Alsons Consolidated Resource", "Calata Corporation", "Eagle Cement Corporation", "Philippine National Bank", "Wilcon Depot, Inc."); List<String> symbolList = Arrays.asList("ACR", "CAL", "EAGLE", "PNB", "WLCON"); List<Double> percentChangeList = Arrays.asList(13.89, -29.12, 0.10, 3.93, 37.05); List<Long> volumeList = Arrays.asList(1_719_000L, 380_000L, 153_400L, 19_800L, 730_000L); List<Double> amountList = Arrays.asList(1.35, 2.31, 14.44, 57.15, 8.03); ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(nameList, amountList, percentChangeList, volumeList, symbolList, dateUpdated); givenGetAllStocksReturnMockedResponse(mockedResponseWrapper); Pair<List<TickerDto>, Date> response = whenApiGetAllTickersRequest(); Pair<List<TickerDto>, Date> expectedResponse = thenTheResponseAre(symbolList, nameList, volumeList, amountList, 
percentChangeList, dateUpdated); thenResponsesShouldMatchExpectedResponses(response, expectedResponse); } @Test public void givenAListOfStocks_whenGetTickerListWithTheListOfStockSymbols_thenTheListOfStocksShouldBeReturned() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); List<String> nameList = Arrays.asList("Alsons Consolidated Resource", "Calata Corporation", "Eagle Cement Corporation", "Philippine National Bank", "Wilcon Depot, Inc."); List<String> symbolList = Arrays.asList("ACR", "CAL", "EAGLE", "PNB", "WLCON"); List<Double> percentChangeList = Arrays.asList(13.89, -29.12, 0.10, 3.93, 37.05); List<Long> volumeList = Arrays.asList(1_719_000L, 380_000L, 153_400L, 19_800L, 730_000L); List<Double> amountList = Arrays.asList(1.35, 2.31, 14.44, 57.15, 8.03); List<ResponsePhisixStockWrapper> mockedResponseWrapperList = givenListOfResponseStockWrapper(nameList, amountList, percentChangeList, volumeList, symbolList, dateUpdated); givenGetStockSymbolListReturnMockedResponse(symbolList, mockedResponseWrapperList); Pair<List<TickerDto>, Date> response = whenApiGetListOfTickersRequest(symbolList); Pair<List<TickerDto>, Date> expectedResponse = thenTheResponseAre(symbolList, nameList, volumeList, amountList, percentChangeList, dateUpdated); thenResponsesShouldMatchExpectedResponses(response, expectedResponse); } @Test public void givenAListOfStocks_whenGetTickerListWithEmptyList_thenIllegalArgumentExceptionIsThrownWithMessage() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); List<String> nameList = Arrays.asList("Alsons Consolidated Resource", "Calata Corporation", "Eagle Cement Corporation", "Philippine National Bank", "Wilcon Depot, Inc."); List<String> symbolList = Arrays.asList("ACR", "CAL", "EAGLE", "PNB", "WLCON"); List<Double> percentChangeList = Arrays.asList(13.89, -29.12, 0.10, 3.93, 37.05); List<Long> volumeList = Arrays.asList(1_719_000L, 380_000L, 153_400L, 19_800L, 730_000L); List<Double> amountList = 
Arrays.asList(1.35, 2.31, 14.44, 57.15, 8.03); ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(nameList, amountList, percentChangeList, volumeList, symbolList, dateUpdated); givenGetAllStocksReturnMockedResponse(mockedResponseWrapper); expectingExceptionWillBeThrownWithMessage(IllegalArgumentException.class, "No trade plan/s to update"); whenApiGetListOfTickersRequest(Collections.<String> emptyList()); } @Test public void givenAListOfStocks_whenGetTickerListWithNullList_thenIllegalArgumentExceptionIsThrownWithMessage() { Date dateUpdated = UnitTestUtils.newDateTime(2018, 5, 17, 9, 45, 21); List<String> nameList = Arrays.asList("Alsons Consolidated Resource", "Calata Corporation", "Eagle Cement Corporation", "Philippine National Bank", "Wilcon Depot, Inc."); List<String> symbolList = Arrays.asList("ACR", "CAL", "EAGLE", "PNB", "WLCON"); List<Double> percentChangeList = Arrays.asList(13.89, -29.12, 0.10, 3.93, 37.05); List<Long> volumeList = Arrays.asList(1_719_000L, 380_000L, 153_400L, 19_800L, 730_000L); List<Double> amountList = Arrays.asList(1.35, 2.31, 14.44, 57.15, 8.03); ResponsePhisixStockWrapper mockedResponseWrapper = givenResponseStockWrapper(nameList, amountList, percentChangeList, volumeList, symbolList, dateUpdated); givenGetAllStocksReturnMockedResponse(mockedResponseWrapper); expectingExceptionWillBeThrownWithMessage(IllegalArgumentException.class, "No trade plan/s to update"); whenApiGetListOfTickersRequest(null); } private void givenGetStockSymbolListReturnMockedResponse(List<String> symbol, List<ResponsePhisixStockWrapper> mockedResponseWrapperList) { int size = symbol.size(); for(int i = 0; i < size; i++) { when(service.getStock(symbol.get(i))).thenReturn(Single.just(mockedResponseWrapperList.get(i))); } } private void givenGetStockSymbolReturnMockedResponse(String symbol, ResponsePhisixStockWrapper mockedResponse) { when(service.getStock(symbol)).thenReturn(Single.just(mockedResponse)); } private void 
givenGetAllStocksReturnMockedResponse(ResponsePhisixStockWrapper mockedResponse) { when(service.getStock()).thenReturn(Single.just(mockedResponse)); } private Pair<TickerDto, Date> whenApiGetTickerRequest(String symbol) { return client.getTicker(symbol).blockingGet(); } private Pair<List<TickerDto>, Date> whenApiGetListOfTickersRequest(Collection<String> symbols) { return client.getTickerList(symbols).blockingGet(); } private Pair<List<TickerDto>, Date> whenApiGetAllTickersRequest() { return client.getAllTickerList().blockingGet(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.vault.util; import java.io.IOException; import java.util.Calendar; import javax.jcr.Node; import javax.jcr.Property; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.nodetype.NodeType; import org.apache.jackrabbit.commons.JcrUtils; import org.apache.jackrabbit.vault.fs.config.ConfigurationException; import org.apache.jackrabbit.vault.packaging.integration.IntegrationTestBase; import org.junit.Test; /** * {@code RCPIT}... 
*/
public class RCPIT extends IntegrationTestBase {

    /** Root of the copy source subtree. */
    public static final String SRC_PATH = "/testroot/src";

    /** Test node created under the source root. */
    public static final String SRC_TEST_NODE_PATH = "/testroot/src/a";

    /** Root of the copy destination subtree. */
    public static final String DST_PATH = "/testroot/dst";

    /** Expected location of the test node after the copy. */
    public static final String DST_TEST_NODE_PATH = "/testroot/dst/a";

    /**
     * Copies a plain nt:unstructured node and checks that single-value and
     * multi-value string properties arrive at the destination unchanged.
     */
    @Test
    public void testSimple() throws IOException, RepositoryException, ConfigurationException {
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_UNSTRUCTURED, NodeType.NT_UNSTRUCTURED, admin, true);
        a.setProperty("p0", "0");
        a.setProperty("p1", "1");
        a.setProperty("m0", new String[]{"0", "1", "2"}, PropertyType.STRING);
        admin.save();
        assertNodeExists(SRC_TEST_NODE_PATH);
        RepositoryCopier rcp = new RepositoryCopier();
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/p0", "0");
        assertProperty(DST_TEST_NODE_PATH + "/p1", "1");
        assertProperty(DST_TEST_NODE_PATH + "/m0", new String[]{"0", "1", "2"});
    }

    /**
     * Copies a node that already carries mix:title and checks that both the
     * mixin registration and the mixin-defined property are copied.
     */
    @Test
    public void testMixin() throws IOException, RepositoryException, ConfigurationException {
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_FOLDER, NodeType.NT_FOLDER, admin, true);
        RepositoryCopier rcp = new RepositoryCopier();
        a.addMixin(NodeType.MIX_TITLE);
        a.setProperty("jcr:title", "Hello");
        admin.save();
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/jcr:title", "Hello");
        assertProperty(DST_TEST_NODE_PATH + "/jcr:mixinTypes", new String[]{"mix:title"});
    }

    /**
     * Copies first, then adds mix:title on the source and re-copies with
     * update enabled; the destination must pick up the new mixin and property.
     */
    @Test
    public void testAddMixin() throws IOException, RepositoryException, ConfigurationException {
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_FOLDER, NodeType.NT_FOLDER, admin, true);
        RepositoryCopier rcp = new RepositoryCopier();
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertNodeExists(DST_TEST_NODE_PATH);
        assertPropertyMissing(DST_TEST_NODE_PATH + "/jcr:title");
        a.addMixin(NodeType.MIX_TITLE);
        a.setProperty("jcr:title", "Hello");
        admin.save();
        assertProperty(SRC_TEST_NODE_PATH + "/jcr:title", "Hello");
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(false);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/jcr:title", "Hello");
        assertProperty(DST_TEST_NODE_PATH + "/jcr:mixinTypes", new String[]{"mix:title"});
    }

    /**
     * Copies a node with mix:title, then removes the mixin on the source and
     * re-copies with update enabled; the destination must lose the mixin and
     * the mixin-defined property.
     */
    @Test
    public void testRemoveMixin() throws IOException, RepositoryException, ConfigurationException {
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_FOLDER, NodeType.NT_FOLDER, admin, true);
        RepositoryCopier rcp = new RepositoryCopier();
        a.addMixin(NodeType.MIX_TITLE);
        a.setProperty("jcr:title", "Hello");
        admin.save();
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/jcr:title", "Hello");
        assertProperty(DST_TEST_NODE_PATH + "/jcr:mixinTypes", new String[]{"mix:title"});
        a.removeMixin(NodeType.MIX_TITLE);
        admin.save();
        // removing a mixin should remove the undeclared properties
        assertPropertyMissing(SRC_TEST_NODE_PATH + "/jcr:title");
        assertPropertyMissingOrEmpty(SRC_TEST_NODE_PATH + "/jcr:mixinTypes");
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(false);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertNodeExists(DST_TEST_NODE_PATH);
        assertPropertyMissing(DST_TEST_NODE_PATH + "/jcr:title");
        assertPropertyMissingOrEmpty(DST_TEST_NODE_PATH + "/jcr:mixinTypes");
    }

    /**
     * Verifies onlyNewer semantics on an nt:file: a change is only copied when
     * the source jcr:content's jcr:lastModified advances past the destination's.
     */
    @Test
    public void testOnlyNewer() throws IOException, RepositoryException, ConfigurationException {
        Calendar now = Calendar.getInstance();
        Calendar then = Calendar.getInstance();
        // 'then' is 1 ms after 'now' — the smallest increment that makes src "newer"
        then.setTimeInMillis(now.getTimeInMillis() + 1);
        // create /testroot/src/a/jcr:content with 'now' as last modified
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_FOLDER, NodeType.NT_FILE, admin, false);
        Node content = a.addNode(Node.JCR_CONTENT, NodeType.NT_UNSTRUCTURED);
        content.setProperty(Property.JCR_LAST_MODIFIED, now);
        content.setProperty("p0", "0");
        admin.save();
        assertProperty(SRC_TEST_NODE_PATH + "/jcr:content/p0", "0");
        RepositoryCopier rcp = new RepositoryCopier();
        rcp.setOnlyNewer(false);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "0");
        // modify property but don't update last modified
        content.setProperty("p0", "1");
        admin.save();
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(true);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        // property should still be the old value, since src is not "newer"
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "0");
        // now update last modified
        content.setProperty(Property.JCR_LAST_MODIFIED, then);
        admin.save();
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(true);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        // property should now be the new value, since src is now "newer"
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "1");
    }

    /**
     * Special test where the source node is nt:file with no mixins, and the destination node is nt:file with a mixin
     * and properties, and content is updated since it is newer (JCRVLT-87)
     */
    @Test
    public void testMissingMixinWithNewer() throws IOException, RepositoryException, ConfigurationException {
        Calendar now = Calendar.getInstance();
        Calendar then = Calendar.getInstance();
        then.setTimeInMillis(now.getTimeInMillis() + 1);
        // create /testroot/src/a/jcr:content with 'now' as last modified
        Node a = JcrUtils.getOrCreateByPath(SRC_TEST_NODE_PATH, NodeType.NT_FOLDER, NodeType.NT_FILE, admin, false);
        Node content = a.addNode(Node.JCR_CONTENT, NodeType.NT_UNSTRUCTURED);
        content.setProperty(Property.JCR_LAST_MODIFIED, now);
        content.setProperty("p0", "0");
        admin.save();
        assertProperty(SRC_TEST_NODE_PATH + "/jcr:content/p0", "0");
        RepositoryCopier rcp = new RepositoryCopier();
        rcp.setOnlyNewer(false);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "0");
        // modify source property and add mixin to destination
        content.setProperty("p0", "1");
        Node dst = admin.getNode(DST_TEST_NODE_PATH);
        dst.addMixin(NodeType.MIX_TITLE);
        dst.setProperty(Property.JCR_TITLE, "Hello");
        admin.save();
        assertProperty(DST_TEST_NODE_PATH + "/jcr:title", "Hello");
        assertProperty(DST_TEST_NODE_PATH + "/jcr:mixinTypes", new String[]{"mix:title"});
        // now perform copy
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(true);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        // property should still be the old value, since src is not "newer"
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "0");
        // mixin should already be gone, since file does not have a lastModified.
        assertPropertyMissingOrEmpty(DST_TEST_NODE_PATH + "/jcr:mixinTypes");
        assertPropertyMissing(DST_TEST_NODE_PATH + "/jcr:title");
        // now update last modified
        content.setProperty(Property.JCR_LAST_MODIFIED, then);
        admin.save();
        rcp = new RepositoryCopier();
        rcp.setOnlyNewer(true);
        rcp.setUpdate(true);
        rcp.copy(admin, SRC_PATH, admin, DST_PATH, true);
        // property should now be the new value, since src is now "newer"
        assertProperty(DST_TEST_NODE_PATH + "/jcr:content/p0", "1");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.locator;

import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.BiMultiValMap;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.SortedBiMultiValMap;

/**
 * Tracks the token ring: which tokens each endpoint owns, plus the transient
 * state of nodes that are bootstrapping, leaving, or moving, and the pending
 * ranges derived from that state.
 *
 * Thread-safety: all reads and writes of ring state are guarded by {@code lock}
 * (a fair ReentrantReadWriteLock); {@code sortedTokens} and {@code ringVersion}
 * are volatile so lock-free readers see a consistent snapshot.
 */
public class TokenMetadata
{
    private static final Logger logger = LoggerFactory.getLogger(TokenMetadata.class);

    /**
     * Maintains token to endpoint map of every node in the cluster.
     * Each Token is associated with exactly one Address, but each Address may have
     * multiple tokens.  Hence, the BiMultiValMap collection.
     */
    private final BiMultiValMap<Token, InetAddress> tokenToEndpointMap;

    /** Maintains endpoint to host ID map of every node in the cluster */
    private final BiMap<InetAddress, UUID> endpointToHostIdMap;

    // Prior to CASSANDRA-603, we just had <tt>Map<Range, InetAddress> pendingRanges<tt>,
    // which was added to when a node began bootstrap and removed from when it finished.
    //
    // This is inadequate when multiple changes are allowed simultaneously.  For example,
    // suppose that there is a ring of nodes A, C and E, with replication factor 3.
    // Node D bootstraps between C and E, so its pending ranges will be E-A, A-C and C-D.
    // Now suppose node B bootstraps between A and C at the same time. Its pending ranges
    // would be C-E, E-A and A-B. Now both nodes need to be assigned pending range E-A,
    // which we would be unable to represent with the old Map.  The same thing happens
    // even more obviously for any nodes that boot simultaneously between same two nodes.
    //
    // So, we made two changes:
    //
    // First, we changed pendingRanges to a <tt>Multimap<Range, InetAddress></tt> (now
    // <tt>Map<String, Multimap<Range, InetAddress>></tt>, because replication strategy
    // and options are per-KeySpace).
    //
    // Second, we added the bootstrapTokens and leavingEndpoints collections, so we can
    // rebuild pendingRanges from the complete information of what is going on, when
    // additional changes are made mid-operation.
    //
    // Finally, note that recording the tokens of joining nodes in bootstrapTokens also
    // means we can detect and reject the addition of multiple nodes at the same token
    // before one becomes part of the ring.
    private final BiMultiValMap<Token, InetAddress> bootstrapTokens = new BiMultiValMap<>();

    // (don't need to record Token here since it's still part of tokenToEndpointMap until it's done leaving)
    private final Set<InetAddress> leavingEndpoints = new HashSet<>();

    // this is a cache of the calculation from {tokenToEndpointMap, bootstrapTokens, leavingEndpoints}
    private final ConcurrentMap<String, PendingRangeMaps> pendingRanges = new ConcurrentHashMap<String, PendingRangeMaps>();

    // nodes which are migrating to the new tokens in the ring
    private final Set<Pair<Token, InetAddress>> movingEndpoints = new HashSet<>();

    /* Use this lock for manipulating the token map */
    private final ReadWriteLock lock = new ReentrantReadWriteLock(true);

    // cached, sorted view of tokenToEndpointMap.keySet(); rebuilt whenever the ring changes
    private volatile ArrayList<Token> sortedTokens;

    private final Topology topology;

    public final IPartitioner partitioner;

    // orders endpoints by the unsigned bytes of their address, for the sorted token map
    private static final Comparator<InetAddress> inetaddressCmp = new Comparator<InetAddress>()
    {
        public int compare(InetAddress o1, InetAddress o2)
        {
            return ByteBuffer.wrap(o1.getAddress()).compareTo(ByteBuffer.wrap(o2.getAddress()));
        }
    };

    // signals replication strategies that nodes have joined or left the ring and they need to recompute ownership
    private volatile long ringVersion = 0;

    public TokenMetadata()
    {
        this(SortedBiMultiValMap.<Token, InetAddress>create(null, inetaddressCmp),
             HashBiMap.<InetAddress, UUID>create(),
             new Topology(),
             DatabaseDescriptor.getPartitioner());
    }

    private TokenMetadata(BiMultiValMap<Token, InetAddress> tokenToEndpointMap, BiMap<InetAddress, UUID> endpointsMap, Topology topology, IPartitioner partitioner)
    {
        this.tokenToEndpointMap = tokenToEndpointMap;
        this.topology = topology;
        this.partitioner = partitioner;
        endpointToHostIdMap = endpointsMap;
        sortedTokens = sortTokens();
    }

    /**
     * To be used by tests only (via {@link StorageService#setPartitionerUnsafe}).
     */
    @VisibleForTesting
    public TokenMetadata cloneWithNewPartitioner(IPartitioner newPartitioner)
    {
        // NOTE: shares the underlying maps with this instance; only safe for test use
        return new TokenMetadata(tokenToEndpointMap, endpointToHostIdMap, topology, newPartitioner);
    }

    /** @return the ring's tokens in sorted order (keySet of the sorted bi-map preserves ordering) */
    private ArrayList<Token> sortTokens()
    {
        return new ArrayList<>(tokenToEndpointMap.keySet());
    }

    /** @return the number of nodes bootstrapping into source's primary range */
    public int pendingRangeChanges(InetAddress source)
    {
        int n = 0;
        // computed before taking the lock; getTokens acquires the read lock itself
        Collection<Range<Token>> sourceRanges = getPrimaryRangesFor(getTokens(source));
        lock.readLock().lock();
        try
        {
            for (Token token : bootstrapTokens.keySet())
                for (Range<Token> range : sourceRanges)
                    if (range.contains(token))
                        n++;
        }
        finally
        {
            lock.readLock().unlock();
        }
        return n;
    }

    /**
     * Update token map with a single token/endpoint pair in normal state.
     */
    public void updateNormalToken(Token token, InetAddress endpoint)
    {
        updateNormalTokens(Collections.singleton(token), endpoint);
    }

    /**
     * Update token map with a set of tokens for one endpoint in normal state.
     *
     * @param tokens   the tokens now owned by endpoint (must be non-empty)
     * @param endpoint the owning node
     */
    public void updateNormalTokens(Collection<Token> tokens, InetAddress endpoint)
    {
        Multimap<InetAddress, Token> endpointTokens = HashMultimap.create();
        for (Token token : tokens)
            endpointTokens.put(endpoint, token);
        updateNormalTokens(endpointTokens);
    }

    /**
     * Update token map with a set of token/endpoint pairs in normal state.
     *
     * Prefer this whenever there are multiple pairs to update, as each update (whether a single or multiple)
     * is expensive (CASSANDRA-3831).
     */
    public void updateNormalTokens(Multimap<InetAddress, Token> endpointTokens)
    {
        if (endpointTokens.isEmpty())
            return;

        lock.writeLock().lock();
        try
        {
            boolean shouldSortTokens = false;
            for (InetAddress endpoint : endpointTokens.keySet())
            {
                Collection<Token> tokens = endpointTokens.get(endpoint);

                assert tokens != null && !tokens.isEmpty();

                // a node reaching normal state is no longer bootstrapping, leaving, or moving
                bootstrapTokens.removeValue(endpoint);
                tokenToEndpointMap.removeValue(endpoint);
                topology.addEndpoint(endpoint);
                leavingEndpoints.remove(endpoint);
                removeFromMoving(endpoint); // also removing this endpoint from moving

                for (Token token : tokens)
                {
                    InetAddress prev = tokenToEndpointMap.put(token, endpoint);
                    if (!endpoint.equals(prev))
                    {
                        if (prev != null)
                            logger.warn("Token {} changing ownership from {} to {}", token, prev, endpoint);
                        shouldSortTokens = true;
                    }
                }
            }

            // only rebuild the sorted-token cache if ownership actually changed
            if (shouldSortTokens)
                sortedTokens = sortTokens();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * Store an end-point to host ID mapping.  Each ID must be unique, and
     * cannot be changed after the fact.
     *
     * @throws RuntimeException if the host ID is already claimed by a different, still-alive endpoint
     */
    public void updateHostId(UUID hostId, InetAddress endpoint)
    {
        assert hostId != null;
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            InetAddress storedEp = endpointToHostIdMap.inverse().get(hostId);
            if (storedEp != null)
            {
                // a dead endpoint holding the same ID is allowed to be replaced
                if (!storedEp.equals(endpoint) && (FailureDetector.instance.isAlive(storedEp)))
                {
                    throw new RuntimeException(String.format("Host ID collision between active endpoint %s and %s (id=%s)",
                                                             storedEp,
                                                             endpoint,
                                                             hostId));
                }
            }

            UUID storedId = endpointToHostIdMap.get(endpoint);
            if ((storedId != null) && (!storedId.equals(hostId)))
                logger.warn("Changing {}'s host ID from {} to {}", endpoint, storedId, hostId);

            // forcePut evicts any stale inverse mapping for this hostId
            endpointToHostIdMap.forcePut(endpoint, hostId);
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /** Return the unique host ID for an end-point.
     */
    public UUID getHostId(InetAddress endpoint)
    {
        lock.readLock().lock();
        try
        {
            return endpointToHostIdMap.get(endpoint);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** Return the end-point for a unique host ID */
    public InetAddress getEndpointForHostId(UUID hostId)
    {
        lock.readLock().lock();
        try
        {
            return endpointToHostIdMap.inverse().get(hostId);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return a copy of the endpoint-to-id map for read-only operations */
    public Map<InetAddress, UUID> getEndpointToHostIdMapForReading()
    {
        lock.readLock().lock();
        try
        {
            Map<InetAddress, UUID> readMap = new HashMap<>();
            readMap.putAll(endpointToHostIdMap);
            return readMap;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    @Deprecated
    public void addBootstrapToken(Token token, InetAddress endpoint)
    {
        addBootstrapTokens(Collections.singleton(token), endpoint);
    }

    /**
     * Record that endpoint is bootstrapping into the given tokens.
     *
     * @throws RuntimeException if any token is already claimed (bootstrapping or normal)
     *         by a different endpoint
     */
    public void addBootstrapTokens(Collection<Token> tokens, InetAddress endpoint)
    {
        assert tokens != null && !tokens.isEmpty();
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            InetAddress oldEndpoint;

            // validate all tokens before mutating anything
            for (Token token : tokens)
            {
                oldEndpoint = bootstrapTokens.get(token);
                if (oldEndpoint != null && !oldEndpoint.equals(endpoint))
                    throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token);

                oldEndpoint = tokenToEndpointMap.get(token);
                if (oldEndpoint != null && !oldEndpoint.equals(endpoint))
                    throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token);
            }

            // replace any previous bootstrap claim by this endpoint
            bootstrapTokens.removeValue(endpoint);

            for (Token token : tokens)
                bootstrapTokens.put(token, endpoint);
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /** Forget the given bootstrap token claims (e.g. when a bootstrap finishes or aborts). */
    public void removeBootstrapTokens(Collection<Token> tokens)
    {
        assert tokens != null && !tokens.isEmpty();

        lock.writeLock().lock();
        try
        {
            for (Token token : tokens)
                bootstrapTokens.remove(token);
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /** Mark endpoint as in the process of leaving the ring. */
    public void addLeavingEndpoint(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            leavingEndpoints.add(endpoint);
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * Add a new moving endpoint
     * @param token token which is node moving to
     * @param endpoint address of the moving node
     */
    public void addMovingEndpoint(Token token, InetAddress endpoint)
    {
        assert endpoint != null;

        lock.writeLock().lock();

        try
        {
            movingEndpoints.add(Pair.create(token, endpoint));
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /** Remove every trace of endpoint from the ring (tokens, topology, host ID, transient state). */
    public void removeEndpoint(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            bootstrapTokens.removeValue(endpoint);
            tokenToEndpointMap.removeValue(endpoint);
            topology.removeEndpoint(endpoint);
            leavingEndpoints.remove(endpoint);
            endpointToHostIdMap.remove(endpoint);
            sortedTokens = sortTokens();
            invalidateCachedRings();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * This is called when the snitch properties for this endpoint are updated, see CASSANDRA-10238.
     */
    public void updateTopology(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            logger.info("Updating topology for {}", endpoint);
            topology.updateEndpoint(endpoint);
            invalidateCachedRings();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * This is called when the snitch properties for many endpoints are updated, it will update
     * the topology mappings of any endpoints whose snitch has changed, see CASSANDRA-10238.
     */
    public void updateTopology()
    {
        lock.writeLock().lock();
        try
        {
            logger.info("Updating topology for all endpoints that have changed");
            topology.updateEndpoints();
            invalidateCachedRings();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * Remove pair of token/address from moving endpoints
     * @param endpoint address of the moving node
     */
    public void removeFromMoving(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.writeLock().lock();
        try
        {
            for (Pair<Token, InetAddress> pair : movingEndpoints)
            {
                if (pair.right.equals(endpoint))
                {
                    // safe to remove during iteration only because we break immediately
                    movingEndpoints.remove(pair);
                    break;
                }
            }

            invalidateCachedRings();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * @return a copy of the tokens owned by endpoint, which must be a ring member
     */
    public Collection<Token> getTokens(InetAddress endpoint)
    {
        assert endpoint != null;
        assert isMember(endpoint); // don't want to return nulls

        lock.readLock().lock();
        try
        {
            return new ArrayList<>(tokenToEndpointMap.inverse().get(endpoint));
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    @Deprecated
    public Token getToken(InetAddress endpoint)
    {
        return getTokens(endpoint).iterator().next();
    }

    /** @return true if endpoint owns at least one token in normal state */
    public boolean isMember(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.readLock().lock();
        try
        {
            return tokenToEndpointMap.inverse().containsKey(endpoint);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return true if endpoint is currently leaving the ring */
    public boolean isLeaving(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.readLock().lock();
        try
        {
            return leavingEndpoints.contains(endpoint);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return true if endpoint is currently moving to a new token */
    public boolean isMoving(InetAddress endpoint)
    {
        assert endpoint != null;

        lock.readLock().lock();

        try
        {
            for (Pair<Token, InetAddress> pair : movingEndpoints)
            {
                if (pair.right.equals(endpoint))
                    return true;
            }

            return false;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    // cache for cachedOnlyTokenMap(); cleared via invalidateCachedRings() on any ring change
    private final AtomicReference<TokenMetadata> cachedTokenMap = new AtomicReference<>();

    /**
     * Create a copy of TokenMetadata with only tokenToEndpointMap. That is, pending ranges,
     * bootstrap tokens and leaving endpoints are not included in the copy.
     */
    public TokenMetadata cloneOnlyTokenMap()
    {
        lock.readLock().lock();
        try
        {
            return new TokenMetadata(SortedBiMultiValMap.create(tokenToEndpointMap, null, inetaddressCmp),
                                     HashBiMap.create(endpointToHostIdMap),
                                     new Topology(topology),
                                     partitioner);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * Return a cached TokenMetadata with only tokenToEndpointMap, i.e., the same as cloneOnlyTokenMap but
     * uses a cached copy that is invalidated when the ring changes, so in the common case
     * no extra locking is required.
     *
     * Callers must *NOT* mutate the returned metadata object.
     */
    public TokenMetadata cachedOnlyTokenMap()
    {
        TokenMetadata tm = cachedTokenMap.get();
        if (tm != null)
            return tm;

        // synchronize to prevent thundering herd (CASSANDRA-6345)
        synchronized (this)
        {
            // double-checked: another thread may have populated the cache while we waited
            if ((tm = cachedTokenMap.get()) != null)
                return tm;

            tm = cloneOnlyTokenMap();
            cachedTokenMap.set(tm);
            return tm;
        }
    }

    /**
     * Create a copy of TokenMetadata with tokenToEndpointMap reflecting situation after all
     * current leave operations have finished.
     *
     * @return new token metadata
     */
    public TokenMetadata cloneAfterAllLeft()
    {
        lock.readLock().lock();
        try
        {
            TokenMetadata allLeftMetadata = cloneOnlyTokenMap();

            for (InetAddress endpoint : leavingEndpoints)
                allLeftMetadata.removeEndpoint(endpoint);

            return allLeftMetadata;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * Create a copy of TokenMetadata with tokenToEndpointMap reflecting situation after all
     * current leave, and move operations have finished.
     *
     * @return new token metadata
     */
    public TokenMetadata cloneAfterAllSettled()
    {
        lock.readLock().lock();

        try
        {
            TokenMetadata metadata = cloneOnlyTokenMap();

            for (InetAddress endpoint : leavingEndpoints)
                metadata.removeEndpoint(endpoint);

            for (Pair<Token, InetAddress> pair : movingEndpoints)
                metadata.updateNormalToken(pair.left, pair.right);

            return metadata;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return the endpoint that owns the given token, or null if it is unowned */
    public InetAddress getEndpoint(Token token)
    {
        lock.readLock().lock();
        try
        {
            return tokenToEndpointMap.get(token);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return for each token, the range (predecessor, token] that it is primary for */
    public Collection<Range<Token>> getPrimaryRangesFor(Collection<Token> tokens)
    {
        Collection<Range<Token>> ranges = new ArrayList<>(tokens.size());

        for (Token right : tokens)
            ranges.add(new Range<>(getPredecessor(right), right));

        return ranges;
    }

    @Deprecated
    public Range<Token> getPrimaryRangeFor(Token right)
    {
        return getPrimaryRangesFor(Arrays.asList(right)).iterator().next();
    }

    /** @return the cached, sorted token ring; callers must not mutate it */
    public ArrayList<Token> sortedTokens()
    {
        return sortedTokens;
    }

    /** @return a flattened multimap copy of the pending ranges for the keyspace (empty if none) */
    public Multimap<Range<Token>, InetAddress> getPendingRangesMM(String keyspaceName)
    {
        Multimap<Range<Token>, InetAddress> map = HashMultimap.create();
        PendingRangeMaps pendingRangeMaps = this.pendingRanges.get(keyspaceName);

        if (pendingRangeMaps != null)
        {
            for (Map.Entry<Range<Token>, List<InetAddress>> entry : pendingRangeMaps)
            {
                Range<Token> range = entry.getKey();
                for (InetAddress address : entry.getValue())
                {
                    map.put(range, address);
                }
            }
        }

        return map;
    }

    /** a mutable map may be returned but caller should not modify it */
    public PendingRangeMaps getPendingRanges(String keyspaceName)
    {
        return this.pendingRanges.get(keyspaceName);
    }

    /** @return the pending ranges for the keyspace that are destined for the given endpoint */
    public List<Range<Token>> getPendingRanges(String keyspaceName, InetAddress endpoint)
    {
        List<Range<Token>> ranges = new ArrayList<>();
        for (Map.Entry<Range<Token>, InetAddress> entry : getPendingRangesMM(keyspaceName).entries())
        {
            if (entry.getValue().equals(endpoint))
            {
                ranges.add(entry.getKey());
            }
        }
        return ranges;
    }

     /**
     * Calculate pending ranges according to bootstrapping and leaving nodes. Reasoning is:
     *
     * (1) When in doubt, it is better to write too much to a node than too little. That is, if
     * there are multiple nodes moving, calculate the biggest ranges a node could have. Cleaning
     * up unneeded data afterwards is better than missing writes during movement.
     * (2) When a node leaves, ranges for other nodes can only grow (a node might get additional
     * ranges, but it will not lose any of its current ranges as a result of a leave). Therefore
     * we will first remove _all_ leaving tokens for the sake of calculation and then check what
     * ranges would go where if all nodes are to leave. This way we get the biggest possible
     * ranges with regard to current leave operations, covering all subsets of possible final range
     * values.
     * (3) When a node bootstraps, ranges of other nodes can only get smaller. Without doing
     * complex calculations to see if multiple bootstraps overlap, we simply base calculations
     * on the same token ring used before (reflecting situation after all leave operations have
     * completed). Bootstrapping nodes will be added and removed one by one to that metadata and
     * checked what their ranges would be. This will give us the biggest possible ranges the
     * node could have. It might be that other bootstraps make our actual final ranges smaller,
     * but it does not matter as we can clean up the data afterwards.
     *
     * NOTE: This is heavy and ineffective operation. This will be done only once when a node
     * changes state in the cluster, so it should be manageable.
     */
    public void calculatePendingRanges(AbstractReplicationStrategy strategy, String keyspaceName)
    {
        // NOTE(review): takes only the READ lock yet publishes into pendingRanges; the
        // ConcurrentMap makes the put itself safe — presumably writes are serialized by the
        // caller. Confirm before relying on concurrent invocations for the same keyspace.
        lock.readLock().lock();
        try
        {
            PendingRangeMaps newPendingRanges = new PendingRangeMaps();

            if (bootstrapTokens.isEmpty() && leavingEndpoints.isEmpty() && movingEndpoints.isEmpty())
            {
                if (logger.isTraceEnabled())
                    logger.trace("No bootstrapping, leaving or moving nodes -> empty pending ranges for {}", keyspaceName);

                pendingRanges.put(keyspaceName, newPendingRanges);
                return;
            }

            Multimap<InetAddress, Range<Token>> addressRanges = strategy.getAddressRanges();

            // Copy of metadata reflecting the situation after all leave operations are finished.
            TokenMetadata allLeftMetadata = cloneAfterAllLeft();

            // get all ranges that will be affected by leaving nodes
            Set<Range<Token>> affectedRanges = new HashSet<Range<Token>>();
            for (InetAddress endpoint : leavingEndpoints)
                affectedRanges.addAll(addressRanges.get(endpoint));

            // for each of those ranges, find what new nodes will be responsible for the range when
            // all leaving nodes are gone.
            TokenMetadata metadata = cloneOnlyTokenMap(); // don't do this in the loop! #7758
            for (Range<Token> range : affectedRanges)
            {
                Set<InetAddress> currentEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(range.right, metadata));
                Set<InetAddress> newEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(range.right, allLeftMetadata));
                for (InetAddress address : Sets.difference(newEndpoints, currentEndpoints))
                {
                    newPendingRanges.addPendingRange(range, address);
                }
            }

            // At this stage newPendingRanges has been updated according to leave operations. We can
            // now continue the calculation by checking bootstrapping nodes.

            // For each of the bootstrapping nodes, simply add and remove them one by one to
            // allLeftMetadata and check in between what their ranges would be.
            Multimap<InetAddress, Token> bootstrapAddresses = bootstrapTokens.inverse();
            for (InetAddress endpoint : bootstrapAddresses.keySet())
            {
                Collection<Token> tokens = bootstrapAddresses.get(endpoint);

                allLeftMetadata.updateNormalTokens(tokens, endpoint);
                for (Range<Token> range : strategy.getAddressRanges(allLeftMetadata).get(endpoint))
                {
                    newPendingRanges.addPendingRange(range, endpoint);
                }
                allLeftMetadata.removeEndpoint(endpoint);
            }

            // At this stage newPendingRanges has been updated according to leaving and bootstrapping nodes.
            // We can now finish the calculation by checking moving nodes.

            // For each of the moving nodes, we do the same thing we did for bootstrapping:
            // simply add and remove them one by one to allLeftMetadata and check in between what their ranges would be.
            for (Pair<Token, InetAddress> moving : movingEndpoints)
            {
                //Calculate all the ranges which could be affected. This will include the ranges before and after the move.
                Set<Range<Token>> moveAffectedRanges = new HashSet<>();
                InetAddress endpoint = moving.right; // address of the moving node

                //Add ranges before the move
                for (Range<Token> range : strategy.getAddressRanges(allLeftMetadata).get(endpoint))
                {
                    moveAffectedRanges.add(range);
                }

                allLeftMetadata.updateNormalToken(moving.left, endpoint);

                //Add ranges after the move
                for (Range<Token> range : strategy.getAddressRanges(allLeftMetadata).get(endpoint))
                {
                    moveAffectedRanges.add(range);
                }

                for(Range<Token> range : moveAffectedRanges)
                {
                    Set<InetAddress> currentEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(range.right, metadata));
                    Set<InetAddress> newEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(range.right, allLeftMetadata));
                    Set<InetAddress> difference = Sets.difference(newEndpoints, currentEndpoints);
                    for(final InetAddress address : difference)
                    {
                        Collection<Range<Token>> newRanges = strategy.getAddressRanges(allLeftMetadata).get(address);
                        Collection<Range<Token>> oldRanges = strategy.getAddressRanges(metadata).get(address);

                        //We want to get rid of any ranges which the node is currently getting.
                        newRanges.removeAll(oldRanges);

                        for(Range<Token> newRange : newRanges)
                        {
                            for(Range<Token> pendingRange : newRange.subtractAll(oldRanges))
                            {
                                newPendingRanges.addPendingRange(pendingRange, address);
                            }
                        }
                    }
                }

                allLeftMetadata.removeEndpoint(endpoint);
            }

            pendingRanges.put(keyspaceName, newPendingRanges);

            if (logger.isTraceEnabled())
                logger.trace("Pending ranges:\n{}", (pendingRanges.isEmpty() ? "<empty>" : printPendingRanges()));
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * @return the token immediately preceding the given token on the ring
     *         (wraps to the last token when the given token is the smallest)
     */
    public Token getPredecessor(Token token)
    {
        List<Token> tokens = sortedTokens();
        int index = Collections.binarySearch(tokens, token);
        // assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", ");
        if (index < 0)
            index = -index-1;
        return index == 0 ? tokens.get(tokens.size() - 1) : tokens.get(index - 1);
    }

    /**
     * @return the token immediately following the given token on the ring
     *         (wraps to the first token when the given token is the largest member)
     */
    public Token getSuccessor(Token token)
    {
        List<Token> tokens = sortedTokens();
        int index = Collections.binarySearch(tokens, token);
        // assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", ");
        // NOTE(review): if index < 0 and token is greater than every ring token,
        // -index-1 == tokens.size() and this would throw IndexOutOfBoundsException —
        // presumably callers only pass tokens that are ring members; confirm.
        if (index < 0)
            return (Token) tokens.get(-index-1);
        return (index == (tokens.size() - 1)) ? tokens.get(0) : tokens.get(index + 1);
    }

    /** @return a copy of the bootstrapping tokens map */
    public BiMultiValMap<Token, InetAddress> getBootstrapTokens()
    {
        lock.readLock().lock();
        try
        {
            return new BiMultiValMap<Token, InetAddress>(bootstrapTokens);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** @return an immutable snapshot of every endpoint with a known host ID */
    public Set<InetAddress> getAllEndpoints()
    {
        lock.readLock().lock();
        try
        {
            return ImmutableSet.copyOf(endpointToHostIdMap.keySet());
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /** caller should not modify leavingEndpoints */
    public Set<InetAddress> getLeavingEndpoints()
    {
        lock.readLock().lock();
        try
        {
            return ImmutableSet.copyOf(leavingEndpoints);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * Endpoints which are migrating to the new tokens
     * @return set of addresses of moving endpoints
     */
    public Set<Pair<Token, InetAddress>> getMovingEndpoints()
    {
        lock.readLock().lock();
        try
        {
            return ImmutableSet.copyOf(movingEndpoints);
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * @return the index in ring of the first token owning start, or -1 to represent the
     *         minimum token when insertMin is set and start sorts past the last ring token
     */
    public static int firstTokenIndex(final ArrayList<Token> ring, Token start, boolean insertMin)
    {
        assert ring.size() > 0;
        // insert the minimum token (at index == -1) if we were asked to include it and it isn't a member of the ring
        int i = Collections.binarySearch(ring, start);
        if (i < 0)
        {
            i = (i + 1) * (-1);
            if (i >= ring.size())
                i = insertMin ? -1 : 0;
        }
        return i;
    }

    /** @return the first ring token at or after start (wrapping to the first token) */
    public static Token firstToken(final ArrayList<Token> ring, Token start)
    {
        return ring.get(firstTokenIndex(ring, start, false));
    }

    /**
     * iterator over the Tokens in the given ring, starting with the token for the node owning start
     * (which does not have to be a Token in the ring)
     * @param includeMin True if the minimum token should be returned in the ring even if it has no owner.
     */
    public static Iterator<Token> ringIterator(final ArrayList<Token> ring, Token start, boolean includeMin)
    {
        if (ring.isEmpty())
            return includeMin ? Iterators.singletonIterator(start.getPartitioner().getMinimumToken())
                              : Collections.emptyIterator();

        final boolean insertMin = includeMin && !ring.get(0).isMinimum();
        final int startIndex = firstTokenIndex(ring, start, insertMin);
        return new AbstractIterator<Token>()
        {
            // j walks the ring; -1 stands for the synthetic minimum token, -2 ends iteration
            int j = startIndex;
            protected Token computeNext()
            {
                if (j < -1)
                    return endOfData();
                try
                {
                    // return minimum for index == -1
                    if (j == -1)
                        return start.getPartitioner().getMinimumToken();
                    // return ring token for other indexes
                    return ring.get(j);
                }
                finally
                {
                    // advance after returning, wrapping around and stopping once we're back at startIndex
                    j++;
                    if (j == ring.size())
                        j = insertMin ? -1 : 0;
                    if (j == startIndex)
                        // end iteration
                        j = -2;
                }
            }
        };
    }

    /** used by tests */
    public void clearUnsafe()
    {
        lock.writeLock().lock();
        try
        {
            tokenToEndpointMap.clear();
            endpointToHostIdMap.clear();
            bootstrapTokens.clear();
            leavingEndpoints.clear();
            pendingRanges.clear();
            movingEndpoints.clear();
            sortedTokens.clear();
            topology.clear();
            invalidateCachedRings();
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        lock.readLock().lock();
        try
        {
            Set<InetAddress> eps = tokenToEndpointMap.inverse().keySet();

            if (!eps.isEmpty())
            {
                sb.append("Normal Tokens:");
                sb.append(System.getProperty("line.separator"));
                for (InetAddress ep : eps)
                {
                    sb.append(ep);
                    sb.append(':');
                    sb.append(tokenToEndpointMap.inverse().get(ep));
                    sb.append(System.getProperty("line.separator"));
                }
            }

            if (!bootstrapTokens.isEmpty())
            {
                sb.append("Bootstrapping Tokens:" );
                sb.append(System.getProperty("line.separator"));
                for (Map.Entry<Token, InetAddress> entry : bootstrapTokens.entrySet())
                {
                    sb.append(entry.getValue()).append(':').append(entry.getKey());
                    sb.append(System.getProperty("line.separator"));
                }
            }

            if (!leavingEndpoints.isEmpty())
            {
                sb.append("Leaving Endpoints:");
                sb.append(System.getProperty("line.separator"));
                for (InetAddress ep : leavingEndpoints)
                {
                    sb.append(ep);
                    sb.append(System.getProperty("line.separator"));
                }
            }

            if (!pendingRanges.isEmpty())
            {
                sb.append("Pending Ranges:");
                sb.append(System.getProperty("line.separator"));
                sb.append(printPendingRanges());
            }
        }
        finally
        {
            lock.readLock().unlock();
        }

        return sb.toString();
    }

    /** @return a rendering of all keyspaces' pending ranges for logging */
    private String printPendingRanges()
    {
        StringBuilder sb = new StringBuilder();

        for (PendingRangeMaps pendingRangeMaps : pendingRanges.values())
        {
            sb.append(pendingRangeMaps.printPendingRanges());
        }

        return sb.toString();
    }

    /** @return the endpoints with pending ownership of the token in the given keyspace (possibly empty) */
    public Collection<InetAddress> pendingEndpointsFor(Token token, String keyspaceName)
    {
        PendingRangeMaps pendingRangeMaps = this.pendingRanges.get(keyspaceName);
        if (pendingRangeMaps == null)
            return Collections.emptyList();

        return pendingRangeMaps.pendingEndpointsFor(token);
    }

    /**
     * @deprecated retained for benefit of old tests
     */
    public Collection<InetAddress> getWriteEndpoints(Token token, String keyspaceName, Collection<InetAddress> naturalEndpoints)
    {
        return ImmutableList.copyOf(Iterables.concat(naturalEndpoints, pendingEndpointsFor(token, keyspaceName)));
    }

    /** @return an endpoint to token multimap representation of tokenToEndpointMap (a copy) */
    public Multimap<InetAddress, Token> getEndpointToTokenMapForReading()
    {
        lock.readLock().lock();
        try
        {
            Multimap<InetAddress, Token> cloned = HashMultimap.create();
            for (Map.Entry<Token, InetAddress> entry : tokenToEndpointMap.entrySet())
                cloned.put(entry.getValue(), entry.getKey());
            return cloned;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * @return a (stable copy, won't be modified) Token to Endpoint map for all the normal and bootstrapping nodes
     *         in the cluster.
     */
    public Map<Token, InetAddress> getNormalAndBootstrappingTokenToEndpointMap()
    {
        lock.readLock().lock();
        try
        {
            Map<Token, InetAddress> map = new HashMap<>(tokenToEndpointMap.size() + bootstrapTokens.size());
            map.putAll(tokenToEndpointMap);
            map.putAll(bootstrapTokens);
            return map;
        }
        finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * @return the Topology map of nodes to DCs + Racks
     *
     * This is only allowed when a copy has been made of TokenMetadata, to avoid concurrent modifications
     * when Topology methods are subsequently used by the caller.
     */
    public Topology getTopology()
    {
        assert this != StorageService.instance.getTokenMetadata();
        return topology;
    }

    /** @return a version counter bumped on every ring change; lets strategies detect staleness cheaply */
    public long getRingVersion()
    {
        return ringVersion;
    }

    /** Bump the ring version and drop the cached token-map clone. */
    public void invalidateCachedRings()
    {
        ringVersion++;
        cachedTokenMap.set(null);
    }

    /** Convenience: decorate a raw key with this metadata's partitioner. */
    public DecoratedKey decorateKey(ByteBuffer key)
    {
        return partitioner.decorateKey(key);
    }

    /**
     * Tracks the assignment of racks and endpoints in each datacenter for all the "normal" endpoints
     * in this TokenMetadata. This allows faster calculation of endpoints in NetworkTopologyStrategy.
     */
    public static class Topology
    {
        /** multi-map of DC to endpoints in that DC */
        private final Multimap<String, InetAddress> dcEndpoints;
        /** map of DC to multi-map of rack to endpoints in that rack */
        private final Map<String, Multimap<String, InetAddress>> dcRacks;
        /** reverse-lookup map for endpoint to current known dc/rack assignment */
        private final Map<InetAddress, Pair<String, String>> currentLocations;

        Topology()
        {
            dcEndpoints = HashMultimap.create();
            dcRacks = new HashMap<>();
            currentLocations = new HashMap<>();
        }

        void clear()
        {
            dcEndpoints.clear();
            dcRacks.clear();
            currentLocations.clear();
        }

        /**
         * construct deep-copy of other
         */
        Topology(Topology other)
        {
            dcEndpoints = HashMultimap.create(other.dcEndpoints);
            dcRacks = new HashMap<>();
            for (String dc : other.dcRacks.keySet())
                dcRacks.put(dc, HashMultimap.create(other.dcRacks.get(dc)));
            currentLocations = new HashMap<>(other.currentLocations);
        }

        /**
         * Stores current DC/rack assignment for ep
         */
        void addEndpoint(InetAddress ep)
        {
            IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
            String dc = snitch.getDatacenter(ep);
            String rack = snitch.getRack(ep);
            Pair<String, String> current = currentLocations.get(ep);
            if (current != null)
            {
                // already registered at the same location: nothing to do
                if (current.left.equals(dc) && current.right.equals(rack))
                    return;
                doRemoveEndpoint(ep, current);
            }

            doAddEndpoint(ep, dc, rack);
        }

        // registers ep under dc/rack in all three indexes
        private void doAddEndpoint(InetAddress ep, String dc, String rack)
        {
            dcEndpoints.put(dc, ep);

            if (!dcRacks.containsKey(dc))
                dcRacks.put(dc, HashMultimap.<String, InetAddress>create());
            dcRacks.get(dc).put(rack, ep);

            currentLocations.put(ep, Pair.create(dc, rack));
        }

        /**
         * Removes current DC/rack assignment for ep
         */
        void removeEndpoint(InetAddress ep)
        {
            if (!currentLocations.containsKey(ep))
                return;

            doRemoveEndpoint(ep, currentLocations.remove(ep));
        }

        // removes ep from the dc/rack indexes (caller handles currentLocations)
        private void doRemoveEndpoint(InetAddress ep, Pair<String, String> current)
        {
            dcRacks.get(current.left).remove(current.right, ep);
            dcEndpoints.remove(current.left, ep);
        }

        /** Re-reads ep's location from the snitch, if ep is known and a snitch is configured. */
        void updateEndpoint(InetAddress ep)
        {
            IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
            if (snitch == null || !currentLocations.containsKey(ep))
                return;

            updateEndpoint(ep, snitch);
        }

        /** Re-reads every known endpoint's location from the snitch. */
        void updateEndpoints()
        {
            IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
            if (snitch == null)
                return;

            // updateEndpoint only replaces existing keys in currentLocations,
            // so iterating its keySet here is safe (no structural modification)
            for (InetAddress ep : currentLocations.keySet())
                updateEndpoint(ep, snitch);
        }

        // moves ep to its current snitch-reported dc/rack if that differs from the recorded one
        private void updateEndpoint(InetAddress ep, IEndpointSnitch snitch)
        {
            Pair<String, String> current = currentLocations.get(ep);
            String dc = snitch.getDatacenter(ep);
            String rack = snitch.getRack(ep);
            if (dc.equals(current.left) && rack.equals(current.right))
                return;

            doRemoveEndpoint(ep, current);
            doAddEndpoint(ep, dc, rack);
        }

        /**
         * @return multi-map of DC to endpoints in that DC
         */
        public Multimap<String, InetAddress> getDatacenterEndpoints()
        {
            return dcEndpoints;
        }

        /**
         * @return map of DC to multi-map of rack to endpoints in that rack
         */
        public Map<String, Multimap<String, InetAddress>> getDatacenterRacks()
        {
            return dcRacks;
        }

        /**
         * @return The DC and rack of the given endpoint.
         */
        public Pair<String, String> getLocation(InetAddress addr)
        {
            return currentLocations.get(addr);
        }
    }
}
/* * Copyright 2016 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import static com.google.javascript.jscomp.testing.CodeSubTree.findClassDefinition; import static com.google.javascript.rhino.testing.NodeSubject.assertNode; import com.google.common.collect.ImmutableList; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.colors.Color; import com.google.javascript.jscomp.colors.ColorId; import com.google.javascript.jscomp.colors.StandardColors; import com.google.javascript.jscomp.testing.CodeSubTree; import com.google.javascript.jscomp.testing.NoninjectingCompiler; import com.google.javascript.jscomp.testing.TestExternsBuilder; import com.google.javascript.rhino.Node; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) public class RewriteAsyncFunctionsTest extends CompilerTestCase { private static final String EXTERNS_BASE = new TestExternsBuilder().addArguments().addJSCompLibraries().build(); public RewriteAsyncFunctionsTest() { super(EXTERNS_BASE); } @Override @Before public void setUp() throws Exception { super.setUp(); setLanguageOut(LanguageMode.ECMASCRIPT3); enableTypeCheck(); enableTypeInfoValidation(); replaceTypesWithColors(); } @Override protected CompilerPass getProcessor(Compiler compiler) { return 
RewriteAsyncFunctions.create(compiler); } // Don't let the compiler actually inject any code. // It just makes the expected output hard to read and write. @Override protected Compiler createCompiler() { return new NoninjectingCompiler(); } @Override protected NoninjectingCompiler getLastCompiler() { return (NoninjectingCompiler) super.getLastCompiler(); } private final Color getGlobalColor(ColorId colorId) { return getLastCompiler().getColorRegistry().get(colorId); } private final Color getGlobalInstanceColor(String globalClassName) { return Color.createUnion( findClassDefinition(getLastCompiler(), globalClassName) .getRootNode() .getColor() .getInstanceColors()); } @Test public void testDefaultParameterUsingThis() { test( lines( "class X {", " /**", " * @param {number} a", " */", " constructor(a) {", " /** @const */ this.a = a;", " }", " /**", " * @param {number} b", " * @return {!Promise<number>}", " */", " async m(b = this.a) {", " return this.a + b;", " }", "}"), lines( "class X {", " constructor(a) {", " /** @const */ this.a = a;", " }", " m(b = this.a) {", // this in parameter default value doesn't get changed " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return $jscomp$async$this.a + b;", " });", " }", "}")); Color classXInstanceType = getGlobalInstanceColor("X"); ImmutableList<Node> thisAliasNameReferences = findClassDefinition(getLastCompiler(), "X") .findMethodDefinition("m") .findMatchingQNameReferences("$jscomp$async$this"); assertThat(thisAliasNameReferences).hasSize(2); // const $jscomp$async$this = this; // confirm that `this` and `$jscomp$async$this` nodes have the right types in declaration Node aliasDeclarationReference = thisAliasNameReferences.get(0); assertNode(aliasDeclarationReference).hasColorThat().isEqualTo(classXInstanceType); Node thisNode = aliasDeclarationReference.getOnlyChild(); assertNode(thisNode).isThis().hasColorThat().isEqualTo(classXInstanceType); // make sure 
the single reference to $jscomp$async$this has the right type assertNode(thisAliasNameReferences.get(1)).hasColorThat().isEqualTo(classXInstanceType); } @Test public void testInnerArrowFunctionUsingThis() { test( lines( "class X {", " async m() {", " return new Promise((resolve, reject) => {", " return this;", " });", " }", "}"), lines( "class X {", " m() {", " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return new Promise((resolve, reject) => {", " return $jscomp$async$this;", " });", " });", " }", "}")); Color classXInstanceType = getGlobalInstanceColor("X"); ImmutableList<Node> thisAliasNameReferences = findClassDefinition(getLastCompiler(), "X") .findMethodDefinition("m") .findMatchingQNameReferences("$jscomp$async$this"); assertThat(thisAliasNameReferences).hasSize(2); // const $jscomp$async$this = this; // confirm that `this` and `$jscomp$async$this` nodes have the right types in declaration Node aliasDeclarationReference = thisAliasNameReferences.get(0); assertNode(aliasDeclarationReference).hasColorThat().isEqualTo(classXInstanceType); Node thisNode = aliasDeclarationReference.getOnlyChild(); assertNode(thisNode).isThis().hasColorThat().isEqualTo(classXInstanceType); // make sure the single reference to $jscomp$async$this has the right type assertNode(thisAliasNameReferences.get(1)).hasColorThat().isEqualTo(classXInstanceType); } @Test public void testInnerSuperCall() { test( externs(new TestExternsBuilder().addPromise().addJSCompLibraries().build()), srcs( lines( "class A {", " m() {", " return Promise.resolve(this);", " }", "}", "class X extends A {", " async m() {", " return super.m();", " }", "}")), expected( lines( "class A {", " m() {", " return Promise.resolve(this);", " }", "}", "class X extends A {", " m() {", " const $jscomp$async$this = this;", " const $jscomp$async$super$get$m = () => super.m;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " 
return $jscomp$async$super$get$m().call($jscomp$async$this);", " });", " }", "}"))); Color classAInstanceType = getGlobalInstanceColor("A"); // type of A.prototype.m Color classAPropertyMType = findClassDefinition(getLastCompiler(), "A") .findMethodDefinition("m") .getRootNode() .getColor(); CodeSubTree classXMethodMDefinition = findClassDefinition(getLastCompiler(), "X").findMethodDefinition("m"); // Check type information on wrapper function for `super.m` ImmutableList<Node> superMethodWrapperNameNodes = classXMethodMDefinition.findMatchingQNameReferences("$jscomp$async$super$get$m"); // one declaration and one reference assertThat(superMethodWrapperNameNodes).hasSize(2); // first name node is declaration // const $jscomp$async$super$get$m = () => super.m; Node wrapperDeclarationNameNode = superMethodWrapperNameNodes.get(0); Node wrapperArrowFunction = wrapperDeclarationNameNode.getOnlyChild(); // optimization colors don't track function signatures assertNode(wrapperArrowFunction) .isArrowFunction() .hasColorThat() .isEqualTo(StandardColors.TOP_OBJECT); // wrapper function variable has type matching the function itself Color wrapperArrowColor = wrapperArrowFunction.getColor(); assertNode(wrapperDeclarationNameNode).hasColorThat().isEqualTo(wrapperArrowColor); // get `super.m` from `() => `super.m` Node superDotM = wrapperArrowFunction.getLastChild(); assertNode(superDotM) .matchesQualifiedName("super.m") .hasColorThat() .isEqualTo(classAPropertyMType); Node superNode = superDotM.getFirstChild(); assertNode(superNode).isSuper().hasColorThat().isEqualTo(classAInstanceType); // second name node is reference // return $jscomp$async$super$get$m().call($jscomp$async$this); Node wrapperReferenceNameNode = superMethodWrapperNameNodes.get(1); // optimization colors don't track function signatures assertNode(wrapperArrowFunction).hasColorThat().isEqualTo(StandardColors.TOP_OBJECT); // `$jscomp$async$super$get$m()` Node wrapperCallNode = 
wrapperReferenceNameNode.getParent(); assertNode(wrapperCallNode).isCall().hasColorThat().isEqualTo(classAPropertyMType); // `$jscomp$async$super$get$m().call($jscomp$async$this)` Node methodCallNode = wrapperCallNode.getGrandparent(); // optimization colors don't track .call types assertNode(methodCallNode).isCall().hasColorThat().isEqualTo(StandardColors.UNKNOWN); } @Test public void testInnerSuperReference() { test( externs(new TestExternsBuilder().addFunction().addJSCompLibraries().build()), srcs( lines( "class A {", " m() {", " return this;", " }", "}", "class X extends A {", " async m() {", " const tmp = super.m;", " return tmp.call(null);", " }", "}")), expected( lines( "class A {", " m() {", " return this;", " }", "}", "class X extends A {", " m() {", " const $jscomp$async$super$get$m = () => super.m;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " const tmp = $jscomp$async$super$get$m();", // type of tmp will indicate it requires `this` be provided, but will allow null. 
" return tmp.call(null);", " });", " }", "}"))); // type of A.prototype.m Color classAPropertyMType = findClassDefinition(getLastCompiler(), "A") .findMethodDefinition("m") .getRootNode() .getColor(); Color classAInstanceType = getGlobalInstanceColor("A"); CodeSubTree classXMethodMDefinition = findClassDefinition(getLastCompiler(), "X").findMethodDefinition("m"); // Check type information on wrapper function for `super.m` ImmutableList<Node> superMethodWrapperNameNodes = classXMethodMDefinition.findMatchingQNameReferences("$jscomp$async$super$get$m"); // one declaration and one reference assertThat(superMethodWrapperNameNodes).hasSize(2); // first name node is declaration // const $jscomp$async$super$get$m = () => super.m; Node wrapperDeclarationNameNode = superMethodWrapperNameNodes.get(0); // arrow function has a Color representing a object Node wrapperArrowFunction = wrapperDeclarationNameNode.getOnlyChild(); assertNode(wrapperArrowFunction) .isArrowFunction() .hasColorThat() .isEqualTo(StandardColors.TOP_OBJECT); // wrapper function variable has type matching the function itself Color wrapperArrowColor = wrapperArrowFunction.getColor(); assertNode(wrapperDeclarationNameNode).hasColorThat().isEqualTo(wrapperArrowColor); // get `super.m` from `() => `super.m` Node superDotM = wrapperArrowFunction.getLastChild(); assertNode(superDotM) .matchesQualifiedName("super.m") .hasColorThat() .isEqualTo(classAPropertyMType); Node superNode = superDotM.getFirstChild(); assertNode(superNode).hasColorThat().isEqualTo(classAInstanceType); // second name node is reference // const tmp = $jscomp$async$super$get$m(); Node wrapperReferenceNameNode = superMethodWrapperNameNodes.get(1); // optimization colors don't track function signatures assertNode(wrapperReferenceNameNode).hasColorThat().isEqualTo(StandardColors.TOP_OBJECT); // `$jscomp$async$super$get$m()` Node wrapperCallNode = wrapperReferenceNameNode.getParent(); 
assertNode(wrapperCallNode).isCall().hasColorThat().isEqualTo(classAPropertyMType); } @Test public void testMultipleSuperAccessesInAsyncFunction_havingNonIdenticalUnknownTypes() { test( lines( "class UpdatingElement {", " getUpdateComplete() {", " }", "}", "", "class TextFieldBase extends UpdatingElement {", " async _getUpdateComplete() {", " if (super.getUpdateComplete) {", // `?` type " await super.getUpdateComplete();", // `??` type " }", " }", "}"), lines( "class UpdatingElement {", " getUpdateComplete() {", " }", "}", "class TextFieldBase extends UpdatingElement {", " _getUpdateComplete() {", " const $jscomp$async$this = this;", " const $jscomp$async$super$get$getUpdateComplete = () => super.getUpdateComplete;", " return $jscomp.asyncExecutePromiseGeneratorFunction(function*() {", " if ($jscomp$async$super$get$getUpdateComplete()) {", " yield $jscomp$async$super$get$getUpdateComplete().call($jscomp$async$this);", " }", " });", " }", "}")); } @Test public void testNestedArrowFunctionUsingThis() { test( lines( "class X {", " m() {", " return async () => (() => this);", " }", "}"), lines( "class X {", " m() {", " return () => {", " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return () => $jscomp$async$this;", " })", " }", " }", "}")); } @Test public void testInnerArrowFunctionUsingArguments() { test( externs(new TestExternsBuilder().addArguments().addJSCompLibraries().build()), srcs( lines( "class X {", " async m() {", " return new Promise((resolve, reject) => {", " return arguments;", " });", " }", "}")), expected( lines( "class X {", " m() {", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return new Promise((resolve, reject) => {", " return $jscomp$async$arguments", " });", " });", " }", "}"))); ImmutableList<Node> argumentsAliasRefs = findClassDefinition(getLastCompiler(), "X") .findMethodDefinition("m") 
.findMatchingQNameReferences("$jscomp$async$arguments"); assertThat(argumentsAliasRefs).hasSize(2); // one declaration and 1 use Color argumentsColor = getGlobalColor(StandardColors.ARGUMENTS_ID); // declaration reference // const $jscomp$async$arguments = arguments; Node argumentsAliasDeclaration = argumentsAliasRefs.get(0); Node argumentsValue = argumentsAliasDeclaration.getOnlyChild(); assertNode(argumentsValue) .matchesQualifiedName("arguments") .hasColorThat() .isEqualTo(argumentsColor); assertNode(argumentsAliasDeclaration) .matchesQualifiedName("$jscomp$async$arguments") .hasColorThat() .isEqualTo(argumentsColor); // usage reference // return $jscomp$async$arguments; Node argumentsAliasUsage = argumentsAliasRefs.get(1); assertNode(argumentsAliasUsage) .matchesQualifiedName("$jscomp$async$arguments") .hasColorThat() .isEqualTo(argumentsColor); } @Test public void testAwaitReplacement() { test( "async function foo(promise) { return await promise; }", lines( "function foo(promise) {", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return yield promise;", " });", "}")); } @Test public void testArgumentsReplacement_topLevelCode() { testSame("arguments;"); } @Test public void testArgumentsReplacement_normalFunction() { testSame("function f(a, b, ...rest) { return arguments.length; }"); } @Test public void testArgumentsReplacement_asyncFunction() { test( "async function f(a, b, ...rest) { return arguments.length; }", lines( "function f(a, b, ...rest) {", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return $jscomp$async$arguments.length;", // arguments replaced " });", "}")); } @Test public void testArgumentsReplacement_asyncClosure() { test( lines( "function outer() {", " /**", " * @param {...?} var_args", " * @return {!Promise<number>}", " */", " async function f(var_args) { return arguments.length; }", " return f(arguments)", "}"), lines( "function 
outer() {", " function f(var_args) {", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return $jscomp$async$arguments.length;", // arguments replaced " });", " }", " return f(arguments)", // unchanged "}")); } @Test public void testArgumentsReplacement_normalClosureInAsync() { test( externs(new TestExternsBuilder().addFunction().addJSCompLibraries().build()), srcs( lines( "async function a() {", " function inner() {", " return arguments.length;", " }", " return inner.apply(undefined, arguments);", // this should get replaced "}")), expected( lines( "function a() {", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " function inner() {", " return arguments.length;", // unchanged " }", " return inner.apply(undefined, $jscomp$async$arguments);", " });", "}"))); } @Test public void testClassMethod() { test( lines( "class A {", " /**", " * @param {number} x", " */", " constructor(x) {", " /** @type {number} */ this.x = x;", " }", " async f() {", " return this.x;", " }", "}"), lines( "class A {", " constructor(x) {", " this.x = x;", " }", " f() {", " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function *() {", " return $jscomp$async$this.x;", // this replaced " });", " }", "}")); } @Test public void testAsyncClassMethodWithAsyncArrow() { test( externs(new TestExternsBuilder().addConsole().addJSCompLibraries().build()), srcs( lines( "class A {", " async f() {", " let g = async () => { console.log(this, arguments); };", " g();", " }", "}")), expected( lines( "class A {", " f() {", " const $jscomp$async$this = this;", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function *() {", " let g = () => {", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function *() {", " console.log($jscomp$async$this, 
$jscomp$async$arguments);", " });", " };", " g();", " });", " }", "}"))); } @Test public void testNonAsyncClassMethodWithAsyncArrow() { test( externs(new TestExternsBuilder().addConsole().addJSCompLibraries().build()), srcs( lines( "class A {", " f() {", " let g = async () => { console.log(this, arguments); };", " g();", " }", "}")), expected( lines( "class A {", " f() {", " let g = () => {", " const $jscomp$async$this = this;", " const $jscomp$async$arguments = arguments;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function *() {", " console.log($jscomp$async$this, $jscomp$async$arguments);", " });", " };", " g();", " }", "}"))); } @Test public void testArrowFunctionExpressionBody() { test( "let f = async () => 1;", lines( "let f = () => {", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return 1;", " });", "}")); } @Test public void testGlobalScopeArrowFunctionRefersToThis() { test( "let f = async () => this;", lines( "let f = () => {", " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return $jscomp$async$this;", " });", "}")); } @Test public void testGlobalScopeAsyncArrowFunctionDefaultParamValueRefersToThis() { test( "let f = async (t = this) => t;", lines( "let f = (t = this) => {", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return t;", " });", "}")); } @Test public void testNestedAsyncArrowFunctionDefaultParamValueRefersToThis() { test( lines("let f = async function(outerT = this) {", " return async (t = this) => t;", "};"), lines( // `this` is not aliased here "let f = function(outerT = this) {", " const $jscomp$async$this = this;", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", // `this` is aliased here " return (t = $jscomp$async$this) => {", " return $jscomp.asyncExecutePromiseGeneratorFunction(", " function* () {", " return t;", " });", " };", " });", "};", "")); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.client.integration; import junit.framework.*; import net.sf.json.*; import org.apache.ignite.cache.store.*; import org.apache.ignite.compute.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.client.*; import org.apache.ignite.internal.client.ssl.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.lang.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.spi.swapspace.file.*; import org.apache.ignite.testframework.junits.common.*; import org.jetbrains.annotations.*; import javax.cache.configuration.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import static org.apache.ignite.IgniteSystemProperties.*; import static org.apache.ignite.cache.CacheMode.*; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*; /** * Tests for Java client. 
*/ @SuppressWarnings("deprecation") public abstract class ClientAbstractSelfTest extends GridCommonAbstractTest { /** */ private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** */ private static final String CACHE_NAME = "cache"; /** */ public static final String HOST = "127.0.0.1"; /** */ public static final int JETTY_PORT = 8080; /** */ public static final int BINARY_PORT = 11212; /** Path to jetty config. */ public static final String REST_JETTY_CFG = "modules/clients/src/test/resources/jetty/rest-jetty.xml"; /** Need to be static because configuration inits only once per class. */ private static final ConcurrentMap<Object, Object> INTERCEPTED_OBJECTS = new ConcurrentHashMap<>(); /** */ private static final Map<String, HashMapStore> cacheStores = new HashMap<>(); /** */ public static final String ROUTER_LOG_CFG = "modules/core/src/test/config/log4j-test.xml"; /** */ private static final String INTERCEPTED_SUF = "intercepted"; /** */ private static final String[] TASK_ARGS = new String[] {"executing", "test", "task"}; /** Flag indicating whether intercepted objects should be overwritten. 
*/ private static volatile boolean overwriteIntercepted; /** */ private ExecutorService exec; /** */ protected GridClient client; /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { System.setProperty(IGNITE_JETTY_PORT, Integer.toString(JETTY_PORT)); startGrid(); System.clearProperty(IGNITE_JETTY_PORT); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { stopGrid(); } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { exec = Executors.newCachedThreadPool(); client = client(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { U.shutdownNow(ClientAbstractSelfTest.class, exec, log); exec = null; if (client != null) GridClientFactory.stop(client.id(), true); client = null; synchronized (cacheStores) { for (HashMapStore cacheStore : cacheStores.values()) cacheStore.map.clear(); } grid().cache(null).clear(); grid().cache(CACHE_NAME).clear(); INTERCEPTED_OBJECTS.clear(); } /** * Gets protocol which should be used in client connection. * * @return Protocol. */ protected abstract GridClientProtocol protocol(); /** * Gets server address to which client should connect. * * @return Server address in format "host:port". */ protected abstract String serverAddress(); /** * @return Whether SSL should be used in test. */ protected abstract boolean useSsl(); /** * @return SSL context factory used in test. */ protected abstract GridSslContextFactory sslContextFactory(); /** * Get task name. * * @return Task name. */ protected String getTaskName() { return TestTask.class.getName(); } /** * @return name of the sleep task for current test. */ protected String getSleepTaskName() { return SleepTestTask.class.getName(); } /** * Get task argument. * * @return Task argument. 
*/ protected Object getTaskArgument() { return Arrays.asList(TASK_ARGS); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); cfg.setLocalHost(HOST); assert cfg.getConnectorConfiguration() == null; ConnectorConfiguration clientCfg = new ConnectorConfiguration(); clientCfg.setPort(BINARY_PORT); if (useSsl()) { clientCfg.setSslEnabled(true); clientCfg.setSslContextFactory(sslContextFactory()); } cfg.setConnectorConfiguration(clientCfg); TcpDiscoverySpi disco = new TcpDiscoverySpi(); disco.setIpFinder(IP_FINDER); cfg.setDiscoverySpi(disco); cfg.setCacheConfiguration(cacheConfiguration(null), cacheConfiguration("replicated"), cacheConfiguration("partitioned"), cacheConfiguration(CACHE_NAME)); clientCfg.setMessageInterceptor(new ConnectorMessageInterceptor() { @Override public Object onReceive(@Nullable Object obj) { if (obj != null) INTERCEPTED_OBJECTS.put(obj, obj); return overwriteIntercepted && obj instanceof String ? obj + INTERCEPTED_SUF : obj; } @Override public Object onSend(Object obj) { if (obj != null) INTERCEPTED_OBJECTS.put(obj, obj); return obj; } }); // Specify swap SPI, otherwise test fails on windows. cfg.setSwapSpaceSpi(new FileSwapSpaceSpi()); return cfg; } /** * @param cacheName Cache name. * @return Cache configuration. * @throws Exception In case of error. */ @SuppressWarnings("unchecked") private static CacheConfiguration cacheConfiguration(@Nullable final String cacheName) throws Exception { CacheConfiguration cfg = defaultCacheConfiguration(); cfg.setCacheMode(cacheName == null || CACHE_NAME.equals(cacheName) ? LOCAL : "replicated".equals(cacheName) ? 
REPLICATED : PARTITIONED); cfg.setName(cacheName); cfg.setWriteSynchronizationMode(FULL_SYNC); cfg.setCacheStoreFactory(new Factory<CacheStore>() { @Override public CacheStore create() { synchronized (cacheStores) { HashMapStore cacheStore = cacheStores.get(cacheName); if (cacheStore == null) cacheStores.put(cacheName, cacheStore = new HashMapStore()); return cacheStore; } } }); cfg.setWriteThrough(true); cfg.setReadThrough(true); cfg.setLoadPreviousValue(true); cfg.setSwapEnabled(true); if (cfg.getCacheMode() == PARTITIONED) cfg.setBackups(1); return cfg; } /** * @return Client. * @throws GridClientException In case of error. */ protected GridClient client() throws GridClientException { return GridClientFactory.start(clientConfiguration()); } /** * @return Test client configuration. */ protected GridClientConfiguration clientConfiguration() throws GridClientException { GridClientConfiguration cfg = new GridClientConfiguration(); GridClientDataConfiguration nullCache = new GridClientDataConfiguration(); GridClientDataConfiguration cache = new GridClientDataConfiguration(); cache.setName(CACHE_NAME); cfg.setDataConfigurations(Arrays.asList(nullCache, cache)); cfg.setProtocol(protocol()); cfg.setServers(Arrays.asList(serverAddress())); // Setting custom executor, to avoid failures on client shutdown. // And applying custom naming scheme to ease debugging. cfg.setExecutorService(Executors.newCachedThreadPool(new ThreadFactory() { private AtomicInteger cntr = new AtomicInteger(); @SuppressWarnings("NullableProblems") @Override public Thread newThread(Runnable r) { return new Thread(r, "client-worker-thread-" + cntr.getAndIncrement()); } })); if (useSsl()) cfg.setSslContextFactory(sslContextFactory()); return cfg; } /** * @throws Exception If failed. 
*/ public void testConnectable() throws Exception { GridClient client = client(); List<GridClientNode> nodes = client.compute().refreshTopology(false, false); assertTrue(F.first(nodes).connectable()); } /** * Check async API methods don't generate exceptions. * * @throws Exception If failed. */ public void testNoAsyncExceptions() throws Exception { GridClient client = client(); GridClientData data = client.data(CACHE_NAME); GridClientCompute compute = client.compute().projection(new GridClientPredicate<GridClientNode>() { @Override public boolean apply(GridClientNode e) { return false; } }); Map<String, GridClientFuture<?>> futs = new LinkedHashMap<>(); futs.put("exec", compute.executeAsync("taskName", "taskArg")); futs.put("affExec", compute.affinityExecuteAsync("taskName", "cacheName", "affKey", "taskArg")); futs.put("refreshById", compute.refreshNodeAsync(UUID.randomUUID(), true, true)); futs.put("refreshByIP", compute.refreshNodeAsync("nodeIP", true, true)); futs.put("refreshTop", compute.refreshTopologyAsync(true, true)); GridClientFactory.stop(client.id(), false); futs.put("put", data.putAsync("key", "val")); futs.put("putAll", data.putAllAsync(F.asMap("key", "val"))); futs.put("get", data.getAsync("key")); futs.put("getAll", data.getAllAsync(Arrays.asList("key"))); futs.put("remove", data.removeAsync("key")); futs.put("removeAll", data.removeAllAsync(Arrays.asList("key"))); futs.put("replace", data.replaceAsync("key", "val")); futs.put("cas", data.casAsync("key", "val", "val2")); futs.put("metrics", data.metricsAsync()); for (Map.Entry<String, GridClientFuture<?>> e : futs.entrySet()) { try { e.getValue().get(); info("Expects '" + e.getKey() + "' fails with grid client exception."); } catch (GridServerUnreachableException |GridClientClosedException ignore) { // No op: compute projection is empty. } } } /** * @throws Exception If failed. 
*/ public void testGracefulShutdown() throws Exception { GridClientCompute compute = client.compute(); Object taskArg = getTaskArgument(); String taskName = getSleepTaskName(); GridClientFuture<Object> fut = compute.executeAsync(taskName, taskArg); GridClientFuture<Object> fut2 = compute.executeAsync(taskName, taskArg); GridClientFactory.stop(client.id(), true); Assert.assertEquals(17, fut.get()); Assert.assertEquals(17, fut2.get()); } /** * @throws Exception If failed. */ public void testForceShutdown() throws Exception { GridClientCompute compute = client.compute(); Object taskArg = getTaskArgument(); String taskName = getSleepTaskName(); GridClientFuture<Object> fut = compute.executeAsync(taskName, taskArg); GridClientFactory.stop(client.id(), false); try { fut.get(); } catch (GridClientClosedException ignored) { return; } Assert.fail("Expected GridClientClosedException."); } /** * @throws Exception If failed. */ public void testShutdown() throws Exception { GridClient c = client(); GridClientCompute compute = c.compute(); String taskName = getTaskName(); Object taskArg = getTaskArgument(); Collection<GridClientFuture<Object>> futs = new ArrayList<>(); // Validate connection works. compute.execute(taskName, taskArg); info(">>> First task executed successfully, running batch."); for (int i = 0; i < 10; i++) futs.add(compute.executeAsync(taskName, taskArg)); // Stop client. GridClientFactory.stop(c.id(), true); info(">>> Completed stop request."); int failed = 0; for (GridClientFuture<Object> fut : futs) { try { assertEquals(17, fut.get()); } catch (GridClientException e) { failed++; log.warning("Task execution failed.", e); } } assertEquals(0, failed); } /** * @throws Exception If failed. 
*/ public void testExecute() throws Exception { String taskName = getTaskName(); Object taskArg = getTaskArgument(); GridClientCompute compute = client.compute(); Assert.assertEquals(new Integer(17), compute.execute(taskName, taskArg)); Assert.assertEquals(new Integer(17), compute.executeAsync(taskName, taskArg).get()); } /** * @throws Exception If failed. */ public void testTopology() throws Exception { GridClientCompute compute = client.compute(); List<GridClientNode> top = compute.refreshTopology(true, true); assertNotNull(top); assertEquals(1, top.size()); GridClientNode node = F.first(top); assertNotNull(node); assertFalse(node.attributes().isEmpty()); assertNotNull(node.tcpAddresses()); assertEquals(grid().localNode().id(), node.nodeId()); assertNotNull(node.metrics()); top = compute.refreshTopology(false, false); node = F.first(top); assertNotNull(top); assertEquals(1, top.size()); assertNull(node.metrics()); assertTrue(node.attributes().isEmpty()); node = F.first(top); assertNotNull(node); assertTrue(node.attributes().isEmpty()); assertNull(node.metrics()); assertNotNull(node.tcpAddresses()); assertEquals(grid().localNode().id(), node.nodeId()); top = compute.refreshTopologyAsync(true, true).get(); assertNotNull(top); assertEquals(1, top.size()); node = F.first(top); assertNotNull(node); assertFalse(node.attributes().isEmpty()); assertNotNull(node.metrics()); assertNotNull(node.tcpAddresses()); assertEquals(grid().localNode().id(), node.nodeId()); top = compute.refreshTopologyAsync(false, false).get(); assertNotNull(top); assertEquals(1, top.size()); node = F.first(top); assertNotNull(node); assertTrue(node.attributes().isEmpty()); assertNull(node.metrics()); assertNotNull(node.tcpAddresses()); assertEquals(grid().localNode().id(), node.nodeId()); } /** * Test task. */ private static class TestTask extends ComputeTaskSplitAdapter<List<Object>, Integer> { /** {@inheritDoc} */ @Override protected Collection<? 
extends ComputeJob> split(int gridSize, List<Object> list) { Collection<ComputeJobAdapter> jobs = new ArrayList<>(); if (list != null) for (final Object val : list) jobs.add(new ComputeJobAdapter() { @Override public Object execute() { try { Thread.sleep(1); } catch (InterruptedException ignored) { Thread.currentThread().interrupt(); } return val == null ? 0 : val.toString().length(); } }); return jobs; } /** {@inheritDoc} */ @Override public Integer reduce(List<ComputeJobResult> results) { int sum = 0; for (ComputeJobResult res : results) sum += res.<Integer>getData(); return sum; } } /** * Test task that sleeps 5 seconds. */ private static class SleepTestTask extends ComputeTaskSplitAdapter<List<Object>, Integer> { /** {@inheritDoc} */ @Override protected Collection<? extends ComputeJob> split(int gridSize, List<Object> list) { Collection<ComputeJobAdapter> jobs = new ArrayList<>(); if (list != null) for (final Object val : list) jobs.add(new ComputeJobAdapter() { @Override public Object execute() { try { Thread.sleep(5000); return val == null ? 0 : val.toString().length(); } catch (InterruptedException ignored) { return -1; } } }); return jobs; } /** {@inheritDoc} */ @Override public Integer reduce(List<ComputeJobResult> results) { int sum = 0; for (ComputeJobResult res : results) sum += res.<Integer>getData(); return sum; } } /** * Http test task with restriction to string arguments only. */ protected static class HttpTestTask extends ComputeTaskSplitAdapter<String, Integer> { private final TestTask delegate = new TestTask(); /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Override protected Collection<? extends ComputeJob> split(int gridSize, String arg) { if (arg.endsWith("intercepted")) arg = arg.substring(0, arg.length() - 11); JSON json = JSONSerializer.toJSON(arg); List list = json.isArray() ? 
JSONArray.toList((JSONArray)json, String.class, new JsonConfig()) : null; return delegate.split(gridSize, list); } /** {@inheritDoc} */ @Override public Integer reduce(List<ComputeJobResult> results) { return delegate.reduce(results); } } /** * Http wrapper for sleep task. */ protected static class SleepHttpTestTask extends ComputeTaskSplitAdapter<String, Integer> { private final SleepTestTask delegate = new SleepTestTask(); /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Override protected Collection<? extends ComputeJob> split(int gridSize, String arg) { JSON json = JSONSerializer.toJSON(arg); List list = json.isArray() ? JSONArray.toList((JSONArray)json, String.class, new JsonConfig()) : null; return delegate.split(gridSize, list); } /** {@inheritDoc} */ @Override public Integer reduce(List<ComputeJobResult> results) { return delegate.reduce(results); } } /** * Simple HashMap based cache store emulation. */ private static class HashMapStore extends CacheStoreAdapter<Object, Object> { /** Map for cache store. */ private final Map<Object, Object> map = new HashMap<>(); /** {@inheritDoc} */ @Override public void loadCache(IgniteBiInClosure<Object, Object> clo, Object... args) { for (Map.Entry e : map.entrySet()) { clo.apply(e.getKey(), e.getValue()); } } /** {@inheritDoc} */ @Override public Object load(Object key) { return map.get(key); } /** {@inheritDoc} */ @Override public void write(javax.cache.Cache.Entry<? extends Object, ? extends Object> e) { map.put(e.getKey(), e.getValue()); } /** {@inheritDoc} */ @Override public void delete(Object key) { map.remove(key); } } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.keymap.impl.ui; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.IdeBundle; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.search.SearchUtil; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.QuickList; import com.intellij.openapi.actionSystem.impl.ActionMenu; import com.intellij.openapi.keymap.KeyMapBundle; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.keymap.ex.KeymapManagerEx; import com.intellij.openapi.keymap.impl.KeymapImpl; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.GraphicsConfig; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.changes.issueLinks.TreeLinkMouseListener; import com.intellij.ui.*; import com.intellij.ui.scale.JBUIScale; import com.intellij.ui.treeStructure.Tree; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.util.SmartList; import com.intellij.util.ui.EmptyIcon; import com.intellij.util.ui.GraphicsUtil; import com.intellij.util.ui.PlatformColors; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.accessibility.AccessibleContextUtil; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.util.ui.tree.WideSelectionTreeUI; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.accessibility.AccessibleContext; import javax.swing.*; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.*; import java.awt.*; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import 
java.util.List;
import java.util.*;

/**
 * Tree/table UI listing all actions of a {@link Keymap}, grouped hierarchically,
 * with their shortcuts painted at the right edge of each row. Supports filtering
 * by text or by a concrete {@link Shortcut}, highlights customized shortcuts in
 * blue, and renders "uses shortcut of ..." hyperlinks for bound actions.
 */
public final class ActionsTree {
  private static final Icon EMPTY_ICON = EmptyIcon.ICON_18;
  private static final Icon CLOSE_ICON = AllIcons.Nodes.Folder;

  // Attributes used for the gray underlined "uses shortcut of ..." links.
  private final SimpleTextAttributes GRAY_LINK = new SimpleTextAttributes(SimpleTextAttributes.STYLE_UNDERLINE, JBColor.gray);

  private final JTree myTree;
  private DefaultMutableTreeNode myRoot;
  private final JScrollPane myComponent;
  private Keymap myKeymap;
  private Group myMainGroup = new Group("", null, null);
  // NOTE(review): read from the registry but not referenced elsewhere in this chunk.
  private final boolean myShowBoundActions = Registry.is("keymap.show.alias.actions");

  @NonNls
  private static final String ROOT = "ROOT";

  // Current text filter; re-applied by reset()/filter().
  private String myFilter = null;
  private Condition<? super AnAction> myBaseFilter;

  // actionId -> plugin name, used for the inline gray annotations in the renderer.
  private final Map<String, String> myPluginNames = ActionsTreeUtil.createPluginActionsMap();

  public ActionsTree() {
    myRoot = new DefaultMutableTreeNode(ROOT);

    myTree = new Tree(new MyModel(myRoot)) {
      @Override
      public void paint(Graphics g) {
        super.paint(g);
        // After the default painting, draw the shortcut "pills" on top of each
        // visible row (skipping hyperlink rows, which render themselves).
        Rectangle visibleRect = getVisibleRect();
        Insets insets = getInsets();
        if (insets != null && insets.right > 0) {
          // Leave room for the scroll bar area.
          visibleRect.width -= JBUIScale.scale(9);
        }
        Rectangle clip = g.getClipBounds();
        for (int row = 0; row < getRowCount(); row++) {
          Rectangle rowBounds = getRowBounds(row);
          rowBounds.x = 0;
          rowBounds.width = Integer.MAX_VALUE;
          if (rowBounds.intersects(clip)) {
            Object node = getPathForRow(row).getLastPathComponent();
            if (node instanceof DefaultMutableTreeNode) {
              Object data = ((DefaultMutableTreeNode)node).getUserObject();
              if (!(data instanceof Hyperlink)) {
                Rectangle fullRowRect = new Rectangle(visibleRect.x, rowBounds.y, visibleRect.width, rowBounds.height);
                paintRowData(this, data, fullRowRect, (Graphics2D)g);
              }
            }
          }
        }
      }

      @Override
      public String convertValueToText(Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
        // Use the qualified action path so speed-search matches full paths.
        if (value instanceof DefaultMutableTreeNode) {
          String path = ActionsTree.this.getPath((DefaultMutableTreeNode)value, true);
          return StringUtil.notNullize(path);
        }
        return super.convertValueToText(value, selected, expanded, leaf, row, hasFocus);
      }
    };
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    myTree.putClientProperty(WideSelectionTreeUI.STRIPED_CLIENT_PROPERTY, Boolean.TRUE);
    myTree.setCellRenderer(new KeymapsRenderer());
    // Make hyperlink fragments rendered by KeymapsRenderer clickable.
    new TreeLinkMouseListener(new KeymapsRenderer()) {
      @Override
      protected boolean doCacheLastNode() {
        return false;
      }

      @Override
      protected void handleTagClick(@Nullable Object tag, @NotNull MouseEvent event) {
        if (tag instanceof Hyperlink) {
          ((Hyperlink)tag).onClick(event);
        }
      }
    }.installOn(myTree);

    // Show the hovered action's description in the status bar.
    myTree.addMouseMotionListener(new MouseMotionAdapter() {
      @Override
      public void mouseMoved(MouseEvent e) {
        String description = getDescription(e);
        ActionMenu.showDescriptionInStatusBar(description != null, myTree, description);
      }

      @Nullable
      @NlsActions.ActionDescription
      private String getDescription(@NotNull MouseEvent e) {
        TreePath path = myTree.getPathForLocation(e.getX(), e.getY());
        DefaultMutableTreeNode node = path == null ? null : (DefaultMutableTreeNode)path.getLastPathComponent();
        Object userObject = node == null ? null : node.getUserObject();
        if (!(userObject instanceof String)) {
          return null;
        }
        AnAction action = ActionManager.getInstance().getActionOrStub((String)userObject);
        return action == null ? null : action.getTemplatePresentation().getDescription();
      }
    });

    myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);

    myComponent = ScrollPaneFactory.createScrollPane(myTree,
                                                     ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS,
                                                     ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
  }

  // silently replace current map
  void setKeymap(@NotNull Keymap keymap) {
    myKeymap = keymap;
  }

  /** Extra predicate ANDed into every filter built by reset(); may be null. */
  public void setBaseFilter(@Nullable Condition<? super AnAction> baseFilter) { myBaseFilter = baseFilter; }

  public JComponent getComponent() {
    return myComponent;
  }

  public void addTreeSelectionListener(TreeSelectionListener l) {
    myTree.getSelectionModel().addTreeSelectionListener(l);
  }

  @Nullable
  private Object getSelectedObject() {
    TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath == null) return null;
    return ((DefaultMutableTreeNode)selectionPath.getLastPathComponent()).getUserObject();
  }

  /**
   * Returns the action id of the selected row (plain action, quick list, or
   * group id), or null if nothing suitable is selected.
   */
  @Nullable
  public String getSelectedActionId() {
    Object userObject = getSelectedObject();
    if (userObject instanceof String) return (String)userObject;
    if (userObject instanceof QuickList) return ((QuickList)userObject).getActionId();
    if (userObject instanceof Group) return ((Group)userObject).getId();
    return null;
  }

  public void reset(@NotNull Keymap keymap, QuickList @NotNull [] allQuickLists) {
    reset(keymap, allQuickLists, myFilter, null);
  }

  public void reset(@NotNull Keymap keymap, QuickList @NotNull [] allQuickLists, @Nullable Shortcut shortcut) {
    reset(keymap, allQuickLists, myFilter, shortcut);
  }

  public Group getMainGroup() {
    return myMainGroup;
  }

  public JTree getTree(){
    return myTree;
  }

  public void filter(final String filter, final QuickList[] currentQuickListIds) {
    myFilter = filter;
    reset(myKeymap, currentQuickListIds, filter, null);
  }

  // Combine the caller-supplied filter with the optional base filter (AND).
  private @Nullable Condition<? super AnAction> combineWithBaseFilter(@Nullable Condition<? super AnAction> actionFilter) {
    if (actionFilter != null) return myBaseFilter != null ? Conditions.and(myBaseFilter, actionFilter) : actionFilter;
    return myBaseFilter;
  }

  /**
   * Rebuilds the whole tree for the given keymap/filter/shortcut, preserving
   * expanded and selected paths across the rebuild via PathsKeeper.
   */
  private void reset(@NotNull Keymap keymap, QuickList @NotNull [] allQuickLists, String filter, @Nullable Shortcut shortcut) {
    myKeymap = keymap;

    final PathsKeeper pathsKeeper = new PathsKeeper();
    pathsKeeper.storePaths();

    myRoot.removeAllChildren();

    ActionManager actionManager = ActionManager.getInstance();
    Project project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(myComponent));
    Condition<? super AnAction> condFilter = combineWithBaseFilter(ActionsTreeUtil.isActionFiltered(actionManager, keymap, shortcut, filter, true));
    Group mainGroup = ActionsTreeUtil.createMainGroup(project, keymap, allQuickLists, filter, true, condFilter);
    // If strict matching produced nothing, retry with relaxed matching.
    if ((filter != null && filter.length() > 0 || shortcut != null) && mainGroup.initIds().isEmpty()) {
      condFilter = combineWithBaseFilter(ActionsTreeUtil.isActionFiltered(actionManager, keymap, shortcut, filter, false));
      mainGroup = ActionsTreeUtil.createMainGroup(project, keymap, allQuickLists, filter, false, condFilter);
    }
    myRoot = ActionsTreeUtil.createNode(mainGroup);
    myMainGroup = mainGroup;
    MyModel model = (MyModel)myTree.getModel();
    model.setRoot(myRoot);
    model.nodeStructureChanged(myRoot);

    pathsKeeper.restorePaths();

    getComponent().repaint();
  }

  public void filterTree(Shortcut shortcut, QuickList[] currentQuickListIds) {
    reset(myKeymap, currentQuickListIds, myFilter, shortcut);
  }

  /**
   * Two-column tree-table model: column 0 is the action tree node itself,
   * column 1 is the shortcut text for that row's action.
   */
  private class MyModel extends DefaultTreeModel implements TreeTableModel {
    protected MyModel(DefaultMutableTreeNode root) {
      super(root);
    }

    @Override
    public void setTree(JTree tree) {
    }

    @Override
    public int getColumnCount() {
      return 2;
    }

    @Override
    public String getColumnName(int column) {
      switch (column) {
        case 0: return KeyMapBundle.message("action.column.name");
        case 1: return KeyMapBundle.message("shortcuts.column.name");
      }
      return "";
    }

    @Override
    public Object getValueAt(Object value, int column) {
      if (!(value instanceof DefaultMutableTreeNode)) {
        return "???";
      }
      if (column == 0) {
        return value;
      }
      else if (column == 1) {
        Object userObject = ((DefaultMutableTreeNode)value).getUserObject();
        if (userObject instanceof QuickList) {
          userObject = ((QuickList)userObject).getActionId();
        }
        return userObject instanceof String ? KeymapUtil.getShortcutsText(myKeymap.getShortcuts((String)userObject)) : "";
      }
      else {
        return "???";
      }
    }

    @Override
    public Object getChild(Object parent, int index) {
      return ((TreeNode)parent).getChildAt(index);
    }

    @Override
    public int getChildCount(Object parent) {
      return ((TreeNode)parent).getChildCount();
    }

    @Override
    public Class getColumnClass(int column) {
      if (column == 0) {
        return TreeTableModel.class;
      }
      else {
        return Object.class;
      }
    }

    @Override
    public boolean isCellEditable(Object node, int column) {
      return column == 0;
    }

    @Override
    public void setValueAt(Object aValue, Object node, int column) {
    }
  }

  /** True if the action's shortcuts differ from its parent keymap's shortcuts. */
  public static boolean isShortcutCustomized(@NotNull String actionId, @NotNull Keymap keymap) {
    if (!keymap.canModify()) return false; // keymap is not customized
    Keymap parent = keymap.getParent();
    return parent != null && !Arrays.equals(parent.getShortcuts(actionId), keymap.getShortcuts(actionId));
  }

  // Recursively checks whether any action inside the group (or the group itself)
  // has customized shortcuts.
  private static boolean areGroupShortcutsCustomized(@NotNull Group group, @NotNull Keymap keymap) {
    if (!keymap.canModify()) return false;

    ArrayList children = group.getChildren();
    for (Object child : children) {
      if (child instanceof Group) {
        if (areGroupShortcutsCustomized((Group)child, keymap)) {
          return true;
        }
      }
      else if (child instanceof String) {
        String actionId = (String)child;
        if (isShortcutCustomized(actionId, keymap)) {
          return true;
        }
      }
      else if (child instanceof QuickList) {
        String actionId = ((QuickList)child).getActionId();
        if (isShortcutCustomized(actionId, keymap)) {
          return true;
        }
      }
    }

    return group.getId() != null && isShortcutCustomized(group.getId(), keymap);
  }

  /**
   * Selects the node for the given action id; if the action is not in the tree
   * itself, falls back to the action it is bound to (alias).
   */
  public void selectAction(String actionId) {
    String path = myMainGroup.getActionQualifiedPath(actionId, false);
    String boundId = path == null ? KeymapManagerEx.getInstanceEx().getActionBinding(actionId) : null;
    if (path == null && boundId != null) {
      path = myMainGroup.getActionQualifiedPath(boundId, false);
      if (path == null) {
        return;
      }
    }
    final DefaultMutableTreeNode node = getNodeForPath(path);
    if (node == null) {
      return;
    }

    TreeUtil.selectInTree(node, true, myTree);
  }

  // Linear scan of the whole tree for the node with the given qualified path.
  @Nullable
  private DefaultMutableTreeNode getNodeForPath(String path) {
    Enumeration enumeration = ((DefaultMutableTreeNode)myTree.getModel().getRoot()).preorderEnumeration();
    while (enumeration.hasMoreElements()) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)enumeration.nextElement();
      if (Objects.equals(getPath(node, false), path)) {
        return node;
      }
    }
    return null;
  }

  // All nodes whose qualified path is contained in 'paths' (preorder).
  private List<DefaultMutableTreeNode> getNodesByPaths(List<String> paths) {
    List<DefaultMutableTreeNode> result = new SmartList<>();
    Enumeration enumeration = ((DefaultMutableTreeNode)myTree.getModel().getRoot()).preorderEnumeration();
    while (enumeration.hasMoreElements()) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)enumeration.nextElement();
      final String path = getPath(node, false);
      if (paths.contains(path)) {
        result.add(node);
      }
    }
    return result;
  }

  /**
   * Qualified path of the node's user object: action path, group path, or quick
   * list name; null for other node kinds (e.g. separators).
   */
  @Nullable
  private String getPath(DefaultMutableTreeNode node, boolean presentable) {
    final Object userObject = node.getUserObject();
    if (userObject instanceof String) {
      String actionId = (String)userObject;
      final TreeNode parent = node.getParent();
      if (parent instanceof DefaultMutableTreeNode) {
        final Object object = ((DefaultMutableTreeNode)parent).getUserObject();
        if (object instanceof Group) {
          return ((Group)object).getActionQualifiedPath(actionId, presentable);
        }
      }
      return myMainGroup.getActionQualifiedPath(actionId, presentable);
    }
    if (userObject instanceof Group) {
      return ((Group)userObject).getQualifiedPath(presentable);
    }
    if (userObject instanceof QuickList) {
      return ((QuickList)userObject).getName();
    }
    return null;
  }

  /**
   * Centers the icon on an 18x18 transparent base so all rows align; icons
   * larger than the base are dropped entirely.
   */
  public static Icon getEvenIcon(Icon icon) {
    LayeredIcon layeredIcon = new LayeredIcon(2);
    layeredIcon.setIcon(EMPTY_ICON, 0);
    if (icon != null && icon.getIconHeight() <= EMPTY_ICON.getIconHeight() && icon.getIconWidth() <= EMPTY_ICON.getIconWidth()) {
      layeredIcon
        .setIcon(icon, 1, (-icon.getIconWidth() + EMPTY_ICON.getIconWidth()) / 2, (EMPTY_ICON.getIconHeight() - icon.getIconHeight()) / 2);
    }
    return layeredIcon;
  }

  /**
   * Snapshots expanded/selected paths before a rebuild and restores them after,
   * so reset() does not collapse the user's view.
   */
  private class PathsKeeper {
    private ArrayList<String> myPathsToExpand;
    private ArrayList<String> mySelectionPaths;

    public void storePaths() {
      myPathsToExpand = new ArrayList<>();
      mySelectionPaths = new ArrayList<>();

      DefaultMutableTreeNode root = (DefaultMutableTreeNode)myTree.getModel().getRoot();

      TreePath path = new TreePath(root.getPath());
      if (myTree.isPathSelected(path)){
        addPathToList(root, mySelectionPaths);
      }
      if (myTree.isExpanded(path) || root.getChildCount() == 0){
        addPathToList(root, myPathsToExpand);
        _storePaths(root);
      }
    }

    private void addPathToList(DefaultMutableTreeNode root, ArrayList<? super String> list) {
      String path = getPath(root, false);
      if (!StringUtil.isEmpty(path)) {
        list.add(path);
      }
    }

    // Recursive part of storePaths(): walks only expanded subtrees.
    private void _storePaths(DefaultMutableTreeNode root) {
      ArrayList<TreeNode> childNodes = childrenToArray(root);
      for (final Object childNode1 : childNodes) {
        DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)childNode1;
        TreePath path = new TreePath(childNode.getPath());
        if (myTree.isPathSelected(path)) {
          addPathToList(childNode, mySelectionPaths);
        }
        if ((myTree.isExpanded(path) || childNode.getChildCount() == 0) && !childNode.isLeaf()) {
          addPathToList(childNode, myPathsToExpand);
          _storePaths(childNode);
        }
      }
    }

    public void restorePaths() {
      for (DefaultMutableTreeNode node : getNodesByPaths(myPathsToExpand)) {
        myTree.expandPath(new TreePath(node.getPath()));
      }

      if (myTree.getSelectionModel().getSelectionCount() == 0) {
        List<DefaultMutableTreeNode> nodesToSelect = getNodesByPaths(mySelectionPaths);
        if (!nodesToSelect.isEmpty()) {
          for (DefaultMutableTreeNode node : nodesToSelect) {
            TreeUtil.selectNode(myTree, node);
          }
        }
        else {
          // Nothing survived the rebuild — select the first row as a fallback.
          myTree.setSelectionRow(0);
        }
      }
    }

    private ArrayList<TreeNode> childrenToArray(DefaultMutableTreeNode node) {
      ArrayList<TreeNode> arrayList = new ArrayList<>();
      for(int i = 0; i < node.getChildCount(); i++){
        arrayList.add(node.getChildAt(i));
      }
      return arrayList;
    }
  }

  /**
   * Renders one row: icon + name (blue when customized), optional
   * "uses shortcut of ..." hyperlink, and optional plugin/action-id annotation.
   */
  private class KeymapsRenderer extends ColoredTreeCellRenderer {
    // Secondary renderer used to lay out and paint the hyperlink fragment.
    private final MyColoredTreeCellRenderer myLink = new MyColoredTreeCellRenderer();
    private boolean myHaveLink;
    private int myLinkOffset;
    private int myLinkWidth;
    private int myRow;

    // Make sure that the text rendered by this method is 'searchable' via com.intellij.openapi.keymap.impl.ui.ActionsTree.filter method.
    @Override
    public void customizeCellRenderer(@NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      myRow = row;
      myHaveLink = false;
      myLink.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
      final boolean showIcons = UISettings.getInstance().getShowIconsInMenus();
      Icon icon = null;
      String text;
      @NlsSafe String actionId = null;
      String boundId = null;
      @NlsActions.ActionText String boundText = null;
      setToolTipText(null);

      if (value instanceof DefaultMutableTreeNode) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode)value;
        Object userObject = node.getUserObject();
        boolean changed;
        if (userObject instanceof Group) {
          Group group = (Group)userObject;
          actionId = group.getId();
          text = group.getName();
          changed = myKeymap != null && areGroupShortcutsCustomized(group, myKeymap);
          icon = group.getIcon();
          if (icon == null){
            icon = CLOSE_ICON;
          }
        }
        else if (userObject instanceof String) {
          actionId = (String)userObject;
          // boundId is only set when the action has no shortcut of its own.
          boundId = ((KeymapImpl)myKeymap).hasShortcutDefined(actionId) ? null : KeymapManagerEx.getInstanceEx().getActionBinding(actionId);
          ActionManager manager = ActionManager.getInstance();
          AnAction action = manager.getAction(actionId);
          text = getActionText(action, actionId);
          if (action != null) {
            Icon actionIcon = action.getTemplatePresentation().getIcon();
            if (actionIcon != null) {
              icon = actionIcon;
            }
            setToolTipText(action.getTemplatePresentation().getDescription());
          }
          boundText = boundId == null ? null : getActionText(manager.getAction(boundId), boundId);
          changed = myKeymap != null && isShortcutCustomized(actionId, myKeymap);
        }
        else if (userObject instanceof QuickList) {
          QuickList list = (QuickList)userObject;
          icon = null; // AllIcons.Actions.QuickList;
          text = list.getName();
          changed = myKeymap != null && isShortcutCustomized(list.getActionId(), myKeymap);
        }
        else if (userObject instanceof Separator) {
          // TODO[vova,anton]: beautify
          changed = false;
          text = "-------------";
        }
        else if (userObject instanceof Hyperlink) {
          getIpad().right = 0;
          myLink.getIpad().left = 0;
          myHaveLink = true;
          Hyperlink link = (Hyperlink)userObject;
          changed = false;
          text = "";
          append(link.getLinkText(), link.getTextAttributes(), link);
          icon = link.getIcon();
          setIcon(getEvenIcon(link.getIcon()));
          Rectangle treeVisibleRect = tree.getVisibleRect();
          int rowX = TreeUtil.getNodeRowX(tree, row);
          setupLinkDimensions(treeVisibleRect, rowX);
        }
        else {
          throw new IllegalArgumentException("unknown userObject: " + userObject);
        }

        if (showIcons) {
          setIcon(getEvenIcon(icon));
        }

        // Customized rows are drawn blue; selection colors win over that.
        Color foreground;
        if (selected) {
          foreground = UIUtil.getTreeForeground(true, hasFocus);
        }
        else {
          if (changed) {
            foreground = PlatformColors.BLUE;
          }
          else {
            foreground = UIUtil.getTreeForeground();
          }
        }

        if (!myHaveLink) {
          Color background = UIUtil.getTreeBackground(selected, true);
          SearchUtil.appendFragments(myFilter, text, SimpleTextAttributes.STYLE_PLAIN, foreground, background, this);
          if (boundId != null) {
            append(" ");
            append(IdeBundle.message("uses.shortcut.of"), SimpleTextAttributes.GRAY_ATTRIBUTES);
            append(" ");
            ActionHyperlink link = new ActionHyperlink(boundId, boundText);
            append(link.getLinkText(), link.getTextAttributes(), link);
          }
          if (actionId != null && UISettings.getInstance().getShowInplaceCommentsInternal()) {
            @NlsSafe String pluginName = myPluginNames.get(actionId);
            if (pluginName != null) {
              Group parentGroup = (Group)((DefaultMutableTreeNode)node.getParent()).getUserObject();
              // Don't repeat the plugin name when the row sits under its plugin group.
              if (pluginName.equals(parentGroup.getName())) pluginName = null;
            }
            append(" ");
            append(pluginName != null ? actionId + " (" + pluginName + ")" : actionId, SimpleTextAttributes.GRAYED_SMALL_ATTRIBUTES);
          }
        }
      }
      putClientProperty(ExpandableItemsHandler.RENDERER_DISABLED, myHaveLink);
    }

    @NlsActions.ActionText
    private String getActionText(@Nullable AnAction action, @NlsSafe String actionId) {
      String text = action == null ? null : action.getTemplatePresentation().getText();
      if (text == null || text.length() == 0) { //fill dynamic presentation gaps
        text = actionId;
      }
      return text;
    }

    // Compute where the link fragment starts and how wide it is, clamped to the
    // visible area of the tree.
    private void setupLinkDimensions(Rectangle treeVisibleRect, int rowX) {
      Dimension linkSize = myLink.getPreferredSize();
      myLinkWidth = linkSize.width;
      myLinkOffset = Math.min(super.getPreferredSize().width - 1, treeVisibleRect.x + treeVisibleRect.width - myLinkWidth - rowX);
    }

    @Override
    public void append(@NotNull String fragment, @NotNull SimpleTextAttributes attributes, Object tag) {
      // Generic hyperlinks go to the secondary renderer; ActionHyperlinks stay inline.
      if (tag instanceof Hyperlink && !(tag instanceof ActionHyperlink)) {
        myHaveLink = true;
        myLink.append(fragment, attributes, tag);
      }
      else {
        super.append(fragment, attributes, tag);
      }
    }

    @Override
    protected void doPaint(Graphics2D g) {
      if (!myHaveLink) {
        super.doPaint(g);
      }
      // Paint the regular text clipped to the link offset, then the link after it.
      UIUtil.useSafely(g.create(0, 0, myLinkOffset, g.getClipBounds().height), textGraphics -> super.doPaint(textGraphics));
      g.translate(myLinkOffset, 0);
      myLink.setHeight(getHeight());
      myLink.doPaint(g);
      g.translate(-myLinkOffset, 0);
    }

    @NotNull
    @Override
    public Dimension getPreferredSize() {
      Dimension size = super.getPreferredSize();
      if (myHaveLink) {
        size.width += myLinkWidth;
      }
      return size;
    }

    @Nullable
    @Override
    public Object getFragmentTagAt(int x) {
      if (myHaveLink) {
        return myLink.getFragmentTagAt(x - myLinkOffset);
      }
      return super.getFragmentTagAt(x);
    }

    @Override
    public AccessibleContext getAccessibleContext() {
      if (accessibleContext == null) {
        accessibleContext = new AccessibleKeymapsRenderer();
      }
      return accessibleContext;
    }

    /** Accessible name includes the row's shortcuts for screen readers. */
    protected class AccessibleKeymapsRenderer extends AccessibleColoredTreeCellRenderer {
      @Override
      public String getAccessibleName() {
        String name = super.getAccessibleName();

        // Add shortcuts labels if available
        @NlsSafe String shortcutName = null;
        TreePath path = myTree.getPathForRow(myRow);
        if (path == null) return KeyMapBundle.message("accessible.name.unknown");
        Object node = path.getLastPathComponent();
        if (node instanceof DefaultMutableTreeNode) {
          Object data = ((DefaultMutableTreeNode)node).getUserObject();
          if (!(data instanceof Hyperlink)) {
            RowData rowData = extractRowData(data);
            Shortcut[] shortcuts = rowData.shortcuts;
            if (shortcuts != null && shortcuts.length > 0) {
              StringBuilder sb = new StringBuilder();
              for (Shortcut shortcut : shortcuts) {
                if (sb.length() > 0) sb.append(", ");
                sb.append(KeyMapBundle.message("accessible.name.shortcut"));
                sb.append(KeymapUtil.getShortcutText(shortcut));
              }
              if (sb.length() > 0) {
                shortcutName = sb.toString();
              }
            }
          }
        }

        return AccessibleContextUtil.combineAccessibleStrings(name, ", ", shortcutName);
      }
    }

    /** Hyperlink that navigates the tree to the referenced action when clicked. */
    private class ActionHyperlink extends Hyperlink {
      private final String myActionId;

      ActionHyperlink(String actionId, @NlsContexts.LinkLabel String actionText) {
        super(null, actionText, GRAY_LINK);
        myActionId = actionId;
      }

      @Override
      public void onClick(MouseEvent event) {
        selectAction(myActionId);
      }
    }
  }

  // Resolve a node's user object into its shortcuts + abbreviations for painting.
  @NotNull
  private RowData extractRowData(Object data) {
    String actionId = null;
    if (data instanceof String) {
      actionId = (String)data;
    }
    else if (data instanceof QuickList) {
      actionId = ((QuickList)data).getActionId();
    }
    else if (data instanceof Group) {
      actionId = ((Group)data).getId();
    }
    if (actionId == null) return new RowData(null, null);
    Shortcut[] shortcuts = myKeymap.getShortcuts(actionId);
    Set<String> abbreviations = AbbreviationManager.getInstance().getAbbreviations(actionId);
    return new RowData(shortcuts, abbreviations);
  }

  /** Value pair painted at the right edge of a row; either field may be null. */
  private static final class RowData {
    public final Shortcut[] shortcuts;
    public final Set<String> abbreviations;

    private RowData(Shortcut[] shortcuts, Set<String> abbreviations) {
      this.shortcuts = shortcuts;
      this.abbreviations = abbreviations;
    }
  }

  /**
   * Paints right-aligned "pills" for the row's shortcuts (orange gradient) and
   * abbreviations (green gradient) directly onto the tree's graphics.
   */
  @SuppressWarnings("UseJBColor")
  private void paintRowData(Tree tree, Object data, Rectangle bounds, Graphics2D g) {
    RowData rowData = extractRowData(data);
    Shortcut[] shortcuts = rowData.shortcuts;
    Set<String> abbreviations = rowData.abbreviations;

    final GraphicsConfig config = GraphicsUtil.setupAAPainting(g);

    int totalWidth = 0;
    final FontMetrics metrics = tree.getFontMetrics(tree.getFont());
    if (shortcuts != null && shortcuts.length > 0) {
      for (Shortcut shortcut : shortcuts) {
        totalWidth += metrics.stringWidth(KeymapUtil.getShortcutText(shortcut));
        totalWidth += 10;
      }
      totalWidth -= 5;

      int x = bounds.x + bounds.width - totalWidth;
      int fontHeight = (int)metrics.getMaxCharBounds(g).getHeight();

      Color c1 = new Color(234, 200, 162);
      Color c2 = new Color(208, 200, 66);

      g.translate(0, bounds.y - 1);

      for (Shortcut shortcut : shortcuts) {
        int width = metrics.stringWidth(KeymapUtil.getShortcutText(shortcut));
        UIUtil.drawSearchMatch(g, x, x + width, bounds.height, c1, c2);
        g.setColor(Gray._50);
        g.drawString(KeymapUtil.getShortcutText(shortcut), x, fontHeight);

        x += width;
        x += 10;
      }
      g.translate(0, -bounds.y + 1);
    }

    if (abbreviations != null && abbreviations.size() > 0) {
      for (String abbreviation : abbreviations) {
        totalWidth += metrics.stringWidth(abbreviation);
        totalWidth += 10;
      }
      totalWidth -= 5;

      int x = bounds.x + bounds.width - totalWidth;
      int fontHeight = (int)metrics.getMaxCharBounds(g).getHeight();

      Color c1 = new Color(206, 234, 176);
      Color c2 = new Color(126, 208, 82);

      g.translate(0, bounds.y - 1);

      for (String abbreviation : abbreviations) {
        int width = metrics.stringWidth(abbreviation);
        UIUtil.drawSearchMatch(g, x, x + width, bounds.height, c1, c2);
        g.setColor(Gray._50);
        g.drawString(abbreviation, x, fontHeight);

        x += width;
        x += 10;
      }
      g.translate(0, -bounds.y + 1);
    }

    config.restore();
  }

  /**
   * Minimal renderer used only to measure and paint the hyperlink fragment;
   * exposes an externally-settable height so it matches the host row.
   */
  private static class MyColoredTreeCellRenderer extends ColoredTreeCellRenderer {
    private int myHeight;

    @Override
    public void customizeCellRenderer(@NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
    }

    @Override
    protected void doPaint(Graphics2D g) {
      super.doPaint(g);
    }

    public void setHeight(int height) {
      myHeight = height;
    }

    @Override
    public int getHeight() {
      return myHeight;
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.examples.terasort; import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.LineRecordReader; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.IndexedSortable; import org.apache.hadoop.util.QuickSort; /** * An input format that reads the first 10 characters of each line as the key * and the rest of the line as the value. Both key and value are represented as * Text. 
*/ public class TeraInputFormat extends FileInputFormat<Text, Text> { static final String PARTITION_FILENAME = "_partition.lst"; static final String SAMPLE_SIZE = "terasort.partitions.sample"; private static JobConf lastConf = null; private static InputSplit[] lastResult = null; static class TextSampler implements IndexedSortable { private ArrayList<Text> records = new ArrayList<Text>(); public int compare(int i, int j) { Text left = records.get(i); Text right = records.get(j); return left.compareTo(right); } public void swap(int i, int j) { Text left = records.get(i); Text right = records.get(j); records.set(j, left); records.set(i, right); } public void addKey(Text key) { records.add(new Text(key)); } /** * Find the split points for a given sample. The sample keys are sorted * and down sampled to find even split points for the partitions. The * returned keys should be the start of their respective partitions. * * @param numPartitions * the desired number of partitions * @return an array of size numPartitions - 1 that holds the split * points */ Text[] createPartitions(int numPartitions) { int numRecords = records.size(); System.out.println("Making " + numPartitions + " from " + numRecords + " records"); if (numPartitions > numRecords) { throw new IllegalArgumentException("Requested more partitions than input keys (" + numPartitions + " > " + numRecords + ")"); } new QuickSort().sort(this, 0, records.size()); float stepSize = numRecords / (float) numPartitions; System.out.println("Step size is " + stepSize); Text[] result = new Text[numPartitions - 1]; for (int i = 1; i < numPartitions; ++i) { result[i - 1] = records.get(Math.round(stepSize * i)); } return result; } } /** * Use the input splits to take samples of the input and generate sample * keys. By default reads 100,000 keys from 10 locations in the input, sorts * them and picks N-1 keys to generate N equally sized partitions. 
* * @param conf * the job to sample * @param partFile * where to write the output file to * @throws IOException * if something goes wrong */ public static void writePartitionFile(JobConf conf, Path partFile) throws IOException { TeraInputFormat inFormat = new TeraInputFormat(); TextSampler sampler = new TextSampler(); Text key = new Text(); Text value = new Text(); int partitions = conf.getNumReduceTasks(); long sampleSize = conf.getLong(SAMPLE_SIZE, 100000); InputSplit[] splits = inFormat.getSplits(conf, conf.getNumMapTasks()); int samples = Math.min(10, splits.length); long recordsPerSample = sampleSize / samples; int sampleStep = splits.length / samples; long records = 0; // take N samples from different parts of the input for (int i = 0; i < samples; ++i) { RecordReader<Text, Text> reader = inFormat.getRecordReader(splits[sampleStep * i], conf, null); while (reader.next(key, value)) { sampler.addKey(key); records += 1; if ((i + 1) * recordsPerSample <= records) { break; } } } FileSystem outFs = partFile.getFileSystem(conf); if (outFs.exists(partFile)) { outFs.delete(partFile, false); } SequenceFile.Writer writer = SequenceFile.createWriter(outFs, conf, partFile, Text.class, NullWritable.class); NullWritable nullValue = NullWritable.get(); for (Text split : sampler.createPartitions(partitions)) { writer.append(split, nullValue); } writer.close(); } static class TeraRecordReader implements RecordReader<Text, Text> { private LineRecordReader in; private LongWritable junk = new LongWritable(); private Text line = new Text(); private static int KEY_LENGTH = 10; public TeraRecordReader(Configuration job, FileSplit split) throws IOException { in = new LineRecordReader(job, split); } public void close() throws IOException { in.close(); } public Text createKey() { return new Text(); } public Text createValue() { return new Text(); } public long getPos() throws IOException { return in.getPos(); } public float getProgress() throws IOException { return in.getProgress(); } 
public boolean next(Text key, Text value) throws IOException { if (in.next(junk, line)) { if (line.getLength() < KEY_LENGTH) { key.set(line); value.clear(); } else { byte[] bytes = line.getBytes(); key.set(bytes, 0, KEY_LENGTH); value.set(bytes, KEY_LENGTH, line.getLength() - KEY_LENGTH); } return true; } else { return false; } } } @Override public RecordReader<Text, Text> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException { return new TeraRecordReader(job, (FileSplit) split); } @Override public InputSplit[] getSplits(JobConf conf, int splits) throws IOException { if (conf == lastConf) { return lastResult; } lastConf = conf; lastResult = super.getSplits(conf, splits); return lastResult; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sqoop.manager; import java.io.IOException; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeMap; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.sqoop.util.LoggingUtils; import com.cloudera.sqoop.SqoopOptions; import com.cloudera.sqoop.SqoopOptions.UpdateMode; import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat; import com.cloudera.sqoop.mapreduce.JdbcExportJob; import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob; import com.cloudera.sqoop.mapreduce.OracleUpsertOutputFormat; import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat; import 
com.cloudera.sqoop.util.ExportException;
import com.cloudera.sqoop.util.ImportException;

/**
 * Manages connections to Oracle databases.
 * Requires the Oracle JDBC driver.
 */
public class OracleManager
    extends com.cloudera.sqoop.manager.GenericJdbcManager {

  public static final Log LOG = LogFactory.getLog(
      OracleManager.class.getName());

  /**
   * ORA-00942: Table or view does not exist. Indicates that the user does
   * not have permissions.
   */
  public static final int ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST = 942;

  /**
   * This is a catalog view query to list the databases. For Oracle we map the
   * concept of a database to a schema, and a schema is identified by a user.
   * In order for the catalog view DBA_USERS be visible to the user who executes
   * this query, they must have the DBA privilege.
   */
  public static final String QUERY_LIST_DATABASES =
    "SELECT USERNAME FROM DBA_USERS";

  /**
   * Query to list all tables visible to the current user. Note that this list
   * does not identify the table owners which is required in order to
   * ensure that the table can be operated on for import/export purposes.
   */
  public static final String QUERY_LIST_TABLES =
    "SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = ?";

  /**
   * Query to list all columns of the given table. Even if the user has the
   * privileges to access table objects from another schema, this query will
   * limit it to explore tables only from within the active schema.
   */
  public static final String QUERY_COLUMNS_FOR_TABLE =
    "SELECT COLUMN_NAME FROM ALL_TAB_COLUMNS WHERE "
    + "OWNER = ? AND TABLE_NAME = ? ORDER BY COLUMN_ID";

  /**
   * Query to find the primary key column name for a given table. This query
   * is restricted to the current schema.
   */
  public static final String QUERY_PRIMARY_KEY_FOR_TABLE =
    "SELECT ALL_CONS_COLUMNS.COLUMN_NAME FROM ALL_CONS_COLUMNS, "
    + "ALL_CONSTRAINTS WHERE ALL_CONS_COLUMNS.CONSTRAINT_NAME = "
    + "ALL_CONSTRAINTS.CONSTRAINT_NAME AND "
    + "ALL_CONSTRAINTS.CONSTRAINT_TYPE = 'P' AND "
    + "ALL_CONS_COLUMNS.TABLE_NAME = ? AND "
    + "ALL_CONS_COLUMNS.OWNER = ?";

  /**
   * Query to get the current user for the DB session. Used in case of
   * wallet logins.
   */
  public static final String QUERY_GET_SESSIONUSER =
    "SELECT USER FROM DUAL";

  // driver class to ensure is loaded when making db connection.
  private static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";

  // Configuration key to use to set the session timezone.
  public static final String ORACLE_TIMEZONE_KEY = "oracle.sessionTimeZone";

  // Oracle XE does a poor job of releasing server-side resources for
  // closed connections. So we actually want to cache connections as
  // much as possible. This is especially important for JUnit tests which
  // may need to make 60 or more connections (serially), since each test
  // uses a different OracleManager instance.
  private static class ConnCache {

    public static final Log LOG = LogFactory.getLog(ConnCache.class.getName());

    // Key for the cache: one cached connection per (connect string, user)
    // pair. username may be null (e.g. wallet logins).
    private static class CacheKey {
      private final String connectString;
      private final String username;

      public CacheKey(String connect, String user) {
        this.connectString = connect;
        this.username = user; // note: may be null.
      }

      @Override
      public boolean equals(Object o) {
        if (o instanceof CacheKey) {
          CacheKey k = (CacheKey) o;
          if (null == username) {
            return k.username == null && k.connectString.equals(connectString);
          } else {
            return k.username.equals(username)
                && k.connectString.equals(connectString);
          }
        } else {
          return false;
        }
      }

      @Override
      public int hashCode() {
        if (null == username) {
          return connectString.hashCode();
        } else {
          return username.hashCode() ^ connectString.hashCode();
        }
      }

      @Override
      public String toString() {
        return connectString + "/" + username;
      }
    }

    // At most one cached Connection per CacheKey; guarded by synchronized
    // methods on this ConnCache instance.
    private Map<CacheKey, Connection> connectionMap;

    public ConnCache() {
      LOG.debug("Instantiated new connection cache.");
      connectionMap = new HashMap<CacheKey, Connection>();
    }

    /**
     * @return a Connection instance that can be used to connect to the
     * given database, if a previously-opened connection is available in
     * the cache.
Returns null if none is available in the map. */ public synchronized Connection getConnection(String connectStr, String username) throws SQLException { CacheKey key = new CacheKey(connectStr, username); Connection cached = connectionMap.get(key); if (null != cached) { connectionMap.remove(key); if (cached.isReadOnly()) { // Read-only mode? Don't want it. cached.close(); } if (cached.isClosed()) { // This connection isn't usable. return null; } cached.rollback(); // Reset any transaction state. cached.clearWarnings(); LOG.debug("Got cached connection for " + key); } return cached; } /** * Returns a connection to the cache pool for future use. If a connection * is already cached for the connectstring/username pair, then this * connection is closed and discarded. */ public synchronized void recycle(String connectStr, String username, Connection conn) throws SQLException { CacheKey key = new CacheKey(connectStr, username); Connection existing = connectionMap.get(key); if (null != existing) { // Cache is already full for this entry. LOG.debug("Discarding additional connection for " + key); conn.close(); return; } // Put it in the map for later use. LOG.debug("Caching released connection for " + key); connectionMap.put(key, conn); } @Override protected synchronized void finalize() throws Throwable { for (Connection c : connectionMap.values()) { c.close(); } super.finalize(); } } private static final ConnCache CACHE; static { CACHE = new ConnCache(); } public OracleManager(final SqoopOptions opts) { super(DRIVER_CLASS, opts); } public void close() throws SQLException { release(); // Release any open statements associated with the connection. if (hasOpenConnection()) { // Release our open connection back to the cache. CACHE.recycle(options.getConnectString(), options.getUsername(), getConnection()); discardConnection(false); } } protected String getColNamesQuery(String tableName) { // SqlManager uses "tableName AS t" which doesn't work in Oracle. 
    String query = "SELECT t.* FROM " + escapeTableName(tableName)
        + " t WHERE 1=0";
    LOG.debug("Using column names query: " + query);
    return query;
  }

  /**
   * Create a connection to the database; usually used only from within
   * getConnection(), which enforces a singleton guarantee around the
   * Connection object.
   *
   * Oracle-specific driver uses READ_COMMITTED which is the weakest
   * semantics Oracle supports.
   */
  protected Connection makeConnection() throws SQLException {
    Connection connection;
    String driverClass = getDriverClass();

    try {
      Class.forName(driverClass);
    } catch (ClassNotFoundException cnfe) {
      throw new RuntimeException("Could not load db driver class: "
          + driverClass);
    }

    String username = options.getUsername();
    String password = options.getPassword();
    String connectStr = options.getConnectString();

    // Prefer a cached connection; any SQLException from the cache is
    // treated as a miss and a fresh connection is created instead.
    try {
      connection = CACHE.getConnection(connectStr, username);
    } catch (SQLException e) {
      connection = null;
      // NOTE(review): typo "connecion" in this log message; left as-is.
      LOG.debug("Cached connecion has expired.");
    }

    if (null == connection) {
      // Couldn't pull one from the cache. Get a new one.
      LOG.debug("Creating a new connection for " + connectStr
          + ", using username: " + username);
      Properties connectionParams = options.getConnectionParams();
      if (connectionParams != null && connectionParams.size() > 0) {
        LOG.debug("User specified connection params. "
            + "Using properties specific API for making connection.");
        Properties props = new Properties();
        if (username != null) {
          props.put("user", username);
        }
        if (password != null) {
          props.put("password", password);
        }
        props.putAll(connectionParams);
        connection = DriverManager.getConnection(connectStr, props);
      } else {
        // NOTE(review): typo "paramenters" in this log message; left as-is.
        LOG.debug("No connection paramenters specified. "
            + "Using regular API for making connection.");
        if (username == null) {
          connection = DriverManager.getConnection(connectStr);
        } else {
          connection = DriverManager.getConnection(
              connectStr, username, password);
        }
      }
    }

    // We only use this for metadata queries. Loosest semantics are okay.
    connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

    // Setting session time zone
    setSessionTimeZone(connection);

    // Rest of the Sqoop code expects that the connection will have be running
    // without autoCommit, so we need to explicitly set it to false. This is
    // usually done directly by SqlManager in the makeConnection method, but
    // since we are overriding it, we have to do it ourselves.
    connection.setAutoCommit(false);

    return connection;
  }

  /**
   * Query the session user (SELECT USER FROM DUAL); needed for wallet
   * logins where options.getUsername() may be unset.
   * @throws RuntimeException if the user could not be determined.
   */
  public static String getSessionUser(Connection conn) {
    Statement stmt = null;
    ResultSet rset = null;
    String user = null;
    try {
      stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
          ResultSet.CONCUR_READ_ONLY);
      rset = stmt.executeQuery(QUERY_GET_SESSIONUSER);

      if (rset.next()) {
        user = rset.getString(1);
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        conn.rollback();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }
    }

    if (user == null) {
      throw new RuntimeException("Unable to get current session user");
    }
    return user;
  }

  /**
   * Set session time zone.
   * @param conn Connection object
   * @throws SQLException instance
   */
  private void setSessionTimeZone(Connection conn) throws SQLException {
    // Need to use reflection to call the method setSessionTimeZone on the
    // OracleConnection class because oracle specific java libraries are not
    // accessible in this context.
Method method; try { method = conn.getClass().getMethod( "setSessionTimeZone", new Class [] {String.class}); } catch (Exception ex) { LOG.error("Could not find method setSessionTimeZone in " + conn.getClass().getName(), ex); // rethrow SQLException throw new SQLException(ex); } // Need to set the time zone in order for Java to correctly access the // column "TIMESTAMP WITH LOCAL TIME ZONE". The user may have set this in // the configuration as 'oracle.sessionTimeZone'. String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY, "GMT"); try { method.setAccessible(true); method.invoke(conn, clientTimeZoneStr); LOG.info("Time zone has been set to " + clientTimeZoneStr); } catch (Exception ex) { LOG.warn("Time zone " + clientTimeZoneStr + " could not be set on Oracle database."); LOG.info("Setting default time zone: GMT"); try { // Per the documentation at: // http://download-west.oracle.com/docs/cd/B19306_01 // /server.102/b14225/applocaledata.htm#i637736 // The "GMT" timezone is guaranteed to exist in the available timezone // regions, whereas others (e.g., "UTC") are not. method.invoke(conn, "GMT"); } catch (Exception ex2) { LOG.error("Could not set time zone for oracle connection", ex2); // rethrow SQLException throw new SQLException(ex); } } } @Override public void importTable( com.cloudera.sqoop.manager.ImportJobContext context) throws IOException, ImportException { context.setConnManager(this); // Specify the Oracle-specific DBInputFormat for import. context.setInputFormat(OracleDataDrivenDBInputFormat.class); super.importTable(context); } /** * Export data stored in HDFS into a table in a database. 
   */
  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    context.setConnManager(this);
    // Batched JDBC statements are used for Oracle exports.
    JdbcExportJob exportJob = new JdbcExportJob(context, null, null,
        ExportBatchOutputFormat.class);
    exportJob.runExport();
  }

  @Override
  /**
   * {@inheritDoc}
   */
  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    context.setConnManager(this);
    JdbcUpsertExportJob exportJob = new JdbcUpsertExportJob(context,
        OracleUpsertOutputFormat.class);
    exportJob.runExport();
  }

  @Override
  /**
   * {@inheritDoc}
   */
  public void configureDbOutputColumns(SqoopOptions options) {
    if (options.getUpdateMode() == UpdateMode.UpdateOnly) {
      super.configureDbOutputColumns(options);
    } else {
      // We're in upsert mode. We need to explicitly set
      // the database output column ordering in the codeGenerator.
      Set<String> updateKeys = new LinkedHashSet<String>();
      Set<String> updateKeysUppercase = new HashSet<String>();
      String updateKeyValue = options.getUpdateKeyCol();
      StringTokenizer stok = new StringTokenizer(updateKeyValue, ",");
      while (stok.hasMoreTokens()) {
        String nextUpdateColumn = stok.nextToken().trim();
        if (nextUpdateColumn.length() > 0) {
          updateKeys.add(nextUpdateColumn);
          updateKeysUppercase.add(nextUpdateColumn.toUpperCase());
        } else {
          throw new RuntimeException("Invalid update key column value specified"
              + ": '" + updateKeyValue + "'");
        }
      }
      String [] allColNames = getColumnNames(options.getTableName());
      List<String> dbOutCols = new ArrayList<String>();
      // Column ordering for the Oracle upsert statement: key columns first,
      // then the non-key columns (update clause), then every column again
      // (insert clause) — so each bind parameter appears as many times as
      // the generated statement references it.
      dbOutCols.addAll(updateKeys);
      for (String col : allColNames) {
        if (!updateKeysUppercase.contains(col.toUpperCase())) {
          dbOutCols.add(col); // add update columns to the output order list.
        }
      }
      for (String col : allColNames) {
        dbOutCols.add(col); // add insert columns to the output order list.
      }
      options.setDbOutputColumns(dbOutCols.toArray(
          new String[dbOutCols.size()]));
    }
  }

  /**
   * Build and execute "SELECT col, ... FROM table" over the given columns
   * (all columns when null).
   */
  @Override
  public ResultSet readTable(String tableName, String[] columns)
      throws SQLException {
    if (columns == null) {
      columns = getColumnNames(tableName);
    }

    StringBuilder sb = new StringBuilder();
    sb.append("SELECT ");
    boolean first = true;
    for (String col : columns) {
      if (!first) {
        sb.append(", ");
      }
      sb.append(escapeColName(col));
      first = false;
    }
    sb.append(" FROM ");
    sb.append(escapeTableName(tableName));

    String sqlCmd = sb.toString();
    LOG.debug("Reading table with command: " + sqlCmd);
    return execute(sqlCmd);
  }

  // Lazily-populated cache of column name -> database type name, filled on
  // first use by toDbSpecificJavaType/toDbSpecificHiveType.
  private Map<String, String> columnTypeNames;

  /**
   * Resolve a database-specific type to the Java type that should contain it.
   * @param tableName table name
   * @param colName column name
   * @return the name of a Java type to hold the sql datatype, or null if none.
   */
  private String toDbSpecificJavaType(String tableName, String colName) {
    if (columnTypeNames == null) {
      columnTypeNames = getColumnTypeNames(tableName, options.getCall(),
          options.getSqlQuery());
    }

    String colTypeName = columnTypeNames.get(colName);
    if (colTypeName != null) {
      if (colTypeName.equalsIgnoreCase("BINARY_FLOAT")) {
        return "Float";
      }
      if (colTypeName.equalsIgnoreCase("FLOAT")) {
        return "Float";
      }
      if (colTypeName.equalsIgnoreCase("BINARY_DOUBLE")) {
        return "Double";
      }
      if (colTypeName.equalsIgnoreCase("DOUBLE")) {
        return "Double";
      }
      if (colTypeName.toUpperCase().startsWith("TIMESTAMP")) {
        return "java.sql.Timestamp";
      }
    }
    return null;
  }

  /**
   * Resolve a database-specific type to the Hive type that should contain it.
   * @param tableName table name
   * @param colName column name
   * @return the name of a Hive type to hold the sql datatype, or null if none.
   */
  private String toDbSpecificHiveType(String tableName, String colName) {
    if (columnTypeNames == null) {
      columnTypeNames = getColumnTypeNames(tableName, options.getCall(),
          options.getSqlQuery());
    }
    LOG.debug("Column Types and names returned = ("
        + StringUtils.join(columnTypeNames.keySet(), ",")
        + ")=>("
        + StringUtils.join(columnTypeNames.values(), ",") + ")");

    String colTypeName = columnTypeNames.get(colName);
    if (colTypeName != null) {
      if (colTypeName.equalsIgnoreCase("BINARY_FLOAT")) {
        return "FLOAT";
      }
      if (colTypeName.equalsIgnoreCase("BINARY_DOUBLE")) {
        return "DOUBLE";
      }
      if (colTypeName.toUpperCase().startsWith("TIMESTAMP")) {
        return "STRING";
      }
    }
    return null;
  }

  /**
   * Return java type for SQL type.
   * @param tableName table name
   * @param columnName column name
   * @param sqlType sql type
   * @return java type
   */
  @Override
  public String toJavaType(String tableName, String columnName, int sqlType) {
    // Fall back to Oracle-specific mappings only when the generic mapping
    // does not cover the type (e.g. BINARY_FLOAT, TIMESTAMP WITH TIME ZONE).
    String javaType = super.toJavaType(tableName, columnName, sqlType);
    if (javaType == null) {
      javaType = toDbSpecificJavaType(tableName, columnName);
    }
    return javaType;
  }

  /**
   * Return hive type for SQL type.
   * @param tableName table name
   * @param columnName column name
   * @param sqlType sql data type
   * @return hive type
   */
  @Override
  public String toHiveType(String tableName, String columnName, int sqlType) {
    String hiveType = super.toHiveType(tableName, columnName, sqlType);
    if (hiveType == null) {
      hiveType = toDbSpecificHiveType(tableName, columnName);
    }
    return hiveType;
  }

  // Ensure the connection is recycled into the cache if the manager is
  // garbage-collected without an explicit close().
  @Override
  protected void finalize() throws Throwable {
    close();
    super.finalize();
  }

  @Override
  protected String getCurTimestampQuery() {
    return "SELECT SYSDATE FROM dual";
  }

  @Override
  public String timestampToQueryString(Timestamp ts) {
    return "TO_TIMESTAMP('" + ts + "', 'YYYY-MM-DD HH24:MI:SS.FF')";
  }

  @Override
  public String datetimeToQueryString(String datetime, int columnType) {
    if (columnType == Types.TIMESTAMP) {
      return "TO_TIMESTAMP('" + datetime + "', 'YYYY-MM-DD HH24:MI:SS.FF')";
    } else if (columnType == Types.DATE) {
      // converting timestamp of the form 2012-11-11 11:11:11.00 to
      // date of the form 2011:11:11 11:11:11
      datetime = datetime.split("\\.")[0];
      return "TO_DATE('" + datetime + "', 'YYYY-MM-DD HH24:MI:SS')";
    } else {
      String msg = "Column type is neither timestamp nor date!";
      LOG.error(msg);
      throw new RuntimeException(msg);
    }
  }

  @Override
  public boolean supportsStagingForExport() {
    return true;
  }

  /**
   * The concept of database in Oracle is mapped to schemas. Each schema
   * is identified by the corresponding username.
   */
  @Override
  public String[] listDatabases() {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rset = null;
    List<String> databases = new ArrayList<String>();

    try {
      conn = getConnection();
      stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
          ResultSet.CONCUR_READ_ONLY);
      rset = stmt.executeQuery(QUERY_LIST_DATABASES);

      while (rset.next()) {
        databases.add(rset.getString(1));
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        // NOTE(review): conn may still be null here if getConnection()
        // itself threw — potential NPE. getPrimaryKey() below guards with
        // `if (conn != null)`; this method (and listTables/getColumnNames)
        // should probably do the same.
        conn.rollback();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }

      if (e.getErrorCode() == ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST) {
        LOG.error("The catalog view DBA_USERS was not found. "
            + "This may happen if the user does not have DBA privileges. "
            + "Please check privileges and try again.");
        LOG.debug("Full trace for ORA-00942 exception", e);
      } else {
        LoggingUtils.logAll(LOG, "Failed to list databases", e);
      }
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }

      try {
        close();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
      }
    }

    return databases.toArray(new String[databases.size()]);
  }

  /**
   * List tables owned by the current session user (see QUERY_LIST_TABLES).
   */
  @Override
  public String[] listTables() {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> tables = new ArrayList<String>();
    String tableOwner = null;

    try {
      conn = getConnection();
      tableOwner = getSessionUser(conn);
      pStmt = conn.prepareStatement(QUERY_LIST_TABLES,
          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      pStmt.setString(1, tableOwner);

      rset = pStmt.executeQuery();

      while (rset.next()) {
        tables.add(rset.getString(1));
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        // NOTE(review): same possible NPE as listDatabases — conn may be
        // null if getConnection() threw.
        conn.rollback();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
      LoggingUtils.logAll(LOG, "Failed to list tables", e);
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (pStmt != null) {
        try {
          pStmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }

      try {
        close();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
      }
    }

    return tables.toArray(new String[tables.size()]);
  }

  /**
   * Column names for a stored procedure, ordered by ORDINAL_POSITION from
   * JDBC metadata; the return-value column is skipped.
   */
  @Override
  public String[] getColumnNamesForProcedure(String procedureName) {
    List<String> ret = new ArrayList<String>();
    try {
      DatabaseMetaData metaData = this.getConnection().getMetaData();
      ResultSet results = metaData.getProcedureColumns(null, null,
          procedureName, null);
      if (null == results) {
        return null;
      }

      try {
        while (results.next()) {
          if (results.getInt("COLUMN_TYPE")
              != DatabaseMetaData.procedureColumnReturn) {
            int index = results.getInt("ORDINAL_POSITION");
            if (index < 0) {
              continue; // actually the return type
            }
            // Pad the list so that ret.get(index) is addressable; positions
            // may arrive out of order.
            for (int i = ret.size(); i < index; ++i) {
              ret.add(null);
            }
            String name = results.getString("COLUMN_NAME");
            if (index == ret.size()) {
              ret.add(name);
            } else {
              ret.set(index, name);
            }
          }
        }
        String[] result = ret.toArray(new String[ret.size()]);
        LOG.debug("getColumnsNamesForProcedure returns "
            + StringUtils.join(ret, ","));
        return result;
      } finally {
        results.close();
        getConnection().commit();
      }
    } catch (SQLException e) {
      LoggingUtils.logAll(LOG, "Error reading procedure metadata: ", e);
      throw new RuntimeException("Can't fetch column names for procedure.", e);
    }
  }

  /**
   * Column name -> java.sql.Types code for a stored procedure's parameters.
   */
  @Override
  public Map<String, Integer> getColumnTypesForProcedure(String procedureName) {
    Map<String, Integer> ret = new TreeMap<String, Integer>();
    try {
      DatabaseMetaData metaData = this.getConnection().getMetaData();
      ResultSet results = metaData.getProcedureColumns(null, null,
          procedureName, null);
      if (null == results) {
        return null;
      }

      try {
        while (results.next()) {
          if (results.getInt("COLUMN_TYPE")
              != DatabaseMetaData.procedureColumnReturn) {
            int index = results.getInt("ORDINAL_POSITION");
            if (index < 0) {
              continue; // actually the return type
            }
            // we don't care if we get several rows for the
            // same ORDINAL_POSITION (e.g. like H2 gives us)
            // as we'll just overwrite the entry in the map:
            ret.put(
                results.getString("COLUMN_NAME"),
                results.getInt("DATA_TYPE"));
          }
        }
        LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
        LOG.debug("Types returned = " + StringUtils.join(ret.values(), ","));
        return ret.isEmpty() ? null : ret;
      } finally {
        results.close();
        getConnection().commit();
      }
    } catch (SQLException sqlException) {
      // NOTE(review): message says "primary key metadata" but this method
      // reads *procedure* metadata — looks copy-pasted; confirm and fix.
      LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
          + sqlException.toString(), sqlException);
      return null;
    }
  }

  /**
   * Column name -> database type name for a stored procedure's parameters.
   */
  @Override
  public Map<String, String>
      getColumnTypeNamesForProcedure(String procedureName) {
    Map<String, String> ret = new TreeMap<String, String>();
    try {
      DatabaseMetaData metaData = this.getConnection().getMetaData();
      ResultSet results = metaData.getProcedureColumns(null, null,
          procedureName, null);
      if (null == results) {
        return null;
      }

      try {
        while (results.next()) {
          if (results.getInt("COLUMN_TYPE")
              != DatabaseMetaData.procedureColumnReturn) {
            int index = results.getInt("ORDINAL_POSITION");
            if (index < 0) {
              continue; // actually the return type
            }
            // we don't care if we get several rows for the
            // same ORDINAL_POSITION (e.g. like H2 gives us)
            // as we'll just overwrite the entry in the map:
            ret.put(
                results.getString("COLUMN_NAME"),
                results.getString("TYPE_NAME"));
          }
        }
        LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
        LOG.debug(
            "Type names returned = " + StringUtils.join(ret.values(), ","));
        return ret.isEmpty() ? null : ret;
      } finally {
        results.close();
        getConnection().commit();
      }
    } catch (SQLException sqlException) {
      // NOTE(review): same copy-pasted "primary key metadata" message as
      // getColumnTypesForProcedure.
      LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
          + sqlException.toString(), sqlException);
      return null;
    }
  }

  /**
   * Column names for a table, resolving "owner.table" qualifiers; falls back
   * to the session user as owner when the name is unqualified.
   */
  @Override
  public String[] getColumnNames(String tableName) {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> columns = new ArrayList<String>();

    String tableOwner = null;
    String shortTableName = tableName;
    int qualifierIndex = tableName.indexOf('.');
    if (qualifierIndex != -1) {
      tableOwner = tableName.substring(0, qualifierIndex);
      shortTableName = tableName.substring(qualifierIndex + 1);
    }

    try {
      conn = getConnection();

      if (tableOwner == null) {
        tableOwner = getSessionUser(conn);
      }

      pStmt = conn.prepareStatement(QUERY_COLUMNS_FOR_TABLE,
          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      pStmt.setString(1, tableOwner);

      pStmt.setString(2, shortTableName);
      rset = pStmt.executeQuery();

      while (rset.next()) {
        columns.add(rset.getString(1));
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        // NOTE(review): conn may be null if getConnection() threw —
        // potential NPE; getPrimaryKey() guards this, this method doesn't.
        conn.rollback();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
      LoggingUtils.logAll(LOG, "Failed to list columns", e);
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (pStmt != null) {
        try {
          pStmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }

      try {
        close();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
      }
    }
    return filterSpecifiedColumnNames(
        columns.toArray(new String[columns.size()]));
  }

  /**
   * Primary key column for a table (first column of a multi-column key, with
   * a warning); null when the table has no primary key.
   */
  @Override
  public String getPrimaryKey(String tableName) {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> columns = new ArrayList<String>();

    String tableOwner = null;
    String shortTableName = tableName;
    int qualifierIndex = tableName.indexOf('.');
    if
        (qualifierIndex != -1) {
      tableOwner = tableName.substring(0, qualifierIndex);
      shortTableName = tableName.substring(qualifierIndex + 1);
    }

    try {
      conn = getConnection();

      if (tableOwner == null) {
        tableOwner = getSessionUser(conn);
      }

      pStmt = conn.prepareStatement(QUERY_PRIMARY_KEY_FOR_TABLE,
          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      // Parameter order for this query is (table, owner) — the reverse of
      // QUERY_COLUMNS_FOR_TABLE.
      pStmt.setString(1, shortTableName);
      pStmt.setString(2, tableOwner);
      rset = pStmt.executeQuery();

      while (rset.next()) {
        columns.add(rset.getString(1));
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        if (conn != null) {
          conn.rollback();
        }
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
      LoggingUtils.logAll(LOG, "Failed to list columns", e);
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (pStmt != null) {
        try {
          pStmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }

      try {
        close();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
      }
    }

    if (columns.size() == 0) {
      // Table has no primary key
      return null;
    }

    if (columns.size() > 1) {
      // The primary key is multi-column primary key. Warn the user.
      // TODO select the appropriate column instead of the first column based
      // on the datatype - giving preference to numerics over other types.
      LOG.warn("The table " + tableName + " "
          + "contains a multi-column primary key. Sqoop will default to "
          + "the column " + columns.get(0) + " only for this job.");
    }

    return columns.get(0);
  }

  @Override
  public String getInputBoundsQuery(String splitByCol, String sanitizedQuery) {
    /*
     * The default input bounds query generated by DataDrivenImportJob
     * is of the form:
     * SELECT MIN(splitByCol), MAX(splitByCol) FROM (sanitizedQuery) AS t1
     *
     * This works for most databases but not Oracle since Oracle does not
     * allow the use of "AS" to project the subquery as a table. Instead the
     * correct format for use with Oracle is as follows:
     * SELECT MIN(splitByCol), MAX(splitByCol) FROM (sanitizedQuery) t1
     */
    return "SELECT MIN(" + splitByCol + "), MAX(" + splitByCol + ") FROM ("
        + sanitizedQuery + ") t1";
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * IndexCreationJUnitTest.java * * Created on April 13, 2005, 4:16 PM Added a Test Case for testing the Task, IUM10 : May 16, 2005, * 2:45 PM */ /** */ package org.apache.geode.cache.query.functional; import static org.apache.geode.distributed.ConfigurationProperties.CACHE_XML_FILE; import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_TIME_STATISTICS; import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT; import static org.apache.geode.distributed.ConfigurationProperties.NAME; import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.apache.commons.io.FileUtils; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.EvictionAction; import org.apache.geode.cache.EvictionAttributes; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.CacheUtils; import org.apache.geode.cache.query.Index; import 
org.apache.geode.cache.query.IndexStatistics;
import org.apache.geode.cache.query.IndexType;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryInvalidException;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.ComparableWrapper;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.cache.query.internal.DefaultQueryService;
import org.apache.geode.cache.query.internal.QueryObserverAdapter;
import org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.cache.query.internal.index.CompactMapRangeIndex;
import org.apache.geode.cache.query.internal.index.CompactRangeIndex;
import org.apache.geode.cache.query.internal.index.IndexProtocol;
import org.apache.geode.cache.query.internal.index.RangeIndex;
import org.apache.geode.cache.query.internal.types.ObjectTypeImpl;
import org.apache.geode.cache.query.internal.types.StructTypeImpl;
import org.apache.geode.cache.query.types.ObjectType;
import org.apache.geode.cache.query.types.StructType;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.test.junit.categories.IntegrationTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;

// Integration tests for query-index creation against a live local cache;
// each test runs against a fresh "portfolios" region (see setUp/tearDown).
@Category(IntegrationTest.class)
public class IndexCreationJUnitTest {

  // Scratch fields shared by tests that compare two result sets.
  private ObjectType resType1 = null;
  private ObjectType resType2 = null;

  private int resSize1 = 0;
  private int resSize2 = 0;

  private Iterator itert1 = null;
  private Iterator itert2 = null;

  private Set set1 = null;
  private Set set2 = null;

  private String s1;
  private String s2;

  // Start a cache and populate /portfolios with 4 Portfolio entries
  // keyed "0".."3".
  @Before
  public void setUp() throws java.lang.Exception {
    CacheUtils.startCache();
    Region region = CacheUtils.createRegion("portfolios", Portfolio.class);
    for (int i = 0; i < 4; i++) {
      region.put("" + i, new Portfolio(i));
    }
  }

  @After
  public void tearDown() throws java.lang.Exception {
    CacheUtils.closeCache();
  }

  // Creates several functional indexes over different from-clauses and
  // asserts each resulting index reports isValid().
  @Test
  public void testIndexCreation() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status",
        "/portfolios, positions"); // TASK ICM1
    Index i2 = qs.createIndex("secIdIndex", IndexType.FUNCTIONAL, "b.secId",
        "/portfolios pf, pf.positions.values b"); // TASK ICM2
    Index i5 = qs.createIndex("intFunctionIndex", IndexType.FUNCTIONAL,
        "intFunction(pf.getID)", "/portfolios pf, pf.positions b");
    Index i6 = qs.createIndex("statusIndex6", IndexType.FUNCTIONAL, "a.status",
        "/portfolios.values.toArray a, positions");
    Index i7 = qs.createIndex("statusIndex7", IndexType.FUNCTIONAL, "a.status",
        "/portfolios.getValues().asList() a, positions");
    Index i8 = qs.createIndex("statusIndex8", IndexType.FUNCTIONAL, "a.status",
        "/portfolios.values.asSet a, positions"); // TASK ICM6
    Object indices[] = {i1, i2, i5, i6, i7, i8}; // remove any commented Index
    // from Array
    for (int j = 0; j < indices.length; j++) {
      CacheUtils.log(((IndexProtocol) indices[j]).isValid());
      boolean r = ((IndexProtocol) indices[j]).isValid();
      assertTrue("Test: testIndexCreation FAILED", r);
      CacheUtils.log(((IndexProtocol) indices[j]).getName());
      CacheUtils.log("Test: testIndexCreation PASS");
    }
  }

  // Verifies that an index over an unresolvable type fails without an
  // import, then succeeds once the import is supplied (or TYPE is used).
  @Test
  public void testIndexCreationWithImports() throws Exception {
    // Task ID ICM 16
    QueryService qs;
    qs = CacheUtils.getQueryService();

    Index idx;

    try {
      idx = qs.createIndex("importsIndex", IndexType.FUNCTIONAL, "status",
          "/portfolios, (map<string,Position>)positions");
      fail("Should have thrown a QueryInvalidException"); // can't find type
      // Position
    } catch (QueryInvalidException e) {
// pass } idx = qs.createIndex("importsIndex", IndexType.FUNCTIONAL, "status", "/portfolios, (map<string,Position>)positions", "import org.apache.geode.cache.\"query\".data.Position"); qs.removeIndex(idx); idx = qs.createIndex("importsIndex2", IndexType.FUNCTIONAL, "status", "/portfolios, positions TYPE Position", "import org.apache.geode.cache.\"query\".data.Position"); } @Test public void testSimilarIndexCreation() throws Exception { // Task ID: ICM17 QueryService qs; qs = CacheUtils.getQueryService(); // boolean exceptionoccurred = true; qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolios, positions"); qs.createIndex("secIdIndex", IndexType.FUNCTIONAL, "b.secId", "/portfolios pf, pf.positions.values b"); try { qs.createIndex("secIdIndexDuplicate", IndexType.FUNCTIONAL, "b.secId", "/portfolios pf, pf.positions.values b"); fail("testSimilarIndexCreation: Allowed duplicate index creation"); } catch (Exception e) { // testSimilarIndexCreation: Exception if duplicate index is // created with diffrenet name but same from clause & expression } try { qs.createIndex("secIdIndexDuplicate", IndexType.FUNCTIONAL, "b1.secId", "/portfolios pf1, pf1.positions.values b1"); fail("testSimilarIndexCreation: Allowed duplicate index creation"); } catch (Exception e) { // testSimilarIndexCreation: Exception if duplicate index is // created with diffrenet name but same from clause & expression } // org.apache.geode.cache.query.IndexExistsException: Similar Index // Exists try { qs.createIndex("statusIndexDuplicate", IndexType.FUNCTIONAL, "b.status", "/portfolios b, positions"); fail("testSimilarIndexCreation: Allowed duplicate index creation"); } catch (Exception e) { // testSimilarIndexCreation: Exception if duplicate index is // created with diffrenet name but same from clause & expression } } @Test public void testInvalidImportsIndexCreation() throws Exception { // Task ID: Invalid Indexes: ICM15 QueryService qs; qs = CacheUtils.getQueryService(); try { 
qs.createIndex("typeIndex", IndexType.FUNCTIONAL, "\"type\"",
          "/portfolios pf, pf.positions b", "pf.position1");
      // projection attributes are not yet implemented
      // last parameter is the imports statement, so this is a syntax
      // error
      fail("Should have thrown an exception since imports are invalid");
      // TASK ICM7
    } catch (QueryInvalidException e) {
      // pass
    }
  }

  @Ignore("TODO: disabled and has no assertions")
  @Test
  public void testElementIndexCreation() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    // Index whose expression wraps a sub-query in element(); creation only, no assertions.
    qs.createIndex("funcReturnSecIdIndex", IndexType.FUNCTIONAL,
        "pf.funcReturnSecId(element(select distinct pos from /portfolios pf, pf.positions.values as pos where pos.sharesOutstanding = 5000))",
        "/portfolios pf, pf.positions b");
    // TASK ICM8: InvalidIndexCreation
    // Query q = qs.newQuery("(element(select distinct pos from
    // /portfolios pf, pf.positions.values as pos where
    // pos.sharesOutstanding = 5000))");
    // Object r=q.execute();
    // CacheUtils.log(Utils.printResult(r));
  }

  // An index on an nvl() expression must be used both in where clauses and projections.
  @Test
  public void testIndexCreationOnNVLFunction() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Query query = null;
    qs.createIndex("NVLIndex1", IndexType.FUNCTIONAL, "nvl(pf.position2, pf.position1).secId",
        "/portfolios pf");
    query = CacheUtils.getQueryService().newQuery(
        "select distinct * from /portfolios pf where nvl(pf.position2, pf.position1).secId = 'SUN'");
    QueryObserverImpl observer = new QueryObserverImpl();
    QueryObserverHolder.setInstance(observer);
    query.execute();
    if (!observer.isIndexesUsed) {
      fail("NO INDEX USED");
    }
    query = CacheUtils.getQueryService().newQuery(
        "select distinct nvl(pf.position2, 'inProjection') from /portfolios pf where nvl(pf.position2, pf.position1).secId = 'SUN'");
    observer = new QueryObserverImpl();
    QueryObserverHolder.setInstance(observer);
    query.execute();
    if (!observer.isIndexesUsed && observer.indexesUsed.size() != 1) {
      fail("NO INDEX USED");
    }
  }

  @Test
  public void testIndexCreationWithImport() throws
Exception {
    // Task ID: ICM16
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i3 = qs.createIndex("typeIndex", IndexType.FUNCTIONAL, "\"type\"",
        "/portfolios type Portfolio, positions b",
        "IMPORT org.apache.geode.cache.\"query\".data.Portfolio");
    // TASK ICM3 Region 'IMPORT' not found:....[BUG : Verified Fixed ]
    // Index i4=(Index)qs.createIndex("boolFunctionIndex",
    // IndexType.FUNCTIONAL,"boolFunction(pf.status)","/portfolios pf,
    // pf.positions.values b");
    // TASK ICM5 org.apache.geode.cache.query.IndexInvalidException
    Object indices[] = {i3}; // remove any commented Index from Array
    for (int j = 0; j < indices.length; j++) {
      CacheUtils.log(((IndexProtocol) indices[j]).isValid());
      boolean r = ((IndexProtocol) indices[j]).isValid();
      if (r == true) {
        CacheUtils.log(((IndexProtocol) indices[j]).getName());
        CacheUtils.log("Test: testIndexCreation PASS");
      } else {
        fail("Test: testIndexCreation FAILED");
      }
    }
  }

  // Runs comparison queries over a Comparable wrapper with and without an index
  // and compares the two result sets for equality.
  @Test
  public void testComparisonBetnWithAndWithoutIndexCreationComparableObject() throws Exception {
    // Task ID IUM10
    SelectResults r[][] = new SelectResults[4][2];
    QueryService qs;
    qs = CacheUtils.getQueryService();
    String queries[] = {"select distinct * from /portfolios pf where pf.getCW(pf.ID) = $1",
        "select distinct * from /portfolios pf where pf.getCW(pf.ID) > $1",
        "select distinct * from /portfolios pf where pf.getCW(pf.ID) < $1",
        "select distinct * from /portfolios pf where pf.getCW(pf.ID) != $1"
        // TASK IUM 10
    };
    // First pass: execute without any index; capture type/size/content per query.
    for (int i = 0; i < queries.length; i++) {
      Query q = null;
      q = CacheUtils.getQueryService().newQuery(queries[i]);
      Object params[] = new Object[1];
      params[0] = new ComparableWrapper(1);
      QueryObserverImpl observer = new QueryObserverImpl();
      QueryObserverHolder.setInstance(observer);
      r[i][0] = (SelectResults) q.execute(params);
      resType1 = (r[i][0]).getCollectionType().getElementType();
      resSize1 = ((r[i][0]).size());
      set1 = ((r[i][0]).asSet());
      // Iterator iter=set1.iterator();
    }
    // Create an Index on status and execute the same query again.
    qs = CacheUtils.getQueryService();
    qs.createIndex("cIndex", IndexType.FUNCTIONAL, "pf.getCW(pf.ID)", "/portfolios pf");
    for (int i = 0; i < queries.length; i++) {
      Query q = null;
      q = CacheUtils.getQueryService().newQuery(queries[i]);
      Object params[] = new Object[1];
      params[0] = new ComparableWrapper(1);
      QueryObserverImpl observer2 = new QueryObserverImpl();
      QueryObserverHolder.setInstance(observer2);
      r[i][1] = (SelectResults) q.execute(params);
      if (!observer2.isIndexesUsed) {
        fail("FAILED: Index NOT Used");
      }
      resType2 = (r[i][1]).getCollectionType().getElementType();
      resSize2 = ((r[i][1]).size());
      set2 = ((r[i][1]).asSet());
    }
    CacheUtils.compareResultsOfWithAndWithoutIndex(r, this);
  }

  // Index usage where the indexed expression uses the map/index operator [].
  @Test
  public void testIndexCreationWithIndexOperatorUsage() throws Exception {
    // Task ID : ICM 18
    QueryService qs;
    qs = CacheUtils.getQueryService();
    String[] queries = {
        "select distinct * from /portfolios pf where pf.collectionHolderMap[(pf.ID).toString()].arr[pf.ID] != -1"};
    Object r[][] = new Object[queries.length][2];
    // Baseline execution before any index exists.
    for (int i = 0; i < queries.length; i++) {
      Query q = null;
      q = qs.newQuery(queries[i]);
      CacheUtils.getLogger().info("Executing query: " + queries[i]);
      r[i][0] = q.execute();
      CacheUtils.log("Executed query:" + queries[i]);
    }
    Index i1 = qs.createIndex("fIndex", IndexType.FUNCTIONAL, "sIter",
        "/portfolios pf, pf.collectionHolderMap[(pf.ID).toString()].arr sIter");
    Index i2 = qs.createIndex("cIndex", IndexType.FUNCTIONAL,
        "pf.collectionHolderMap[(pf.ID).toString()].arr[pf.ID]", "/portfolios pf");
    // BUG # 32498
    // Index i3 = qs.createIndex("nIndex", IndexType.FUNCTIONAL,
    // "pf.collectionHolderMap[((pf.ID%2)).toString()].arr[pf.ID]","/portfolios
    // pf");
    for (int i = 0; i < queries.length; i++) {
      Query q = null;
      q = qs.newQuery(queries[i]);
      CacheUtils.getLogger().info("Executing query: " + queries[i]);
      QueryObserverImpl observer = new QueryObserverImpl();
      QueryObserverHolder.setInstance(observer);
      r[i][1] = q.execute();
      SelectResults results = (SelectResults)
r[i][1];
      assertTrue(results.size() > 0);
      CacheUtils.log("Executing query: " + queries[i] + " with index created");
      if (!observer.isIndexesUsed) {
        fail("Index is NOT uesd");
      }
      // Only "cIndex" (the whole-path index) should have been chosen.
      Iterator itr = observer.indexesUsed.iterator();
      assertTrue(itr.hasNext());
      String temp = itr.next().toString();
      assertEquals(temp, "cIndex");
    }
    CacheUtils.log(((RangeIndex) i1).dump());
    CacheUtils.log(((CompactRangeIndex) i2).dump());
    StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
    ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
    // CacheUtils.log(((RangeIndex)i3).dump());
    // Index i3 =
    // qs.createIndex("Task6Index",IndexType.FUNCTIONAL,"pos.secId","/portfolios
    // pf, pf.positions.values pos");
  }

  // Indexes built over region keys via the various key-collection expressions.
  @Test
  public void testIndexCreationOnKeys() throws Exception {
    // Task ID : ICM 9
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("kIndex", IndexType.FUNCTIONAL, "pf", "/portfolios.keys pf");
    Index i2 = qs.createIndex("k1Index", IndexType.FUNCTIONAL, "key", "/portfolios.entries");
    Index i3 = qs.createIndex("k2Index", IndexType.FUNCTIONAL, "pf", "/portfolios.keys.toArray pf");
    // Index i4 = qs.createIndex("k3Index", IndexType.FUNCTIONAL,
    // "pf","/portfolios.keys().toArray() pf");
    Index i5 = qs.createIndex("k4Index", IndexType.FUNCTIONAL, "pf",
        "/portfolios.getKeys.asList pf");
    // Index i5 = qs.createIndex("k5Index", IndexType.FUNCTIONAL,
    // "pf","/portfolios.getKeys.asList() pf");
    Index i6 = qs.createIndex("k5Index", IndexType.FUNCTIONAL, "pf",
        "/portfolios.getKeys.asSet() pf");
    // Index i5 = qs.createIndex("k5Index", IndexType.FUNCTIONAL,
    // "pf","/portfolios.getKeys.asSet pf");
    CacheUtils.log(((CompactRangeIndex) i1).dump());
    CacheUtils.log(((CompactRangeIndex) i2).dump());
    CacheUtils.log(((CompactRangeIndex) i3).dump());
    CacheUtils.log(((CompactRangeIndex) i5).dump());
    CacheUtils.log(((CompactRangeIndex) i6).dump());
  }

  // Index creation rooted at a single region entry via entry-access syntax.
  @Test
  public void testIndexCreationOnRegionEntry() throws Exception {
    // Task ID : ICM11
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("r1Index", IndexType.FUNCTIONAL, "secId",
        "/portfolios.values['1'].positions.values");
    qs.createIndex("r12Index", IndexType.FUNCTIONAL, "secId", "/portfolios['1'].positions.values");
    CacheUtils.log(((CompactRangeIndex) i1).dump());
    // CacheUtils.log(((RangeIndex)i2).dump());
  }

  /**
   * Creation of index on a path derived from Region.Entry object obtained via entrySet , fails as
   * that function was not supported in the QRegion & DummyQRegion
   */
  @Test
  public void testBug36823() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    qs.createIndex("entryIndex", IndexType.FUNCTIONAL, "value.getID()", "/portfolios.entrySet pf");
    Region rgn = CacheUtils.getRegion("/portfolios");
    rgn.put("4", new Portfolio(4));
    rgn.put("5", new Portfolio(5));
    Query qr =
        qs.newQuery("Select distinct * from /portfolios.entrySet pf where pf.value.getID() = 4");
    SelectResults sr = (SelectResults) qr.execute();
    assertEquals(sr.size(), 1);
  }

  /**
   * Creation of index on key path derived from Region.Entry object obtained via keySet , fails as
   * that function was not supported in the QRegion & DummyQRegion
   */
  @Test
  public void testBug36590() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    qs.createIndex("keyIndex", IndexType.FUNCTIONAL, "keys", "/portfolios.keySet keys");
    Region rgn = CacheUtils.getRegion("/portfolios");
    rgn.put("4", new Portfolio(4));
    rgn.put("5", new Portfolio(5));
    Query qr = qs.newQuery("Select distinct * from /portfolios.keySet keys where keys = '4'");
    SelectResults sr = (SelectResults) qr.execute();
    assertEquals(sr.size(), 1);
  }

  /**
   * The Index maintenance has a bug as it does not re-evaluate the index maintenance collection in
   * the IMQEvaluator when an entry gets modified & so the index resultset is messed up
   */
  @Test
  public void testBug36591() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("keyIndex", IndexType.FUNCTIONAL,
"ks.hashCode", "/portfolios.keys ks");
    Region rgn = CacheUtils.getRegion("/portfolios");
    rgn.put("4", new Portfolio(4));
    rgn.put("5", new Portfolio(5));
    CacheUtils.log(((CompactRangeIndex) i1).dump());
    // All six keys ("0".."5") satisfy hashCode >= -1.
    Query qr =
        qs.newQuery("Select distinct * from /portfolios.keys keys where keys.hashCode >= $1");
    SelectResults sr = (SelectResults) qr.execute(new Object[] {new Integer(-1)});
    assertEquals(6, sr.size());
  }

  /**
   * Creation of index on a path derived from Region.Entry object obtained via entrySet , fails as
   * that function was not supported in the QRegion & DummyQRegion
   */
  @Test
  public void testBug43519() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index index =
        qs.createIndex("shortIndex", IndexType.FUNCTIONAL, "p.shortID", "/portfolios p");
    Region rgn = CacheUtils.getRegion("/portfolios");
    // Exercise add, update and destroy paths of index maintenance.
    for (int i = 1; i <= 10; i++) {
      String key = "" + i;
      Portfolio p = new Portfolio(i);
      p.shortID = new Short(key);
      // addToIndex
      rgn.put(key, p);
      // updateIndex
      rgn.put(key, p);
      if (i % 2 == 0) {
        // destroy from index.
        rgn.destroy(key);
      }
    }
    // Only odd keys 1 and 3 remain below 5.
    Query qr = qs.newQuery("Select p.shortID from /portfolios p where p.shortID < 5");
    SelectResults sr = (SelectResults) qr.execute();
    assertEquals(sr.size(), 2);
  }

  /**
   * Test the Index maintenance as it may use the method keys() of QRegion instead of DummyQRegion
   * while running an IndexMaintenanceQuery
   */
  @Test
  public void testIMQFailureAsMethodKeysNAInDummyQRegion() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 =
        qs.createIndex("keyIndex", IndexType.FUNCTIONAL, "ks.hashCode", "/portfolios.keys() ks");
    Region rgn = CacheUtils.getRegion("/portfolios");
    rgn.put("4", new Portfolio(4));
    rgn.put("5", new Portfolio(5));
    CacheUtils.log(((CompactRangeIndex) i1).dump());
    Query qr = qs.newQuery(
        "Select distinct keys.hashCode from /portfolios.keys() keys where keys.hashCode >= $1");
    SelectResults sr = (SelectResults) qr.execute(new Object[] {new Integer(-1)});
    assertEquals(6, sr.size());
  }

  // Indexes over asSet/asList/toArray conversions in the from clause.
  @Test
  public void testIndexCreationWithFunctions() throws Exception {
    // Task ID : ICM14
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("SetSecIDIndex1", IndexType.FUNCTIONAL, "b.secId",
        "/portfolios.asSet pf, pf.positions.values b");
    Index i2 = qs.createIndex("ListSecIDIndex2", IndexType.FUNCTIONAL, "b.secId",
        "/portfolios.asList pf, pf.positions.values b");
    Index i3 = qs.createIndex("ArraySecIDIndex3", IndexType.FUNCTIONAL, "b.secId",
        "/portfolios.toArray pf, pf.positions.values b");
    CacheUtils.log(((RangeIndex) i1).dump());
    CacheUtils.log(((RangeIndex) i2).dump());
    CacheUtils.log(((RangeIndex) i3).dump());
  }

  // Unsupported index paths (element access on toArray/asList results) must fail.
  @Test
  public void testInvalidIndexes() throws Exception {
    // Task ID: ICM15
    QueryService qs;
    qs = CacheUtils.getQueryService();
    try {
      Index i1 = qs.createIndex("r1Index", IndexType.FUNCTIONAL, "secId",
          "/portfolios.toArray[1].positions.values");
      CacheUtils.log(((RangeIndex) i1).dump());
      fail("Index creation should have failed");
    } catch (Exception e) {
    }
    try {
      Index i2 = qs.createIndex("r12Index",
IndexType.FUNCTIONAL, "secId",
          "/portfolios.asList[1].positions.values");
      CacheUtils.log(((RangeIndex) i2).dump());
      fail("Index creation should have failed");
    } catch (Exception e) {
    }
  }

  // toArray/asSet/asList applied to values, entries and keys in the from clause.
  @Test
  public void testIndexCreationWithFunctionsinFromClause() throws Exception {
    // Task ID: ICM13
    QueryService qs;
    qs = CacheUtils.getQueryService();
    // BUG #32586 : FIXED
    Index i1 = qs.createIndex("Index11", IndexType.FUNCTIONAL, "status",
        "/portfolios.values.toArray()");
    Index i2 = qs.createIndex("Index12", IndexType.FUNCTIONAL, "ID", "/portfolios.values.asSet");
    Index i3 = qs.createIndex("Index13", IndexType.FUNCTIONAL, "ID", "/portfolios.values.asList");
    qs.createIndex("Index14", IndexType.FUNCTIONAL, "value.ID", "/portfolios.entries.toArray()");
    qs.createIndex("Index15", IndexType.FUNCTIONAL, "value.ID", "/portfolios.entries.asSet");
    qs.createIndex("Index16", IndexType.FUNCTIONAL, "value.ID", "/portfolios.entries.asList");
    // BUG #32586 : FIXED
    qs.createIndex("Index17", IndexType.FUNCTIONAL, "kIter", "/portfolios.keys.toArray() kIter");
    qs.createIndex("Index18", IndexType.FUNCTIONAL, "kIter", "/portfolios.keys.asSet kIter");
    qs.createIndex("Index19", IndexType.FUNCTIONAL, "kIter", "/portfolios.keys.asList kIter");
    CacheUtils.log(((CompactRangeIndex) i1).dump());
    CacheUtils.log(((CompactRangeIndex) i2).dump());
    CacheUtils.log(((CompactRangeIndex) i3).dump());
  }

  // The ObjectType an index reports must match its from clause / region constraint.
  @Test
  public void testIndexObjectTypeWithRegionConstraint() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    // Two iterators -> a struct result type.
    Index i1 = qs.createIndex("Index1", IndexType.FUNCTIONAL, "b.secId",
        "/portfolios pf, pf.positions.values b");
    ObjectType type = ((IndexProtocol) i1).getResultSetType();
    String fieldNames[] = {"index_iter1", "index_iter2"};
    ObjectType fieldTypes[] =
        {new ObjectTypeImpl(Portfolio.class), new ObjectTypeImpl(Object.class)};
    // ObjectType expectedType = new StructTypeImpl( fieldNames,fieldTypes);
    ObjectType expectedType = new StructTypeImpl(fieldNames, fieldTypes);
    if (!(type instanceof StructType && type.equals(expectedType))) {
      fail(
          "The ObjectType obtained from index is not of the expected type. Type obtained from index="
              + type);
    }
    // Region values with a Portfolio constraint -> Portfolio element type.
    Index i2 = qs.createIndex("Index2", IndexType.FUNCTIONAL, "pf.ID", "/portfolios.values pf");
    type = ((IndexProtocol) i2).getResultSetType();
    expectedType = new ObjectTypeImpl(Portfolio.class);
    if (!type.equals(expectedType)) {
      fail(
          "The ObjectType obtained from index is not of the expected type. Type obtained from index="
              + type);
    }
    // Entry-rooted positions collection -> untyped Object element type.
    Index i3 = qs.createIndex("Index3", IndexType.FUNCTIONAL, "pos.secId",
        "/portfolios['0'].positions.values pos");
    type = ((IndexProtocol) i3).getResultSetType();
    expectedType = new ObjectTypeImpl(Object.class);
    if (!type.equals(expectedType)) {
      fail(
          "The ObjectType obtained from index is not of the expected type. Type obtained from index="
              + type);
    }
    // Primary-key index over the region -> Portfolio element type.
    Index i4 = qs.createIndex("Index4", IndexType.PRIMARY_KEY, "ID", "/portfolios");
    type = ((IndexProtocol) i4).getResultSetType();
    expectedType = new ObjectTypeImpl(Portfolio.class);
    if (!type.equals(expectedType)) {
      fail(
          "The ObjectType obtained from index is not of the expected type. Type obtained from index="
              + type);
    }
  }

  // Index creation and usage on an overflow-to-disk region; range indexes are unsupported there.
  @Test
  public void testIndexOnOverflowRegion() throws Exception {
    String regionName = "portfolios_overflow";
    // overflow region.
    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setValueConstraint(Portfolio.class);
    attributesFactory.setEvictionAttributes(
        EvictionAttributes.createLRUEntryAttributes(1, EvictionAction.OVERFLOW_TO_DISK));
    Region region = CacheUtils.createRegion(regionName, attributesFactory.create(), true);
    for (int i = 0; i < 4; i++) {
      region.put(new Portfolio(i), new Portfolio(i));
    }
    QueryService qs = CacheUtils.getQueryService();
    // Currently supported with compact range-index.
    Index i1 = qs.createIndex("idIndex", IndexType.FUNCTIONAL, "pf.ID", "/portfolios_overflow pf");
    Index i2 = qs.createIndex("keyIdIndex", IndexType.FUNCTIONAL, "key.ID",
        "/portfolios_overflow.keys key");
    // Not yet supported with range-index.
    try {
      Index i3 = qs.createIndex("idIndex2", IndexType.FUNCTIONAL, "pf.ID",
          "/portfolios_overflow pf, pf.positions pos");
      fail("Range index not supported on overflow region.");
    } catch (UnsupportedOperationException ex) {
      // Expected.
    }
    // Execute query.
    String[] queryStr = new String[] {"Select * from /portfolios_overflow pf where pf.ID = 2",
        "Select * from /portfolios_overflow.keys key where key.ID = 2",
        "Select * from /portfolios_overflow pf where pf.ID > 1",
        "Select * from /portfolios_overflow pf where pf.ID < 2",};
    int[] resultSize = new int[] {1, 1, 2, 2};
    for (int i = 0; i < queryStr.length; i++) {
      Query q = qs.newQuery(queryStr[i]);
      QueryObserverImpl observer = new QueryObserverImpl();
      QueryObserverHolder.setInstance(observer);
      SelectResults results = (SelectResults) q.execute();
      if (!observer.isIndexesUsed) {
        fail("Index not used for query. " + queryStr[i]);
      }
      assertEquals(results.size(), resultSize[i]);
    }
    // Exercise index maintenance with more puts (values overflow to disk).
    for (int i = 0; i < 10; i++) {
      region.put(new Portfolio(i), new Portfolio(i));
    }
    // Persistent overflow region.
}

  // A map index with wildcard keys must canonicalize and be a CompactMapRangeIndex.
  @Test
  public void testMapKeyIndexCreation_1_NonCompactType() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("Index1", IndexType.FUNCTIONAL, "pf.positions[*]", "/portfolios pf");
    assertEquals(i1.getCanonicalizedIndexedExpression(), "index_iter1.positions[*]");
    assertTrue(i1 instanceof CompactMapRangeIndex);
  }

  // A map index with an explicit key list records the keys and per-key match patterns.
  @Test
  public void testMapKeyIndexCreation_2_NonCompactType() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = qs.createIndex("Index1", IndexType.FUNCTIONAL,
        "pf.positions['key1','key2','key3']", "/portfolios pf");
    assertEquals(i1.getCanonicalizedIndexedExpression(),
        "index_iter1.positions['key1','key2','key3']");
    assertTrue(i1 instanceof CompactMapRangeIndex);
    CompactMapRangeIndex mri = (CompactMapRangeIndex) i1;
    Object mapKeys[] = mri.getMapKeysForTesting();
    assertEquals(mapKeys.length, 3);
    Set<String> keys = new HashSet<String>();
    keys.add("key1");
    keys.add("key2");
    keys.add("key3");
    for (Object key : mapKeys) {
      keys.remove(key);
    }
    assertTrue(keys.isEmpty());
    String[] patterns = mri.getPatternsForTesting();
    assertEquals(patterns.length, 3);
    Set<String> patternsSet = new HashSet<String>();
    patternsSet.add("index_iter1.positions['key1']");
    patternsSet.add("index_iter1.positions['key2']");
    patternsSet.add("index_iter1.positions['key3']");
    for (String ptrn : patterns) {
      patternsSet.remove(ptrn);
    }
    assertTrue(patternsSet.isEmpty());
    assertEquals(mri.getIndexedExpression(), "pf.positions['key1','key2','key3']");
  }

  /**
   * Test for bug 46872, make sure we recover the index correctly if the cache.xml changes for a
   * persistent region.
   */
  @Test
  public void testIndexCreationFromXML() throws Exception {
    InternalDistributedSystem.getAnyInstance().disconnect();
    File file = new File("persistData0");
    file.mkdir();
    // First run: region defined with eviction; populate and query via the index.
    {
      Properties props = new Properties();
      props.setProperty(NAME, "test");
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(CACHE_XML_FILE,
          getClass().getResource("index-creation-with-eviction.xml").toURI().getPath());
      DistributedSystem ds = DistributedSystem.connect(props);
      // Create the cache which causes the cache-xml-file to be parsed
      Cache cache = CacheFactory.create(ds);
      QueryService qs = cache.getQueryService();
      Region region = cache.getRegion("mainReportRegion");
      for (int i = 0; i < 100; i++) {
        Portfolio pf = new Portfolio(i);
        pf.setCreateTime(i);
        region.put("" + i, pf);
      }
      // verify that a query on the creation time works as expected
      SelectResults results = (SelectResults) qs
          .newQuery(
              "<trace>SELECT * FROM /mainReportRegion.entrySet mr Where mr.value.createTime > 1L and mr.value.createTime < 3L")
          .execute();
      assertEquals("OQL index results did not match", 1, results.size());
      cache.close();
      ds.disconnect();
    }
    // Second run: recover the persisted region with a changed cache.xml.
    {
      Properties props = new Properties();
      props.setProperty(NAME, "test");
      props.setProperty(MCAST_PORT, "0");
      // Using a different cache.xml that changes some region properties
      // That will force the disk code to copy the region entries.
      props.setProperty(CACHE_XML_FILE,
          getClass().getResource("index-creation-without-eviction.xml").toURI().getPath());
      DistributedSystem ds = DistributedSystem.connect(props);
      Cache cache = CacheFactory.create(ds);
      QueryService qs = cache.getQueryService();
      Region region = cache.getRegion("mainReportRegion");
      // verify that a query on the creation time works as expected
      SelectResults results = (SelectResults) qs
          .newQuery(
              "<trace>SELECT * FROM /mainReportRegion.entrySet mr Where mr.value.createTime > 1L and mr.value.createTime < 3L")
          .execute();
      assertEquals("OQL index results did not match", 1, results.size());
      ds.disconnect();
      FileUtils.deleteDirectory(file);
    }
  }

  // An index declared in cache.xml for a local-scope region must exist after cache creation.
  @Test
  public void testIndexCreationFromXMLForLocalScope() throws Exception {
    InternalDistributedSystem.getAnyInstance().disconnect();
    File file = new File("persistData0");
    file.mkdir();
    Properties props = new Properties();
    props.setProperty(NAME, "test");
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(CACHE_XML_FILE,
        getClass().getResource("index-creation-without-eviction.xml").toURI().getPath());
    DistributedSystem ds = DistributedSystem.connect(props);
    Cache cache = CacheFactory.create(ds);
    Region localRegion = cache.getRegion("localRegion");
    for (int i = 0; i < 100; i++) {
      Portfolio pf = new Portfolio(i);
      localRegion.put("" + i, pf);
    }
    QueryService qs = cache.getQueryService();
    Index ind = qs.getIndex(localRegion, "localIndex");
    assertNotNull("Index localIndex should have been created ", ind);
    // verify that a query on the creation time works as expected
    SelectResults results = (SelectResults) qs
        .newQuery("<trace>SELECT * FROM " + localRegion.getFullPath() + " Where ID > 0").execute();
    assertEquals("OQL index results did not match", 99, results.size());
    ds.disconnect();
    FileUtils.deleteDirectory(file);
  }

  // Same as above but for a local region backed by disk.
  @Test
  public void testIndexCreationFromXMLForDiskLocalScope() throws Exception {
    InternalDistributedSystem.getAnyInstance().disconnect();
    File file = new File("persistData0"); // TODO: use TemporaryFolder
    file.mkdir();
    Properties props = new Properties();
    props.setProperty(NAME, "test");
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(CACHE_XML_FILE,
        getClass().getResource("index-creation-without-eviction.xml").toURI().getPath());
    DistributedSystem ds = DistributedSystem.connect(props);
    Cache cache = CacheFactory.create(ds);
    Region localDiskRegion = cache.getRegion("localDiskRegion");
    for (int i = 0; i < 100; i++) {
      Portfolio pf = new Portfolio(i);
      localDiskRegion.put("" + i, pf);
    }
    QueryService qs = cache.getQueryService();
    Index ind = qs.getIndex(localDiskRegion, "localDiskIndex");
    assertNotNull("Index localIndex should have been created ", ind);
    // verify that a query on the creation time works as expected
    SelectResults results = (SelectResults) qs
        .newQuery(
            "<trace>SELECT * FROM " + localDiskRegion.getFullPath() + " Where status = 'active'")
        .execute();
    assertEquals("OQL index results did not match", 50, results.size());
    ds.disconnect();
    FileUtils.deleteDirectory(file);
  }

  // Indexes on an overflow region must be rebuilt on recovery with correct statistics.
  @Test
  public void testIndexInitializationForOverFlowRegions() throws Exception {
    InternalDistributedSystem.getAnyInstance().disconnect();
    File file = new File("persistData0");
    file.mkdir();
    // First run: populate the persisted overflow region and check index stats.
    {
      Properties props = new Properties();
      props.setProperty(NAME, "test");
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
      props.setProperty(ENABLE_TIME_STATISTICS, "true");
      props.setProperty(CACHE_XML_FILE,
          getClass().getResource("index-recovery-overflow.xml").toURI().getPath());
      DistributedSystem ds = DistributedSystem.connect(props);
      // Create the cache which causes the cache-xml-file to be parsed
      Cache cache = CacheFactory.create(ds);
      QueryService qs = cache.getQueryService();
      Region region = cache.getRegion("mainReportRegion");
      for (int i = 0; i < 100; i++) {
        Portfolio pf = new Portfolio(i);
        pf.setCreateTime(i);
        region.put("" + i, pf);
      }
      IndexStatistics is1 = qs.getIndex(region, "status").getStatistics();
      assertEquals(2,
is1.getNumberOfKeys());
      assertEquals(100, is1.getNumberOfValues());
      IndexStatistics is2 = qs.getIndex(region, "ID").getStatistics();
      assertEquals(100, is2.getNumberOfKeys());
      assertEquals(100, is2.getNumberOfValues());
      // verify that a query on the creation time works as expected
      SelectResults results = (SelectResults) qs
          .newQuery(
              "<trace>SELECT * FROM /mainReportRegion.entrySet mr Where mr.value.createTime > 1L and mr.value.createTime < 3L")
          .execute();
      assertEquals("OQL index results did not match", 1, results.size());
      cache.close();
      ds.disconnect();
    }
    // Second run: recover from disk and verify the indexes were re-initialized.
    {
      Properties props = new Properties();
      props.setProperty(NAME, "test");
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
      props.setProperty(ENABLE_TIME_STATISTICS, "true");
      props.setProperty(CACHE_XML_FILE,
          getClass().getResource("index-recovery-overflow.xml").toURI().getPath());
      DistributedSystem ds = DistributedSystem.connect(props);
      Cache cache = CacheFactory.create(ds);
      QueryService qs = cache.getQueryService();
      Region region = cache.getRegion("mainReportRegion");
      assertTrue("Index initialization time should not be 0.",
          ((LocalRegion) region).getCachePerfStats().getIndexInitializationTime() > 0);
      IndexStatistics is1 = qs.getIndex(region, "status").getStatistics();
      assertEquals(2, is1.getNumberOfKeys());
      assertEquals(100, is1.getNumberOfValues());
      IndexStatistics is2 = qs.getIndex(region, "ID").getStatistics();
      assertEquals(100, is2.getNumberOfKeys());
      assertEquals(100, is2.getNumberOfValues());
      // verify that a query on the creation time works as expected
      SelectResults results = (SelectResults) qs
          .newQuery(
              "<trace>SELECT * FROM /mainReportRegion.entrySet mr Where mr.value.createTime > 1L and mr.value.createTime < 3L")
          .execute();
      assertEquals("OQL index results did not match", 1, results.size());
      ds.disconnect();
      FileUtils.deleteDirectory(file);
    }
  }

  // Indexes created with loadEntries=false stay empty; recreated with true they populate at once.
  @Test
  public void testIndexCreationWithoutLoadingData() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Index i1 = ((DefaultQueryService) qs).createIndex("statusIndex", IndexType.FUNCTIONAL,
        "status", "/portfolios", null, false);
    Index i2 = ((DefaultQueryService) qs).createIndex("secIndex", IndexType.FUNCTIONAL,
        "pos.secId", "/portfolios p, p.positions.values pos", null, false);
    Index i3 = ((DefaultQueryService) qs).createIndex("statusHashIndex", IndexType.HASH, "status",
        "/portfolios", null, false);
    assertEquals("Index should have been empty ", 0, i1.getStatistics().getNumberOfKeys());
    assertEquals("Index should have been empty ", 0, i1.getStatistics().getNumberOfValues());
    assertEquals("Index should have been empty ", 0, i2.getStatistics().getNumberOfKeys());
    assertEquals("Index should have been empty ", 0, i2.getStatistics().getNumberOfValues());
    assertEquals("Index should have been empty ", 0, i3.getStatistics().getNumberOfKeys());
    assertEquals("Index should have been empty ", 0, i3.getStatistics().getNumberOfValues());
    qs.removeIndexes();
    i1 = ((DefaultQueryService) qs).createIndex("statusIndex", IndexType.FUNCTIONAL, "status",
        "/portfolios", null, true);
    i2 = ((DefaultQueryService) qs).createIndex("secIndex", IndexType.FUNCTIONAL, "pos.secId",
        "/portfolios p, p.positions.values pos", null, true);
    i3 = ((DefaultQueryService) qs).createIndex("statusHashIndex", IndexType.HASH, "status",
        "/portfolios", null, true);
    assertEquals("Index should not have been empty ", 2, i1.getStatistics().getNumberOfKeys());
    assertEquals("Index should not have been empty ", 4, i1.getStatistics().getNumberOfValues());
    assertEquals("Index should not have been empty ", 8, i2.getStatistics().getNumberOfKeys());
    assertEquals("Index should not have been empty ", 8, i2.getStatistics().getNumberOfValues());
    // hash index does not have keys
    assertEquals("Index should not have been empty ", 0, i3.getStatistics().getNumberOfKeys());
    assertEquals("Index should not have been empty ", 4, i3.getStatistics().getNumberOfValues());
  }

  // Query observer that records which indexes a query used, so tests can assert index usage.
  private static class QueryObserverImpl extends QueryObserverAdapter {
    boolean isIndexesUsed = false;
    ArrayList indexesUsed = new ArrayList();

    public void beforeIndexLookup(Index index, int oper, Object key) {
      indexesUsed.add(index.getName());
    }

    public void afterIndexLookup(Collection results) {
      if (results != null) {
        isIndexesUsed = true;
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.sparql.solver;

import static org.apache.jena.sparql.solver.TestSolverLib.bgp ;
import static org.apache.jena.sparql.solver.TestSolverLib.matcher ;
import static org.apache.jena.sparql.solver.TestSolverLib.triple ;

import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.sparql.core.BasicPattern ;
import org.apache.jena.sparql.engine.optimizer.StatsMatcher ;
import org.apache.jena.sparql.engine.optimizer.reorder.* ;
import org.junit.Test ;

// Tests for StatsMatcher triple-pattern matching used by the BGP reorder optimizer.
// A match returns the rule's weight; -1 means "no rule matched".
public class TestReorder extends BaseTest {
    // Exact subject/predicate, ANY object: matches, yields the rule weight.
    @Test
    public void match_01() {
        StatsMatcher matcher = matcher("((:x :p ANY) 5)") ;
        Triple t = triple("(:x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(5.0, d, 0) ;
    }

    // Different predicate: no rule matches.
    @Test
    public void match_02() {
        StatsMatcher matcher = matcher("((:x :p ANY) 5)") ;
        Triple t = triple("(:x :q ?v)") ; // No match
        double d = matcher.match(t) ;
        assertEquals(-1, d, 0) ;
    }

    // VAR in the rule matches a variable in the triple.
    @Test
    public void match_03() {
        StatsMatcher matcher = matcher("((:x :p VAR) 5)") ;
        Triple t = triple("(:x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // TERM matches any concrete RDF term in the subject position.
    @Test
    public void match_04() {
        StatsMatcher matcher = matcher("((TERM :p VAR) 5)") ;
        Triple t = triple("(:x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // URI matches an IRI subject.
    @Test
    public void match_05() {
        StatsMatcher matcher = matcher("((URI :p VAR) 5)") ;
        Triple t = triple("(:x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // LITERAL does not match an IRI subject.
    @Test
    public void match_06() {
        StatsMatcher matcher = matcher("((LITERAL :p VAR) 5)") ;
        Triple t = triple("(:x :p ?v)") ; // No match
        double d = matcher.match(t) ;
        assertEquals(-1, d, 0) ;
    }

    // BNODE matches a blank-node subject.
    @Test
    public void match_07() {
        StatsMatcher matcher = matcher("((BNODE :p VAR) 5)") ;
        Triple t = triple("(_:a :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // LITERAL in the object position does not match a variable object.
    @Test
    public void match_08() {
        StatsMatcher matcher = matcher("((VAR :p LITERAL) 5)") ;
        Triple t = triple("(?x :p ?v)") ; // No match
        double d = matcher.match(t) ;
        assertEquals(-1, d, 0) ;
    }

    // LITERAL in the object position matches a literal object.
    @Test
    public void match_09() {
        StatsMatcher matcher = matcher("((VAR :p LITERAL) 5)") ;
        Triple t = triple("(?x :p 1913)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // Test first match wins.
    @Test
    public void match_10() {
        StatsMatcher matcher = matcher("((VAR :p LITERAL) 5) ((VAR :p ANY) 10)") ;
        Triple t = triple("(?x :p 1913)") ;
        double d = matcher.match(t) ;
        assertEquals(5, d, 0) ;
    }

    // Rule order matters: the broader rule listed first wins.
    @Test
    public void match_11() {
        StatsMatcher matcher = matcher("((VAR :p ANY) 10) ((VAR :p LITERAL) 5)") ;
        Triple t = triple("(?x :p 1913)") ;
        double d = matcher.match(t) ;
        assertEquals(10, d, 0) ;
    }

    // Abbreviated forms.
    // (:p 10) abbreviates a predicate-only rule.
    @Test
    public void match_20() {
        StatsMatcher matcher = matcher("(:p 10) ") ;
        Triple t = triple("(?x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(10, d, 0) ;
    }

    // Bound object with abbreviated rule falls back to the small P-O weight.
    @Test
    public void match_21() {
        StatsMatcher matcher = matcher("(:p 10) ") ;
        Triple t = triple("(?x :p 1913)") ;
        double d = matcher.match(t) ;
        assertEquals(StatsMatcher.weightPO_small, d, 0) ;
    }

    // Fully concrete triple matches with weight 1.
    @Test
    public void match_22() {
        StatsMatcher matcher = matcher("(:p 11)") ;
        Triple t = triple("(:x :p 1913)") ;
        double d = matcher.match(t) ;
        assertEquals(1, d, 0) ;
    }

    // Variable predicate does not match a predicate-only rule.
    @Test
    public void match_23() {
        StatsMatcher matcher = matcher("(:p 11)") ;
        Triple t = triple("(:x ?p 1913)") ; // No match.
        double d = matcher.match(t) ;
        assertEquals(-1, d, 0) ;
    }

    // TERM abbreviation acts as a catch-all for other predicates.
    @Test
    public void match_24() {
        StatsMatcher matcher = matcher("(:p 11) (TERM 12)") ;
        Triple t = triple("(?x :q ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(12, d, 0) ;
    }

    // Bounds abbreviation rules.
    @Test
    public void match_25() {
        StatsMatcher matcher = matcher("(:p 3) (other 1)") ;
        Triple t = triple("(?x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(3, d, 0) ;
    }

    // "other" supplies the weight when no predicate rule applies.
    @Test
    public void match_26() {
        StatsMatcher matcher = matcher("(:pp 3) (other 1)") ;
        Triple t = triple("(:x :p ?v)") ;
        double d = matcher.match(t) ;
        assertEquals(1, d, 0) ;
    }

    // Bounds abbreviation rules.
@Test public void match_27() { StatsMatcher matcher = matcher("(:p 200) (TERM 2)") ; Triple t = triple("(?x :q :v)") ; double d = matcher.match(t) ; assertEquals(2, d, 0) ; } @Test public void reorderIndexes1() { ReorderProc proc = new ReorderProcIndexes(new int[]{0,1}) ; BasicPattern bgp = bgp("(bgp (:x :p ?v) (:x :q ?w))") ; BasicPattern bgp2 = proc.reorder(bgp) ; assertEquals(bgp, bgp2) ; } @Test public void reorderIndexes2() { ReorderProc proc = new ReorderProcIndexes(new int[]{1,0}) ; BasicPattern bgp1 = bgp("(bgp (:x :p ?v) (:x :q ?w))") ; BasicPattern bgp2 = bgp("(bgp (:x :q ?w) (:x :p ?v))") ; BasicPattern bgp3 = proc.reorder(bgp1) ; assertEquals(bgp2, bgp3) ; } @Test public void stats_01() { StatsMatcher m = matcher("((:x :p ANY) 5)") ; ReorderTransformation transform = new ReorderWeighted(m) ; BasicPattern bgp = bgp("(bgp)") ; BasicPattern bgp2 = transform.reorder(bgp) ; assertEquals(bgp2, bgp) ; } @Test public void stats_dft_01() { ReorderTransformation transform = ReorderLib.fixed() ; BasicPattern bgp = bgp("(bgp)") ; BasicPattern bgp2 = transform.reorder(bgp) ; assertEquals(bgp2, bgp) ; } }
/* * Copyright 2017 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.socket.oio; import io.netty.buffer.ByteBufAllocator; import io.netty.channel.ChannelException; import io.netty.channel.ChannelOption; import io.netty.channel.MessageSizeEstimator; import io.netty.channel.PreferHeapByteBufAllocator; import io.netty.channel.RecvByteBufAllocator; import io.netty.channel.WriteBufferWaterMark; import io.netty.channel.socket.DatagramChannel; import io.netty.channel.socket.DefaultDatagramChannelConfig; import java.io.IOException; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.Map; import static io.netty.channel.ChannelOption.SO_TIMEOUT; final class DefaultOioDatagramChannelConfig extends DefaultDatagramChannelConfig implements OioDatagramChannelConfig { DefaultOioDatagramChannelConfig(DatagramChannel channel, DatagramSocket javaSocket) { super(channel, javaSocket); setAllocator(new PreferHeapByteBufAllocator(getAllocator())); } @Override public Map<ChannelOption<?>, Object> getOptions() { return getOptions(super.getOptions(), SO_TIMEOUT); } @SuppressWarnings("unchecked") @Override public <T> T getOption(ChannelOption<T> option) { if (option == SO_TIMEOUT) { return (T) Integer.valueOf(getSoTimeout()); } return super.getOption(option); } @Override public <T> boolean setOption(ChannelOption<T> option, T value) { validate(option, value); if 
(option == SO_TIMEOUT) { setSoTimeout((Integer) value); } else { return super.setOption(option, value); } return true; } @Override public OioDatagramChannelConfig setSoTimeout(int timeout) { try { javaSocket().setSoTimeout(timeout); } catch (IOException e) { throw new ChannelException(e); } return this; } @Override public int getSoTimeout() { try { return javaSocket().getSoTimeout(); } catch (IOException e) { throw new ChannelException(e); } } @Override public OioDatagramChannelConfig setBroadcast(boolean broadcast) { super.setBroadcast(broadcast); return this; } @Override public OioDatagramChannelConfig setInterface(InetAddress interfaceAddress) { super.setInterface(interfaceAddress); return this; } @Override public OioDatagramChannelConfig setLoopbackModeDisabled(boolean loopbackModeDisabled) { super.setLoopbackModeDisabled(loopbackModeDisabled); return this; } @Override public OioDatagramChannelConfig setNetworkInterface(NetworkInterface networkInterface) { super.setNetworkInterface(networkInterface); return this; } @Override public OioDatagramChannelConfig setReuseAddress(boolean reuseAddress) { super.setReuseAddress(reuseAddress); return this; } @Override public OioDatagramChannelConfig setReceiveBufferSize(int receiveBufferSize) { super.setReceiveBufferSize(receiveBufferSize); return this; } @Override public OioDatagramChannelConfig setSendBufferSize(int sendBufferSize) { super.setSendBufferSize(sendBufferSize); return this; } @Override public OioDatagramChannelConfig setTimeToLive(int ttl) { super.setTimeToLive(ttl); return this; } @Override public OioDatagramChannelConfig setTrafficClass(int trafficClass) { super.setTrafficClass(trafficClass); return this; } @Override public OioDatagramChannelConfig setWriteSpinCount(int writeSpinCount) { super.setWriteSpinCount(writeSpinCount); return this; } @Override public OioDatagramChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) { super.setConnectTimeoutMillis(connectTimeoutMillis); return this; } 
@Override public OioDatagramChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) { super.setMaxMessagesPerRead(maxMessagesPerRead); return this; } @Override public OioDatagramChannelConfig setAllocator(ByteBufAllocator allocator) { super.setAllocator(allocator); return this; } @Override public OioDatagramChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) { super.setRecvByteBufAllocator(allocator); return this; } @Override public OioDatagramChannelConfig setAutoRead(boolean autoRead) { super.setAutoRead(autoRead); return this; } @Override public OioDatagramChannelConfig setAutoClose(boolean autoClose) { super.setAutoClose(autoClose); return this; } @Override public OioDatagramChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) { super.setWriteBufferHighWaterMark(writeBufferHighWaterMark); return this; } @Override public OioDatagramChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) { super.setWriteBufferLowWaterMark(writeBufferLowWaterMark); return this; } @Override public OioDatagramChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark) { super.setWriteBufferWaterMark(writeBufferWaterMark); return this; } @Override public OioDatagramChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) { super.setMessageSizeEstimator(estimator); return this; } }
/**
 * Copyright 2013 John Ericksen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.parceler;

import android.util.SparseArray;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import uk.co.jemos.podam.annotations.PodamCollection;
import uk.co.jemos.podam.annotations.PodamStrategyValue;

import java.util.*;

/**
 * Test fixture covering the full range of types the Parceler converter
 * must round-trip: primitives, boxed types, arrays, collections, maps,
 * nested generics and Android's {@code SparseArray}. Podam annotations
 * drive random population of the {@code SubParcel} fields during tests.
 *
 * @author John Ericksen
 */
@Parcel
public class ConverterTarget {

    // Fields whose types don't support reflective equality (array elements
    // inside a set; SparseArray has no equals) — excluded from equals/hashCode.
    @Transient
    private static final String[] FIELDS_EXCLUDED = {"stringHashSetArray", "sparseArray"};

    // Primitive / boxed byte forms, scalar and array.
    byte b;
    Byte bobj;
    byte[] bya;
    Byte[] bbja;
    // double forms.
    double d;
    Double dobj;
    double[] da;
    Double[] dobja;
    // float forms.
    float f;
    Float fobj;
    float[] fa;
    Float[] fobja;
    // int forms.
    int i;
    Integer iobj;
    int[] ia;
    Integer[] iobja;
    // long forms.
    long l;
    Long lobj;
    long[] la;
    Long[] lobja;
    // char forms.
    char c;
    Character cobj;
    char[] ca;
    Character[] cobja;
    // boolean forms.
    boolean bo;
    Boolean boobj;
    boolean[] ba;
    Boolean[] bobja;
    // Strings.
    String s;
    String[] sa;
    // Nested @Parcel types — Podam strategies construct the SubParcel values.
    @PodamCollection(collectionElementStrategy = SubParcelStrategy.class)
    SubParcel[] subparcela;
    List<String> list;
    Map<String, String> map;
    @PodamStrategyValue(SubParcelStrategy.class)
    SubParcel parcel;
    @PodamCollection(collectionElementStrategy = SubParcelStrategy.class)
    List<SubParcel> parcelList;
    // Deeply nested generics exercise recursive converter generation.
    ArrayList<List<String>> multiList;
    @PodamCollection(mapKeyStrategy = SubParcelStrategy.class, mapElementStrategy = SubParcelStrategy.class)
    Map<SubParcel, SubParcel> parcelMap;
    @PodamCollection(mapKeyStrategy = SubParcelStrategy.class, mapElementStrategy = SubParcelStrategy.class)
    HashMap<SubParcel, SubParcel> parcelHashMap;
    Map<List<Map<String, Integer>>, Map<List<String>, Integer>> ridiculousMap;
    Set<String> stringSet;
    HashSet<String> stringHashSet;
    @PodamCollection(collectionElementStrategy = StringArrayStrategy.class)
    HashSet<String[]> stringHashSetArray;
    Integer[][] multidimensionalArray;
    SparseArray<String> sparseArray;

    // ---- Trivial bean accessors (no logic) ----

    public byte getB() { return b; }
    public void setB(byte b) { this.b = b; }
    public Byte getBobj() { return bobj; }
    public void setBobj(Byte bobj) { this.bobj = bobj; }
    public double getD() { return d; }
    public void setD(double d) { this.d = d; }
    public Double getDobj() { return dobj; }
    public void setDobj(Double dobj) { this.dobj = dobj; }
    public double[] getDa() { return da; }
    public void setDa(double[] da) { this.da = da; }
    public Double[] getDobja() { return dobja; }
    public void setDobja(Double[] dobja) { this.dobja = dobja; }
    public float getF() { return f; }
    public void setF(float f) { this.f = f; }
    public Float getFobj() { return fobj; }
    public void setFobj(Float fobj) { this.fobj = fobj; }
    public float[] getFa() { return fa; }
    public void setFa(float[] fa) { this.fa = fa; }
    public Float[] getFobja() { return fobja; }
    public void setFobja(Float[] fobja) { this.fobja = fobja; }
    public int getI() { return i; }
    public void setI(int i) { this.i = i; }
    public Integer getIobj() { return iobj; }
    public void setIobj(Integer iobj) { this.iobj = iobj; }
    public int[] getIa() { return ia; }
    public void setIa(int[] ia) { this.ia = ia; }
    public Integer[] getIobja() { return iobja; }
    public void setIobja(Integer[] iobja) { this.iobja = iobja; }
    public long getL() { return l; }
    public void setL(long l) { this.l = l; }
    public Long getLobj() { return lobj; }
    public void setLobj(Long lobj) { this.lobj = lobj; }
    public long[] getLa() { return la; }
    public void setLa(long[] la) { this.la = la; }
    public Long[] getLobja() { return lobja; }
    public void setLobja(Long[] lobja) { this.lobja = lobja; }
    public String getS() { return s; }
    public void setS(String s) { this.s = s; }
    public String[] getSa() { return sa; }
    public void setSa(String[] sa) { this.sa = sa; }
    public SubParcel[] getSubparcela() { return subparcela; }
    public void setSubparcela(SubParcel[] subparcela) { this.subparcela = subparcela; }
    public List<String> getList() { return list; }
    public void setList(List<String> list) { this.list = list; }
    public Map<String, String> getMap() { return map; }
    public void setMap(Map<String, String> map) { this.map = map; }
    public SubParcel getParcel() { return parcel; }
    public void setParcel(SubParcel parcel) { this.parcel = parcel; }
    public List<SubParcel> getParcelList() { return parcelList; }
    public void setParcelList(List<SubParcel> parcelList) { this.parcelList = parcelList; }
    public ArrayList<List<String>> getMultiList() { return multiList; }
    public void setMultiList(ArrayList<List<String>> multiList) { this.multiList = multiList; }
    public Map<SubParcel, SubParcel> getParcelMap() { return parcelMap; }
    public void setParcelMap(Map<SubParcel, SubParcel> parcelMap) { this.parcelMap = parcelMap; }
    public HashMap<SubParcel, SubParcel> getParcelHashMap() { return parcelHashMap; }
    public void setParcelHashMap(HashMap<SubParcel, SubParcel> parcelHashMap) { this.parcelHashMap = parcelHashMap; }
    public Map<List<Map<String, Integer>>, Map<List<String>, Integer>> getRidiculousMap() { return ridiculousMap; }
    public void setRidiculousMap(Map<List<Map<String, Integer>>, Map<List<String>, Integer>> ridiculousMap) { this.ridiculousMap = ridiculousMap; }
    public Set<String> getStringSet() { return stringSet; }
    public void setStringSet(Set<String> stringSet) { this.stringSet = stringSet; }
    public HashSet<String> getStringHashSet() { return stringHashSet; }
    public void setStringHashSet(HashSet<String> stringHashSet) { this.stringHashSet = stringHashSet; }
    public HashSet<String[]> getStringHashSetArray() { return stringHashSetArray; }
    public void setStringHashSetArray(HashSet<String[]> stringHashSetArray) { this.stringHashSetArray = stringHashSetArray; }
    public Integer[][] getMultidimensionalArray() { return multidimensionalArray; }
    public void setMultidimensionalArray(Integer[][] multidimensionalArray) { this.multidimensionalArray = multidimensionalArray; }
    public SparseArray<String> getSparseArray() { return sparseArray; }
    public void setSparseArray(SparseArray<String> sparseArray) { this.sparseArray = sparseArray; }
    public byte[] getBya() { return bya; }
    public void setBya(byte[] bya) { this.bya = bya; }
    public Byte[] getBbja() { return bbja; }
    public void setBbja(Byte[] bbja) { this.bbja = bbja; }
    public char getC() { return c; }
    public void setC(char c) { this.c = c; }
    public Character getCobj() { return cobj; }
    public void setCobj(Character cobj) { this.cobj = cobj; }
    public char[] getCa() { return ca; }
    public void setCa(char[] ca) { this.ca = ca; }
    public Character[] getCobja() { return cobja; }
    public void setCobja(Character[] cobja) { this.cobja = cobja; }
    public boolean isBo() { return bo; }
    public void setBo(boolean bo) { this.bo = bo; }
    public Boolean getBoobj() { return boobj; }
    public void setBoobj(Boolean boobj) { this.boobj = boobj; }
    public boolean[] getBa() { return ba; }
    public void setBa(boolean[] ba) { this.ba = ba; }
    public Boolean[] getBobja() { return bobja; }
    public void setBobja(Boolean[] bobja) { this.bobja = bobja; }

    // Reflection-based equality over all fields except FIELDS_EXCLUDED,
    // so round-trip tests can compare whole instances without listing fields.
    @Override
    public boolean equals(Object that) {
        return EqualsBuilder.reflectionEquals(this, that, FIELDS_EXCLUDED);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this, FIELDS_EXCLUDED);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.chain2.config.xml; import static org.apache.commons.chain2.testutils.HasCommandCount.hasCommandCount; import static org.apache.commons.chain2.testutils.HasLog.hasLog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.net.URL; import java.util.Arrays; import java.util.List; import org.apache.commons.chain2.Catalog; import org.apache.commons.chain2.CatalogFactory; import org.apache.commons.chain2.Context; import org.apache.commons.chain2.Processing; import org.apache.commons.chain2.testutils.AddingCommand; import org.apache.commons.chain2.impl.CatalogBase; import org.apache.commons.chain2.impl.CatalogFactoryBase; import org.apache.commons.chain2.impl.ChainBase; import org.apache.commons.chain2.impl.ContextBase; import org.apache.commons.chain2.testutils.DelegatingCommand; import org.apache.commons.chain2.testutils.DelegatingFilter; import org.apache.commons.chain2.testutils.ExceptionCommand; import org.apache.commons.chain2.testutils.ExceptionFilter; 
import org.apache.commons.chain2.testutils.NonDelegatingCommand; import org.apache.commons.chain2.testutils.NonDelegatingFilter; import org.apache.commons.digester3.Digester; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; /** * Parameterized test case for {@link XmlConfigParser}, that uses config locations as data points. * * <p><strong>Note:</strong> This test case assumes, that all config files will be parsed to the same catalog * and command instances.</p> * * @version $Id$ */ @RunWith(Parameterized.class) public class XmlConfigParserTestCase { private Catalog<String, Object, Context<String, Object>> catalog = null; private Context<String, Object> context = null; private XmlConfigParser parser = null; private final String configLocation; @Parameterized.Parameters public static List<Object[]> data() { return Arrays.asList(new Object[][] { {"/org/apache/commons/chain2/config/xml/test-config.xml"}, {"/org/apache/commons/chain2/config/xml/test-config-2.xml"} } ); } public XmlConfigParserTestCase(String configLocation) { this.configLocation = configLocation; } @Before public void setUp() throws Exception { init(); load(configLocation); } private void init() { CatalogFactoryBase.clear(); catalog = new CatalogBase<String, Object, Context<String, Object>>(); context = new ContextBase(); parser = new XmlConfigParser(); } @After public void tearDown() { parser = null; context = null; catalog = null; } // Load the default test-config.xml file and examine the results @Test public void testDefault() throws Exception { // Check overall command count assertThat(catalog, hasCommandCount(17)); // Check individual single command instances { AddingCommand command = catalog.getCommand("AddingCommand"); assertNotNull(command); } { DelegatingCommand command = catalog.getCommand("DelegatingCommand"); assertNotNull(command); } { DelegatingFilter command = 
catalog.getCommand("DelegatingFilter"); assertNotNull(command); } { ExceptionCommand command = catalog.getCommand("ExceptionCommand"); assertNotNull(command); } { ExceptionFilter command = catalog.getCommand("ExceptionFilter"); assertNotNull(command); } { NonDelegatingCommand command = catalog.getCommand("NonDelegatingCommand"); assertNotNull(command); } { NonDelegatingFilter command = catalog.getCommand("NonDelegatingFilter"); assertNotNull(command); } ChainBase chain = catalog.getCommand("ChainBase"); assertNotNull(chain); assertTrue(chain instanceof TestChain); // Check configurable properties instance TestCommand tcommand = catalog.getCommand("Configurable"); assertNotNull(tcommand); assertEquals("Foo Value", tcommand.getFoo()); assertEquals("Bar Value", tcommand.getBar()); } // Test execution of chain "Execute2a" @Test public void testExecute2a() throws Exception { assertEquals(Processing.FINISHED, catalog.getCommand("Execute2a").execute(context)); assertThat(context, hasLog("1/2/3")); } // Test execution of chain "Execute2b" @Test public void testExecute2b() throws Exception { assertEquals(Processing.CONTINUE, catalog.getCommand("Execute2b").execute(context)); assertThat(context, hasLog("1/2/3")); } // Test execution of chain "Execute2c" @Test public void testExecute2c() throws Exception { try { catalog.getCommand("Execute2c").execute(context); } catch (ArithmeticException e) { assertEquals("Correct exception id", "3", e.getMessage()); } assertThat(context, hasLog("1/2/3")); } // Test execution of chain "Execute2d" @Test public void testExecute2d() throws Exception { try { catalog.getCommand("Execute2d").execute(context); } catch (ArithmeticException e) { assertEquals("Correct exception id", "2", e.getMessage()); } assertThat(context, hasLog("1/2")); } // Test execution of chain "Execute4a" @Test public void testExecute4a() throws Exception { assertEquals(Processing.FINISHED, catalog.getCommand("Execute4a").execute(context)); assertThat(context, 
hasLog("1/2/3/c/a")); } // Test execution of chain "Execute2b" @Test public void testExecute4b() throws Exception { assertEquals(Processing.CONTINUE, catalog.getCommand("Execute4b").execute(context)); assertThat(context, hasLog("1/2/3/b")); } // Test execution of chain "Execute4c" @Test public void testExecute4c() throws Exception { try { catalog.getCommand("Execute4c").execute(context); } catch (ArithmeticException e) { assertEquals("Correct exception id", "3", e.getMessage()); } assertThat(context, hasLog("1/2/3/c/b/a")); } // Test execution of chain "Execute4d" @Test public void testExecute4d() throws Exception { try { catalog.getCommand("Execute4d").execute(context); } catch (ArithmeticException e) { assertEquals("Correct exception id", "2", e.getMessage()); } assertThat(context, hasLog("1/2/b/a")); } // Test a pristine ConfigParser instance @Test public void testPristine() throws Exception { init(); // Validate the "digester" property Digester digester = parser.getDigester(); assertNotNull("Returned a Digester instance", digester); assertFalse("Default namespaceAware", digester.getNamespaceAware()); assertTrue("Default useContextClassLoader", digester.getUseContextClassLoader()); assertFalse("Default validating", digester.getValidating()); // Validate the "ruleSet" property ConfigRuleSet ruleSet = (ConfigRuleSet) parser.getRuleSet(); assertNotNull("Returned a RuleSet instance", ruleSet); assertEquals("Default chainElement", "chain", ruleSet.getChainElement()); assertEquals("Default classAttribute", "className", ruleSet.getClassAttribute()); assertEquals("Default commandElement", "command", ruleSet.getCommandElement()); assertEquals("Default nameAttribute", "name", ruleSet.getNameAttribute()); assertNull("Default namespaceURI", ruleSet.getNamespaceURI()); // Validate the "useContextClassLoader" property assertTrue("Defaults to use context class loader", parser.getUseContextClassLoader()); // Ensure that there are no preconfigured commands in the catalog 
assertThat(catalog, hasCommandCount(0)); } // Load the specified catalog from the specified resource path private void load(String path) throws Exception { URL url = getClass().getResource(path); if (url == null) { String msg = String.format("Can't find resource for path: %s", path); throw new IllegalArgumentException(msg); } CatalogFactory<String, Object, Context<String, Object>> catalogFactory = parser.parse(url); catalog = catalogFactory.getCatalog("foo"); } }
package com.aspose.gridweb.test.servlet; import java.util.Date; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; // //import com.aspose.cells.GridCell; //import com.aspose.cells.GridCells; //import com.aspose.cells.GridTableItemStyle; //import com.aspose.cells.GridWebBean; //import com.aspose.cells.GridWorksheetCollection; import com.aspose.gridweb.*; import com.aspose.gridweb.test.TestGridWebBaseServlet; public class FormatServlet extends TestGridWebBaseServlet { private static final long serialVersionUID = 1L; @Override public void reload(GridWebBean gridweb,HttpServletRequest request, HttpServletResponse response) { try { super.reloadfile(gridweb,request, "format.xls"); } catch (Exception e) { e.printStackTrace(); } } public void loadCustomFormatFile(GridWebBean gridweb,HttpServletRequest request, HttpServletResponse response) { this.reload(gridweb,request, response); GridCells gridCells = gridweb.getActiveSheet().getCells(); gridCells.get("A1").setValue("Custom Format"); gridCells.get("A2").setValue("0.0"); gridCells.get("A3").setValue("0.000"); gridCells.get("A4").setValue("#,##0.0"); gridCells.get("A5").setValue("US$#,##0;US$-#,##0"); gridCells.get("A6").setValue("0.0%"); gridCells.get("A7").setValue("0.000E+00"); gridCells.get("A8").setValue("yyyy-m-d h:mm"); gridCells.get("B1").setValue("Format Results"); GridCell B2 = gridCells.get("B2"); B2.setValue(12345.6789); B2.setCustom("0.0"); GridCell B3 = gridCells.get("B3"); B3.setValue(12345.6789); B3.setCustom("0.000"); GridCell B4 = gridCells.get("B4"); B4.setValue(543123456.789); B4.setCustom("#,##0.0"); GridCell B5 = gridCells.get("B5"); B5.setValue(-543123456.789); B5.setCustom("US$#,##0;US$-#,##0"); GridCell B6 = gridCells.get("B6"); B6.setValue(0.925687); B6.setCustom("0.0%"); GridCell B7 = gridCells.get("B7"); B7.setValue(-1234567890.5687); B7.setCustom("0.000E+00"); GridCell B8 = gridCells.get("B8"); B8.setValue(new Date()); B8.setCustom("yyyy-m-d h:mm"); } 
public void customFormat(GridWebBean gridweb,HttpServletRequest request, HttpServletResponse response) { this.reload(gridweb,request, response); GridCells gridCells = gridweb.getActiveSheet().getCells(); gridCells.get("A1").setValue("Custom Format"); gridCells.get("A2").setValue(request.getParameter("format")); gridCells.get("B1").setValue("Format Results"); GridCell B2 = gridCells.get("B2"); ///notice we use this api to automatically convert string value B2.putValue(request.getParameter("value"),true); GridTableItemStyle B2Style = B2.getStyle(); B2Style.setCustom(request.getParameter("format")); B2.setStyle(B2Style); } public void loadDateTimeFormatFile(GridWebBean gridweb,HttpServletRequest request, HttpServletResponse response) { this.reload(gridweb,request, response); GridCells gridCells = gridweb.getActiveSheet().getCells(); gridCells.get("A1").setValue("Number Type"); gridCells.get("A2").setValue("Date 1:"); gridCells.get("A3").setValue("Date 2:"); gridCells.get("A4").setValue("Date 3:"); gridCells.get("A5").setValue("Date 4:"); gridCells.get("A6").setValue("Time 1:"); gridCells.get("A7").setValue("Time 2:"); gridCells.get("A8").setValue("Time 3:"); gridCells.get("A9").setValue("Time 4:"); gridCells.get("A10").setValue("Time 5:"); gridCells.get("A11").setValue("Time 6:"); gridCells.get("A12").setValue("Time 7:"); gridCells.get("A13").setValue("Time 8:"); gridCells.get("A14").setValue("EasternDate 1:"); gridCells.get("A15").setValue("EasternDate 2:"); gridCells.get("A16").setValue("EasternDate 3:"); gridCells.get("A17").setValue("EasternDate 4:"); gridCells.get("A18").setValue("EasternDate 5:"); gridCells.get("A19").setValue("EasternDate 6:"); gridCells.get("A20").setValue("EasternDate 7:"); gridCells.get("A21").setValue("EasternDate 8:"); gridCells.get("A22").setValue("EasternDate 9:"); gridCells.get("A23").setValue("EasternDate 10:"); gridCells.get("A24").setValue("EasternDate 11:"); gridCells.get("A25").setValue("EasternDate 12:"); 
gridCells.get("A26").setValue("EasternDate 13:"); gridCells.get("A27").setValue("EasternTime 1:"); gridCells.get("A28").setValue("EasternTime 2:"); gridCells.get("A29").setValue("EasternTime 3:"); gridCells.get("A30").setValue("EasternTime 4:"); gridCells.get("A31").setValue("EasternTime 5:"); gridCells.get("A32").setValue("EasternTime 6:"); gridCells.get("B1").setValue("Format Results"); GridCell B2 = gridCells.get("B2"); B2.setValue(new Date()); B2.setNumberType(14); GridCell B3 = gridCells.get("B3"); B3.setValue(new Date()); B3.setNumberType(15); GridCell B4 = gridCells.get("B4"); B4.setValue(new Date()); B4.setNumberType(16); GridCell B5 = gridCells.get("B5"); B5.setValue(new Date()); B5.setNumberType(17); GridCell B6 = gridCells.get("B6"); B6.setValue(new Date()); B6.setNumberType(18); GridCell B7 = gridCells.get("B7"); B7.setValue(new Date()); B7.setNumberType(19); GridCell B8 = gridCells.get("B8"); B8.setValue(new Date()); B8.setNumberType(20); GridCell B9 = gridCells.get("B9"); B9.setValue(new Date()); B9.setNumberType(21); GridCell B10 = gridCells.get("B10"); B10.setValue(new Date()); B10.setNumberType(22); GridCell B11 = gridCells.get("B11"); B11.setValue(new Date()); B11.setNumberType(45); GridCell B12 = gridCells.get("B12"); B12.setValue(new Date()); B12.setNumberType(46); GridCell B13 = gridCells.get("B13"); B13.setValue(new Date()); B13.setNumberType(47); GridCell B14 = gridCells.get("B14"); B14.setValue(new Date()); B14.setNumberType(27); GridCell B15 = gridCells.get("B15"); B15.setValue(new Date()); B15.setNumberType(28); GridCell B16 = gridCells.get("B16"); B16.setValue(new Date()); B16.setNumberType(29); GridCell B17 = gridCells.get("B17"); B17.setValue(new Date()); B17.setNumberType(30); GridCell B18 = gridCells.get("B18"); B18.setValue(new Date()); B18.setNumberType(31); GridCell B19 = gridCells.get("B19"); B19.setValue(new Date()); B19.setNumberType(36); GridCell B20 = gridCells.get("B20"); B20.setValue(new Date()); B20.setNumberType(50); 
GridCell B21 = gridCells.get("B21"); B21.setValue(new Date()); B21.setNumberType(51); GridCell B22 = gridCells.get("B22"); B22.setValue(new Date()); B22.setNumberType(52); GridCell B23 = gridCells.get("B23"); B23.setValue(new Date()); B23.setNumberType(53); GridCell B24 = gridCells.get("B24"); B24.setValue(new Date()); B24.setNumberType(54); GridCell B25 = gridCells.get("B25"); B25.setValue(new Date()); B25.setNumberType(57); GridCell B26 = gridCells.get("B26"); B26.setValue(new Date()); B26.setNumberType(58); GridCell B27 = gridCells.get("B27"); B27.setValue(new Date()); B27.setNumberType(32); GridCell B28 = gridCells.get("B28"); B28.setValue(new Date()); B28.setNumberType(33); GridCell B29 = gridCells.get("B29"); B29.setValue(new Date()); B29.setNumberType(34); GridCell B30 = gridCells.get("B30"); B30.setValue(new Date()); B30.setNumberType(35); GridCell B31 = gridCells.get("B31"); B31.setValue(new Date()); B31.setNumberType(55); GridCell B32 = gridCells.get("B32"); B32.setValue(new Date()); B32.setNumberType(56); } public void dateAndTime(GridWebBean gridweb,HttpServletRequest request, HttpServletResponse response) { this.reload(gridweb,request, response); String value = (request.getParameter("value")); int numberType = Integer.parseInt(request.getParameter("DropDownList1")); String text = request.getParameter("text"); GridCells gridCells = gridweb.getActiveSheet().getCells(); gridCells.get("A1").setValue("Number Type"); gridCells.get("B1").setValue("Format Results"); gridCells.get("A2").setValue(text); GridCell B2 = gridCells.get("B2"); ///notice we use this api to automatically convert string value B2.putValue(value,true); B2.setNumberType(numberType); } }
package za.org.grassroot.webapp.controller.ussd;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import za.org.grassroot.webapp.model.ussd.AAT.Request;

import java.net.URISyntaxException;

import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static za.org.grassroot.webapp.controller.ussd.UssdSafetyGroupService.*;
import static za.org.grassroot.webapp.controller.ussd.UssdSupport.*;

/**
 * Created by paballo on 2016/07/13.
 *
 * Thin HTTP boundary for the USSD "safety group" menus: every endpoint is a
 * GET producing XML (see the class-level @RequestMapping) and delegates
 * straight to {@link UssdSafetyGroupService}, which builds the USSD menu
 * Request. Path fragments (startMenu, pickGroup, doSuffix, ...) and common
 * parameter names (phoneNumber, userInputParam, ...) come from the static
 * imports of UssdSupport / UssdSafetyGroupService.
 *
 * NOTE: parameters declared as bare {@code @RequestParam String msisdn} are
 * bound by parameter name, so those names are part of the external contract.
 */
@RequestMapping(method = GET, produces = MediaType.APPLICATION_XML_VALUE)
@RestController
public class USSDSafetyGroupController {

    private final UssdSafetyGroupService ussdSafetyGroupService;

    // Base path for every endpoint in this controller, e.g. "<homePath><safetyGroup>/".
    private static final String safetyGroupPath = homePath + safetyGroup + "/";
    private static final String groupUidParam = "groupUid";

    @Autowired
    public USSDSafetyGroupController(UssdSafetyGroupService ussdSafetyGroupService) {
        this.ussdSafetyGroupService = ussdSafetyGroupService;
    }

    /** Opening menu for managing the caller's safety group. */
    @RequestMapping(value = safetyGroupPath + startMenu)
    @ResponseBody
    public Request manageSafetyGroup(@RequestParam String msisdn) throws URISyntaxException {
        return ussdSafetyGroupService.processManageSafetyGroup(msisdn);
    }

    /** Menu to pick an existing group to serve as the safety group. */
    @RequestMapping(value = safetyGroupPath + pickGroup)
    @ResponseBody
    public Request pickSafetyGroup(@RequestParam String msisdn) throws URISyntaxException {
        return ussdSafetyGroupService.processPickSafetyGroup(msisdn);
    }

    /** Confirms the picked group identified by groupUid. */
    @RequestMapping(value = safetyGroupPath + pickGroup + doSuffix)
    @ResponseBody
    public Request pickSafetyGroupDo(@RequestParam String msisdn,
                                     @RequestParam(value = groupUidParam) String groupUid) throws URISyntaxException {
        return ussdSafetyGroupService.processPickSafetyGroupDo(msisdn, groupUid);
    }

    /* SECTION: Request and grant permission to track location
       note : switching off due to removal of LBS */

//    @RequestMapping(value = safetyGroupPath + "location/request")
//    @ResponseBody
//    public Request requestLocationTracking(@RequestParam String msisdn) throws URISyntaxException {
//        return ussdSafetyGroupService.processRequestLocationTracking(msisdn);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/request/allowed")
//    @ResponseBody
//    public Request approveLocationTracking(@RequestParam String msisdn) throws URISyntaxException {
//        return ussdSafetyGroupService.processApproveLocationTracking(msisdn);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/revoke")
//    @ResponseBody
//    public Request revokeLocationTracking(@RequestParam String msisdn) throws URISyntaxException {
//        return ussdSafetyGroupService.processRevokeLocationTracking(msisdn);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/current")
//    public Request checkCurrentLocation(@RequestParam String msisdn) throws URISyntaxException {
//        return ussdSafetyGroupService.processCheckCurrentLocation(msisdn);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/current/confirm")
//    public Request respondToCurrentLocation(@RequestParam String msisdn, @RequestParam String addressUid,
//                                            @RequestParam double latitude, @RequestParam double longitude) throws URISyntaxException {
//        return ussdSafetyGroupService.processRespondToCurrentLocation(msisdn, addressUid, latitude, longitude);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/current/change")
//    public Request changeCurrentLocation(@RequestParam String msisdn, @RequestParam String addressUid,
//                                         @RequestParam double latitude, @RequestParam double longitude) throws URISyntaxException {
//        return ussdSafetyGroupService.processChangeCurrentLocation(msisdn, addressUid, latitude, longitude);
//    }

//    @RequestMapping(value = safetyGroupPath + "location/current/describe")
//    public Request describeCurrentLocation(@RequestParam String msisdn, @RequestParam String addressUid,
//                                           @RequestParam double latitude, @RequestParam double longitude,
//                                           @RequestParam String request) throws URISyntaxException {
//        return ussdSafetyGroupService.processDescribeCurrentLocation(msisdn, addressUid, latitude, longitude, request);
//    }

    /* SECTION: Creating a safety group */

    /** Prompt for creating a brand-new safety group. */
    @RequestMapping(value = safetyGroupPath + newGroup)
    @ResponseBody
    public Request newGroup(@RequestParam(value = phoneNumber) String inputNumber) throws URISyntaxException {
        return ussdSafetyGroupService.processNewGroup(inputNumber);
    }

    /**
     * Creates the group from the entered name. The interrupted/interGroupUid
     * parameters resume a session that was cut off mid-flow.
     */
    @RequestMapping(value = safetyGroupPath + createGroupMenu)
    @ResponseBody
    public Request createGroup(@RequestParam(value = phoneNumber, required = true) String inputNumber,
                               @RequestParam(value = userInputParam, required = false) String groupName,
                               @RequestParam(value = interruptedFlag, required = false) boolean interrupted,
                               @RequestParam(value = groupUidParam, required = false) String interGroupUid) throws URISyntaxException {
        return ussdSafetyGroupService.processCreateGroup(inputNumber, groupName, interrupted, interGroupUid);
    }

    /** Prompt asking for numbers of people who should respond to alerts. */
    @RequestMapping(value = safetyGroupPath + addRespondents)
    @ResponseBody
    public Request addRespondersPrompt(@RequestParam(phoneNumber) String inputNumber,
                                       @RequestParam(groupUidParam) String groupUid) throws URISyntaxException {
        return ussdSafetyGroupService.processAddRespondersPrompt(inputNumber, groupUid);
    }

    /** Adds the entered respondent numbers; priorInput carries earlier raw input on retry. */
    @RequestMapping(value = safetyGroupPath + addRespondents + doSuffix)
    @ResponseBody
    public Request addRespondentsToGroup(@RequestParam(value = phoneNumber, required = true) String inputNumber,
                                         @RequestParam(value = groupUidParam, required = true) String groupUid,
                                         @RequestParam(value = userInputParam, required = true) String userInput,
                                         @RequestParam(value = "prior_input", required = false) String priorInput) throws URISyntaxException {
        return ussdSafetyGroupService.processAddRespondentsToGroup(inputNumber, groupUid, userInput, priorInput);
    }

    /** Prompt to reset (and optionally deactivate) the current safety group. */
    @RequestMapping(value = safetyGroupPath + resetSafetyGroup)
    @ResponseBody
    public Request resetPrompt(@RequestParam(value = phoneNumber) String inputNumber) throws URISyntaxException {
        return ussdSafetyGroupService.processResetPrompt(inputNumber);
    }

    /** Performs the reset; deactivate=true also deactivates the group. */
    @RequestMapping(value = safetyGroupPath + resetSafetyGroup + doSuffix)
    @ResponseBody
    public Request resetDo(@RequestParam(value = phoneNumber) String inputNumber,
                           @RequestParam(value = "deactivate", required = false) boolean deactivate,
                           @RequestParam(value = interruptedFlag, required = false) boolean interrupted) throws URISyntaxException {
        return ussdSafetyGroupService.processResetDo(inputNumber, deactivate, interrupted);
    }

    /* SECTION: Handling addresses */

    /** Shows the caller's stored address. */
    @RequestMapping(value = safetyGroupPath + viewAddress)
    @ResponseBody
    public Request viewAddress(@RequestParam String msisdn) throws URISyntaxException {
        return ussdSafetyGroupService.processViewAddress(msisdn);
    }

    /** Multi-step address capture; "field" selects which address field is being entered. */
    @RequestMapping(value = safetyGroupPath + addAddress)
    @ResponseBody
    public Request addAddress(@RequestParam String msisdn,
                              @RequestParam(value = userInputParam, required = false) String fieldValue,
                              @RequestParam(value = "interrupted", required = false) boolean interrupted,
                              @RequestParam(value = "field", required = false) String field) throws URISyntaxException {
        return ussdSafetyGroupService.processAddAddress(msisdn, fieldValue, interrupted, field);
    }

    /** Prompt for changing one field of the stored address. */
    @RequestMapping(value = safetyGroupPath + changeAddress)
    @ResponseBody
    public Request changeAddressPrompt(@RequestParam String msisdn,
                                       @RequestParam(value = "field", required = false) String field) throws URISyntaxException {
        return ussdSafetyGroupService.processChangeAddressPrompt(msisdn, field);
    }

    /** Applies the entered value to the selected address field. */
    @RequestMapping(value = safetyGroupPath + changeAddress + doSuffix)
    @ResponseBody
    public Request changeAddressDo(@RequestParam String msisdn,
                                   @RequestParam(value = userInputParam) String fieldValue,
                                   @RequestParam(value = "interrupted", required = false) boolean interrupted,
                                   @RequestParam(value = "field", required = false) String field) throws URISyntaxException {
        return ussdSafetyGroupService.processChangeAddressDo(msisdn, fieldValue, interrupted, field);
    }

    /** Confirmation prompt for removing the stored address. */
    @RequestMapping(value = safetyGroupPath + removeAddress)
    @ResponseBody
    public Request removeAddress(@RequestParam String msisdn) throws URISyntaxException {
        return ussdSafetyGroupService.processRemoveAddress(msisdn);
    }

    /** Removes the stored address. */
    @RequestMapping(value = safetyGroupPath + removeAddress + doSuffix)
    @ResponseBody
    public Request removeAddressDo(@RequestParam String msisdn,
                                   @RequestParam(value = interruptedFlag, required = false) boolean interrupted) throws URISyntaxException {
        return ussdSafetyGroupService.processRemoveAddressDo(msisdn, interrupted);
    }

    /* SECTION: Handling responses */

    /** Records whether a respondent acknowledged a safety event. */
    @RequestMapping(value = safetyGroupPath + recordResponse + doSuffix)
    @ResponseBody
    public Request recordResponse(@RequestParam(value = phoneNumber) String inputNumber,
                                  @RequestParam(value = entityUidParam) String safetyEventUid,
                                  @RequestParam(value = yesOrNoParam) boolean responded) throws URISyntaxException {
        return ussdSafetyGroupService.processRecordResponse(inputNumber, safetyEventUid, responded);
    }

    /** Records whether the safety event was judged genuine (validity). */
    @RequestMapping(value = safetyGroupPath + recordValidity + doSuffix)
    @ResponseBody
    public Request recordValidity(@RequestParam(value = phoneNumber) String inputNumber,
                                  @RequestParam(value = entityUidParam) String safetyEventUid,
                                  @RequestParam("response") boolean validity) throws URISyntaxException {
        return ussdSafetyGroupService.processRecordValidity(inputNumber, safetyEventUid, validity);
    }
}
/** * Copyright 2007-2015, Kaazing Corporation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaazing.k3po.driver.internal.netty.bootstrap.agrona; import static java.lang.String.format; import static java.lang.Thread.sleep; import static java.nio.ByteOrder.nativeOrder; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.concurrent.TimeUnit.SECONDS; import static org.jboss.netty.buffer.ChannelBuffers.buffer; import static org.jboss.netty.channel.Channels.pipeline; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.kaazing.k3po.driver.internal.netty.channel.ChannelAddressFactory.newChannelAddressFactory; import static org.kaazing.k3po.driver.internal.netty.channel.Channels.flush; import static org.kaazing.k3po.driver.internal.netty.channel.agrona.AgronaChannelAddress.OPTION_READER; import static org.kaazing.k3po.driver.internal.netty.channel.agrona.AgronaChannelAddress.OPTION_WRITER; import static org.mockito.Matchers.any; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.internal.verification.VerificationModeFactory.times; import java.net.URI; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicReference; import org.agrona.MutableDirectBuffer; import org.agrona.concurrent.MessageHandler; import org.agrona.concurrent.UnsafeBuffer; import org.agrona.concurrent.broadcast.BroadcastBufferDescriptor; import org.agrona.concurrent.broadcast.BroadcastReceiver; import org.agrona.concurrent.broadcast.BroadcastTransmitter; import org.agrona.concurrent.broadcast.CopyBroadcastReceiver; import org.agrona.concurrent.ringbuffer.ManyToOneRingBuffer; import org.agrona.concurrent.ringbuffer.RingBufferDescriptor; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelEvent; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.WriteCompletionEvent; import org.junit.Rule; import org.junit.experimental.theories.DataPoints; import org.junit.experimental.theories.Theories; import org.junit.experimental.theories.Theory; import org.junit.rules.DisableOnDebug; import org.junit.rules.TestRule; import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.kaazing.k3po.driver.internal.netty.bootstrap.ClientBootstrapRule; import org.kaazing.k3po.driver.internal.netty.channel.ChannelAddress; import org.kaazing.k3po.driver.internal.netty.channel.ChannelAddressFactory; import org.kaazing.k3po.driver.internal.netty.channel.FlushEvent; import org.kaazing.k3po.driver.internal.netty.channel.SimpleChannelHandler; import org.kaazing.k3po.driver.internal.netty.channel.agrona.BroadcastTransmitterChannelWriter; import org.kaazing.k3po.driver.internal.netty.channel.agrona.ChannelReader; import org.kaazing.k3po.driver.internal.netty.channel.agrona.ChannelWriter; import org.kaazing.k3po.driver.internal.netty.channel.agrona.CopyBroadcastReceiverChannelReader; import org.kaazing.k3po.driver.internal.netty.channel.agrona.RingBufferChannelReader; import 
org.kaazing.k3po.driver.internal.netty.channel.agrona.RingBufferChannelWriter;
import org.mockito.InOrder;

/**
 * Theory-based integration test of the Agrona client bootstrap: connects a
 * Netty channel over an in-memory Agrona buffer pair, writes a "ping",
 * manually echoes it back as a "pong", and verifies both the payload and the
 * exact sequence of channel events seen by a spied handler. Runs once per
 * combination of writer strategy (ping direction) and reader strategy (pong
 * direction).
 */
@RunWith(Theories.class)
public class AgronaClientBootstrapTest {

    private static final int BUFFER_CAPACITY = 4096;
    // Agrona requires trailer space beyond the data capacity for its descriptors.
    private static final int BROADCAST_BUFFER_TOTAL_LENGTH = BUFFER_CAPACITY + BroadcastBufferDescriptor.TRAILER_LENGTH;
    private static final int RING_BUFFER_TOTAL_LENGTH = BUFFER_CAPACITY + RingBufferDescriptor.TRAILER_LENGTH;

    // Transport used for data flowing client -> test (ping).
    private enum ReaderStrategy {
        MANY_TO_ONE_RING_BUFFER, BROADCAST_RECEIVER
    }

    // Transport used for data flowing test -> client (pong).
    private enum WriterStrategy {
        MANY_TO_ONE_RING_BUFFER, BROADCAST_TRANSMITTER
    }

    @DataPoints
    public static final Set<ReaderStrategy> READER_STRATEGIES = EnumSet.allOf(ReaderStrategy.class);

    @DataPoints
    public static final Set<WriterStrategy> WRITER_STRATEGIES = EnumSet.allOf(WriterStrategy.class);

    @Rule
    public final ClientBootstrapRule bootstrap = new ClientBootstrapRule("agrona");

    @Rule
    public final TestRule timeout = new DisableOnDebug(new Timeout(5, SECONDS));

    @Theory
    public void shouldConnectEchoThenClose(WriterStrategy pingStrategy, ReaderStrategy pongStrategy) throws Exception {
        // Counts pongs observed by the client handler so the test can block
        // until the echo arrives.
        final AtomicInteger pongsReceived = new AtomicInteger();
        SimpleChannelHandler client = new SimpleChannelHandler() {
            @Override
            public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
                pongsReceived.incrementAndGet();
                super.messageReceived(ctx, e);
            }
        };
        // Spy so mockito can verify the exact sequence of handler callbacks.
        SimpleChannelHandler clientSpy = spy(client);

        bootstrap.setPipeline(pipeline(clientSpy));

        // Build the ping-direction transport. Reader and writer share the same
        // underlying buffer so what the channel writes the test can read.
        ChannelReader pingReader;
        ChannelWriter pingWriter;
        switch (pingStrategy) {
        case MANY_TO_ONE_RING_BUFFER:
            UnsafeBuffer manyToOnePingBuffer = new UnsafeBuffer(new byte[RING_BUFFER_TOTAL_LENGTH]);
            pingReader = new RingBufferChannelReader(new ManyToOneRingBuffer(manyToOnePingBuffer));
            pingWriter = new RingBufferChannelWriter(new ManyToOneRingBuffer(manyToOnePingBuffer));
            break;
        case BROADCAST_TRANSMITTER:
            UnsafeBuffer broadcastPingBuffer = new UnsafeBuffer(new byte[BROADCAST_BUFFER_TOTAL_LENGTH]);
            BroadcastReceiver receiver = new BroadcastReceiver(broadcastPingBuffer);
            pingReader = new CopyBroadcastReceiverChannelReader(new CopyBroadcastReceiver(receiver));
            pingWriter = new BroadcastTransmitterChannelWriter(new BroadcastTransmitter(broadcastPingBuffer));
            break;
        default:
            throw new IllegalArgumentException(format("Unexpected writer strategy %s", pingStrategy));
        }

        // Build the pong-direction transport the same way.
        ChannelReader pongReader;
        ChannelWriter pongWriter;
        switch (pongStrategy) {
        case MANY_TO_ONE_RING_BUFFER:
            UnsafeBuffer manyToOnePongBuffer = new UnsafeBuffer(new byte[RING_BUFFER_TOTAL_LENGTH]);
            pongReader = new RingBufferChannelReader(new ManyToOneRingBuffer(manyToOnePongBuffer));
            pongWriter = new RingBufferChannelWriter(new ManyToOneRingBuffer(manyToOnePongBuffer));
            break;
        case BROADCAST_RECEIVER:
            UnsafeBuffer broadcastPongBuffer = new UnsafeBuffer(new byte[BROADCAST_BUFFER_TOTAL_LENGTH]);
            BroadcastReceiver pongReceiver = new BroadcastReceiver(broadcastPongBuffer);
            pongReader = new CopyBroadcastReceiverChannelReader(new CopyBroadcastReceiver(pongReceiver));
            BroadcastTransmitter transmitter = new BroadcastTransmitter(broadcastPongBuffer);
            pongWriter = new BroadcastTransmitterChannelWriter(transmitter);
            break;
        default:
            throw new IllegalArgumentException(format("Unexpected reader strategy %s", pongStrategy));
        }

        // The channel READS pongs and WRITES pings; the test side holds the
        // matching pingReader / pongWriter ends (hence the crisscross below).
        ChannelAddressFactory channelAddressFactory = newChannelAddressFactory();
        URI location = URI.create("agrona://stream/bidirectional");
        Map<String, Object> options = new HashMap<>();
        options.put(OPTION_READER, pongReader);
        options.put(OPTION_WRITER, pingWriter);
        ChannelAddress channelAddress = channelAddressFactory.newChannelAddress(location, options);

        // Ping payload: 4-byte type id (0x01) followed by UTF-8 text.
        ChannelBuffer ping = buffer(nativeOrder(), 256);
        ping.writeInt(0x01);
        ping.writeBytes("Hello, world".getBytes(UTF_8));

        Channel channel = bootstrap.connect(channelAddress).syncUninterruptibly().getChannel();
        channel.write(ping).syncUninterruptibly();
        flush(channel);

        // Drain the ping from the test side of the buffer.
        final AtomicReference<Message> pongRef = new AtomicReference<>();
        final MessageHandler messageHandler = new MessageHandler() {
            @Override
            public void onMessage(int msgTypeId, MutableDirectBuffer buffer, int index, int length) {
                Message pong = new Message();
                pong.typeId = msgTypeId;
                pong.payload = buffer.getStringWithoutLengthUtf8(index, length);
                pongRef.set(pong);
            }
        };
        // Busy-wait (bounded by the 5s timeout rule) until the ping arrives.
        while (pingReader.read(messageHandler) == 0) {
            sleep(1);
        }
        Message pong = pongRef.get();
        assertNotNull(pong);

        // Echo the same payload back as the pong.
        UnsafeBuffer srcBuffer = new UnsafeBuffer(pong.payload.getBytes(UTF_8));
        pongWriter.write(pong.typeId, srcBuffer, 0, srcBuffer.capacity());
        while (pongsReceived.get() == 0) {
            sleep(1);
        }

        channel.close().syncUninterruptibly();
        bootstrap.shutdown();

        assertEquals(0x01, pong.typeId);
        assertEquals("Hello, world", pong.payload);

        // Event-count and ordering verification: the counts (9 upstream /
        // 4 downstream) pin the full channel lifecycle observed by the spy.
        verify(clientSpy, times(9)).handleUpstream(any(ChannelHandlerContext.class), any(ChannelEvent.class));
        verify(clientSpy, times(4)).handleDownstream(any(ChannelHandlerContext.class), any(ChannelEvent.class));

        InOrder childConnect = inOrder(clientSpy);
        childConnect.verify(clientSpy).channelOpen(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));
        childConnect.verify(clientSpy).connectRequested(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));
        childConnect.verify(clientSpy).channelBound(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));
        childConnect.verify(clientSpy).channelConnected(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));

        InOrder childWrite = inOrder(clientSpy);
        childWrite.verify(clientSpy).writeRequested(any(ChannelHandlerContext.class), any(MessageEvent.class));
        childWrite.verify(clientSpy).flushRequested(any(ChannelHandlerContext.class), any(FlushEvent.class));
        childWrite.verify(clientSpy).closeRequested(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));

        // asynchronous
        verify(clientSpy).writeComplete(any(ChannelHandlerContext.class), any(WriteCompletionEvent.class));
        verify(clientSpy).flushed(any(ChannelHandlerContext.class), any(FlushEvent.class));

        InOrder childRead = inOrder(clientSpy);
        childRead.verify(clientSpy).messageReceived(any(ChannelHandlerContext.class), any(MessageEvent.class));
        childRead.verify(clientSpy).channelClosed(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));

        InOrder childClose = inOrder(clientSpy);
        childClose.verify(clientSpy).channelDisconnected(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));
        childClose.verify(clientSpy).channelUnbound(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));
        childClose.verify(clientSpy).channelClosed(any(ChannelHandlerContext.class), any(ChannelStateEvent.class));

        verifyNoMoreInteractions(clientSpy);
    }

    // Mutable holder for a decoded message (type id + UTF-8 payload).
    private static final class Message {
        public int typeId;
        public String payload;
    }
}
package modules.overlays; import ingredients.bootstrap.RandomGroupBootstrapIngredient; import interfaces.Sizeable; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import messages.ConnectionRequestMessage; import messages.DisconnectMessage; import messages.Message; import messages.SeedNodeMultipleTargetsReplyMessage; import messages.swapLinks.OnlyInlinks; import messages.swapLinks.OnlyOutlinks; import messages.swapLinks.RandomWalk; import messages.swapLinks.SwitchOutlinks; import modules.P2PClient; import experiment.frameworks.NodeAddress; public class SwapLinks extends OverlayModule<Integer[]> { // for each neighbor we count its inlinks and outlinks final int wl; // walk length final long capacity; public SwapLinks(final P2PClient client, final int wl, final int groupSize, final long _minCapacity, final long _minUploadBandwidth, final Random r) { super(client, r); this.wl = wl; final long minCapacity = _minCapacity; final long minUploadBandwidth = _minUploadBandwidth; capacity = ((long) ((double) network.getUploadBandwidth() / (double) minUploadBandwidth) * minCapacity); addIngredient(new RandomGroupBootstrapIngredient(groupSize, new Random(r.nextLong())), client); } private void addLink(final NodeAddress n, final boolean isOutLink) { final Integer[] l = neighborNodes.get(n); if (l == null) { // final Integer[] stats = { 0, 0 }; neighborNodes.put(n, stats); } neighborNodes.get(n)[isOutLink ? 1 : 0]++; } private void addInlink(final NodeAddress n) { addLink(n, false); } private void addOutlink(final NodeAddress n) { addLink(n, true); } private void removeLink(final NodeAddress n, final boolean isOutLink) { final Integer[] l = neighborNodes.get(n); if (l != null) { // if there exists a link l[isOutLink ? 
1 : 0]--; if (l[0] == 0 && l[1] == 0) { // if n is no longer a neighbor neighborNodes.remove(n); } } } private void removeOutlink(final NodeAddress n) { removeLink(n, true); } private void removeInlink(final NodeAddress n) { removeLink(n, false); } // only handles churn @Override public void nextCycle() { super.nextCycle(); updateNeighbors(); // printOutlinks(); } /** * removes dead links and finds new ones */ @Override protected void updateNeighbors() { final Set<NodeAddress> neighbors = neighborNodes.keySet(); final Collection<NodeAddress> neighborsToReestablish = new LinkedList<NodeAddress>(); for (final NodeAddress a : neighbors) { if (!neighbors.contains(a)) { continue; } if (network.isUp(a)) { continue; } neighborsToReestablish.add(a); } for (final NodeAddress a : neighborsToReestablish) { reestablishDeadLinks(a); } } private void printOutlinks() { // DEBUG System.out.print(network.getAddress().toString() + " to ["); for (final NodeAddress n : neighborNodes.keySet()) { if (neighborNodes.get(n)[1] != 0) { System.out.print(n.toString() + " , "); } } System.out.print("] ["); for (final NodeAddress n : neighborNodes.keySet()) { if (neighborNodes.get(n)[1] != 0) { System.out.print(neighborNodes.get(n)[1] + " , "); } } System.out.println("]"); } /** * removes the neighbor a and replaces the links * * @param a */ private void reestablishDeadLinks(final NodeAddress a) { if (!neighborNodes.containsKey(a)) { return; } final Integer[] links = neighborNodes.get(a); removeNeighbor(a); try { // works if the walk required to find new links is possible for (int i = 0; i < links[1]; i++) { // find new outlinks network.send(new OnlyInlinks(getMessageTag(), network.getAddress(), getRandomInlink(network.getAddress()), network .getAddress(), wl, OnlyInlinks.Algorithm.CHURN)); } for (int i = 0; i < links[0]; i++) { // find new inlinks network.send(new OnlyOutlinks(getMessageTag(), network.getAddress(), getRandomOutlink(network.getAddress()), network .getAddress(), wl)); } } catch 
(final NoSuchLink e) { // tell neighbors to disconnect me, and // then re-join for (final NodeAddress x : neighborNodes.keySet()) { network.send(new DisconnectMessage(getMessageTag(), network.getAddress(), x)); } reConnect(); } } @Override public void handleMessage(final Message message) { super.handleMessage(message); if (message instanceof ConnectionRequestMessage) { if (((SwapLinksInfo) ((ConnectionRequestMessage<?>) message).payload).isOutLink) { addInlink(message.sourceId); } else { addOutlink(message.sourceId); } return; } // joins to the graph if (message instanceof SeedNodeMultipleTargetsReplyMessage) { final SeedNodeMultipleTargetsReplyMessage msg = ((SeedNodeMultipleTargetsReplyMessage) message); final List<NodeAddress> initialNodes = new ArrayList<NodeAddress>(msg.targets); for (int i = 0; i < capacity; i++) { // initiate onlyInlinks (join) an // amount of times according to the // capacity final NodeAddress target = initialNodes.get(r.nextInt(initialNodes.size())); // start // the // walk // from // random // nodes network.send(new OnlyInlinks(getMessageTag(), network.getAddress(), target, network.getAddress(), wl, OnlyInlinks.Algorithm.JOIN)); } return; } if (message instanceof DisconnectMessage) { final DisconnectMessage msg = ((DisconnectMessage) message); reestablishDeadLinks(msg.sourceId); return; } if (message instanceof OnlyInlinks) { final OnlyInlinks msg = ((OnlyInlinks) message); if (msg.remainingSteps > 1) { // keep walking try { network.send(new OnlyInlinks(getMessageTag(), network.getAddress(), getRandomInlink(msg.origin), msg.origin, msg.remainingSteps - 1, msg.algo)); } catch (final NoSuchLink e) { // stop the walk if there is no valid // step onlyInlinksFinalStep(msg); } } else { // finished walking onlyInlinksFinalStep(msg); } return; } // Deals with the OnlyOutlinks random walk if (message instanceof OnlyOutlinks) { final OnlyOutlinks msg = ((OnlyOutlinks) message); if (msg.remainingSteps > 1) { try { // we still have more steps to go 
in the random walk so we keep going network.send(new OnlyOutlinks(getMessageTag(), network.getAddress(), getRandomOutlink(msg.origin), msg.origin, msg.remainingSteps - 1)); } catch (final NoSuchLink e) { // the current node has no outlinks so we end the walk here and make // the switch randomInlinkSwitchOutlinks(msg); } } else { // we finished the random walk successfully so we make the switch randomInlinkSwitchOutlinks(msg); } return; } // Instruct the node to switch outlinks from the source of the message to // some node if (message instanceof SwitchOutlinks) { final SwitchOutlinks msg = ((SwitchOutlinks) message); removeOutlink(msg.sourceId); addOutlink(msg.target); // notify the new outlink we have connected to it network.send(new ConnectionRequestMessage<SwapLinksInfo>(getMessageTag(), network.getAddress(), msg.target, new SwapLinksInfo(true))); return; } } // At the end of OnlyInlinks walk, we connect the origin to the end node private void onlyInlinksFinalStep(final OnlyInlinks msg) { addInlink(msg.origin); network.send(new ConnectionRequestMessage<SwapLinksInfo>(getMessageTag(), network.getAddress(), msg.origin, new SwapLinksInfo( false))); if (msg.algo == OnlyInlinks.Algorithm.JOIN) { // also if the walk was caused by JOIN algorithm // (and not because of CHURN, for example) // the origin steals an inlink from the end node randomInlinkSwitchOutlinks(msg); } } /** * Take a random inlink and make it switch outlinks from us to the origin of * the walk * * @param msg * A message of a certain random walk. 
could be OnlyInlinks or * OnlyOutlinks */ private void randomInlinkSwitchOutlinks(final RandomWalk msg) { try { final NodeAddress last = getRandomInlink(msg.origin); removeInlink(last); network.send(new SwitchOutlinks(getMessageTag(), network.getAddress(), last, msg.origin)); } catch (final NoSuchLink e) { addOutlink(msg.origin); network.send(new ConnectionRequestMessage<SwapLinksInfo>(getMessageTag(), network.getAddress(), msg.origin, new SwapLinksInfo(true))); } } class NoSuchLink extends Exception { /** * */ private static final long serialVersionUID = 1L; } // gets a type of link and returns a random node connected to this node // by the specified link type, that is different from the origin // Note that the node is random with bias according to amount of connections // we have // to that node private NodeAddress getRandomLink(final boolean isOut, final NodeAddress origin) throws NoSuchLink { // take all the nodes that are connected to this node with the specified // link type final List<NodeAddress> subSet = filter(neighborNodes, new Boolean(isOut)); while (subSet.remove(origin)) { // the loop is necessary because more than one copy of origin might be in // the list } if (subSet.isEmpty()) { // if no node qualified then we throw an exception throw new NoSuchLink(); } // we return a random node from the qualified set return subSet.get(r.nextInt(subSet.size())); } private NodeAddress getRandomInlink(final NodeAddress origin) throws NoSuchLink { return getRandomLink(false, origin); } private NodeAddress getRandomOutlink(final NodeAddress origin) throws NoSuchLink { return getRandomLink(true, origin); } // return all inlinked nodes or all outlinked nodes private static List<NodeAddress> filter(final Map<NodeAddress, Integer[]> fullMap, final Boolean filter) { final List<NodeAddress> subSet = new ArrayList<NodeAddress>(); for (final NodeAddress n : fullMap.keySet()) { final int nodeLinksNum = fullMap.get(n)[filter ? 
1 : 0]; for (int i = 0; i < nodeLinksNum; i++) { // we insert the node to the list a number of times // according to the amount of connections to that node subSet.add(n); } } return subSet; } } class SwapLinksInfo implements Sizeable { public final boolean isOutLink; public SwapLinksInfo(final boolean isOutLink) { super(); this.isOutLink = isOutLink; } @Override public String toString() { return isOutLink ? "outlink" : "inlink"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (isOutLink ? 1231 : 1237); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SwapLinksInfo other = (SwapLinksInfo) obj; return (isOutLink == other.isOutLink); } @Override public long getSimulatedSize() { return Integer.SIZE; } }
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
//  2010/04/11  Martin D. Flynn
//     -Initial release (cloned from StatusCodeInfo.java)
//  2012/12/24  Martin D. Flynn
//     -Change "form" target to "_self" (rather than "_top")
// ----------------------------------------------------------------------------
package org.opengts.war.track.page;

import java.util.*;
import java.io.*;

import javax.servlet.*;
import javax.servlet.http.*;

import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;

import org.opengts.war.tools.*;
import org.opengts.war.track.*;

/**
 * Track-war admin page for viewing, editing, creating and deleting Driver
 * records for the current account. Renders either a driver selection list
 * or a single-driver view/edit form, depending on the submitted command.
 */
public class DriverInfo
    extends WebPageAdaptor
    implements Constants
{

    // ------------------------------------------------------------------------
    // Parameters

    // forms
    public static final String FORM_DRIVER_SELECT   = "DriverInfoSelect";
    public static final String FORM_DRIVER_EDIT     = "DriverInfoEdit";
    public static final String FORM_DRIVER_NEW      = "DriverInfoNew";

    // commands
    public static final String COMMAND_INFO_UPDATE  = "update";
    public static final String COMMAND_INFO_SELECT  = "select";
    public static final String COMMAND_INFO_NEW     = "new";

    // submit
    public static final String PARM_SUBMIT_EDIT     = "c_subedit";
    public static final String PARM_SUBMIT_VIEW     = "c_subview";
    public static final String PARM_SUBMIT_CHG      = "c_subchg";
    public static final String PARM_SUBMIT_DEL      = "c_subdel";
    public static final String PARM_SUBMIT_NEW      = "c_subnew";

    // buttons
    public static final String PARM_BUTTON_CANCEL   = "u_btncan";
    public static final String PARM_BUTTON_BACK     = "u_btnbak";

    // parameters
    public static final String PARM_NEW_NAME        = "d_newid";
    public static final String PARM_DRIVER_SELECT   = "d_driver";
    public static final String PARM_DRIVER_NAME     = "d_fullnm";
    public static final String PARM_INFORMAL_NAME   = "d_nicknm";
    public static final String PARM_CONTACT_PHONE   = "d_phone";
    public static final String PARM_CONTACT_EMAIL   = "d_email";
    public static final String PARM_LICENSE_TYPE    = "d_lictype";
    public static final String PARM_LICENSE_NUMBER  = "d_license";
    public static final String PARM_LICENSE_EXPIRE  = "d_licexp";
    public static final String PARM_BADGE_ID        = "d_badge";
    public static final String PARM_FULL_ADDRESS    = "d_address";
    public static final String PARM_BIRTHDATE       = "d_birthdt"; // TODO
    public static final String PARM_DEVICE_ID       = "d_devid";

    // ------------------------------------------------------------------------
    // WebPage interface

    public DriverInfo()
    {
        this.setBaseURI(RequestProperties.TRACK_BASE_URI());
        this.setPageName(PAGE_DRIVER_INFO);
        this.setPageNavigation(new String[] { PAGE_LOGIN, PAGE_MENU_TOP });
        this.setLoginRequired(true);
    }

    // ------------------------------------------------------------------------

    public String getMenuName(RequestProperties reqState)
    {
        return MenuBar.MENU_ADMIN;
    }

    public String getMenuDescription(RequestProperties reqState, String parentMenuName)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(DriverInfo.class);
        return super._getMenuDescription(reqState,i18n.getString("DriverInfo.editMenuDesc","View/Edit Driver Information"));
    }

    public String getMenuHelp(RequestProperties reqState, String parentMenuName)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(DriverInfo.class);
        return super._getMenuHelp(reqState,i18n.getString("DriverInfo.editMenuHelp","View and Edit Driver information"));
    }

    // ------------------------------------------------------------------------

    public String getNavigationDescription(RequestProperties reqState)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(DriverInfo.class);
        return super._getNavigationDescription(reqState,i18n.getString("DriverInfo.navDesc","Driver"));
    }

    public String getNavigationTab(RequestProperties reqState)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(DriverInfo.class);
        return i18n.getString("DriverInfo.navTab","Driver Admin");
    }

    // ------------------------------------------------------------------------

    /**
     * Parses a "yyyy/mm/dd" string into a day number.
     * Returns 0 for a blank string, -1 for an unparsable string.
     */
    private static long GetDayNumber(String ymd)
    {
        if (StringTools.isBlank(ymd)) {
            return 0L;
        } else {
            DayNumber dn = DayNumber.parseDayNumber(ymd);
            return (dn != null)? dn.getDayNumber() : -1L;
        }
    }

    /** Formats a day number as "yyyy/mm/dd"; negative values yield "". */
    private static String FormatDayNumber(long dn)
    {
        if (dn < 0L) {
            return "";
        } else {
            return (new DayNumber(dn)).format(DayNumber.DATE_FORMAT_YMD_1);
        }
    }

    // ------------------------------------------------------------------------

    /** Substitutes a non-breaking space for blank table-cell text. */
    private static String Filter(String s)
    {
        if (StringTools.isBlank(s)) {
            return "&nbsp;";
        } else {
            return s;
        }
    }

    /**
     * Processes the submitted command (select/new/update/delete) against the
     * Driver table, then renders either the driver list or the view/edit
     * form for the selected driver.
     */
    public void writePage(
        final RequestProperties reqState,
        String pageMsg)
        throws IOException
    {
        final HttpServletRequest request = reqState.getHttpServletRequest();
        final PrivateLabel privLabel = reqState.getPrivateLabel(); // never null
        final I18N    i18n       = privLabel.getI18N(DriverInfo.class);
        final Locale  locale     = reqState.getLocale();
        final Account currAcct   = reqState.getCurrentAccount(); // never null
        final String  currAcctID = currAcct.getAccountID(); // never null
        final User    currUser   = reqState.getCurrentUser(); // may be null
        final String  pageName   = this.getPageName();
        String m = pageMsg;
        boolean error = false;

        /* List of drivers */
        OrderedSet<String> driverList = null;
        try {
            driverList = Driver.getDriverIDsForAccount(currAcctID);
        } catch (DBException dbe) {
            // fall back to an empty list on DB failure
            driverList = new OrderedSet<String>();
        }

        /* selected Driver */
        String selDriverID = AttributeTools.getRequestString(reqState.getHttpServletRequest(), PARM_DRIVER_SELECT, "");
        if (!StringTools.isBlank(selDriverID) && !driverList.contains(selDriverID)) {
            // reject a selection that is not in this account's list
            selDriverID = "";
        }

        /* driver db */
        Driver selDriver = null;
        try {
            selDriver = !selDriverID.equals("")? Driver.getDriver(currAcct, selDriverID) : null; // may still be null
        } catch (DBException dbe) {
            // ignore
        }

        /* ACL allow edit/view */
        boolean allowNew    = privLabel.hasAllAccess(currUser, this.getAclName());
        boolean allowDelete = allowNew;
        boolean allowEdit   = allowNew  || privLabel.hasWriteAccess(currUser, this.getAclName());
        boolean allowView   = allowEdit || privLabel.hasReadAccess(currUser, this.getAclName());

        /* submit buttons */
        String submitEdit   = AttributeTools.getRequestString(request, PARM_SUBMIT_EDIT, "");
        String submitView   = AttributeTools.getRequestString(request, PARM_SUBMIT_VIEW, "");
        String submitChange = AttributeTools.getRequestString(request, PARM_SUBMIT_CHG , "");
        String submitNew    = AttributeTools.getRequestString(request, PARM_SUBMIT_NEW , "");
        String submitDelete = AttributeTools.getRequestString(request, PARM_SUBMIT_DEL , "");

        /* command */
        String driverCmd     = reqState.getCommandName();
        boolean selectDriver = driverCmd.equals(COMMAND_INFO_SELECT);
        boolean newDriver    = driverCmd.equals(COMMAND_INFO_NEW);
        boolean updateDriver = driverCmd.equals(COMMAND_INFO_UPDATE);
        boolean deleteDriver = false;

        /* ui display */
        boolean uiList = false;
        boolean uiEdit = false;
        boolean uiView = false;

        /* config */
        final boolean showDeviceID = privLabel.getBooleanProperty(PrivateLabel.PROP_DriverInfo_showDeviceID,false);

        /* sub-command */
        String newDriverID = null;
        if (newDriver) {
            if (!allowNew) {
                newDriver = false; // not authorized
            } else {
                newDriverID = AttributeTools.getRequestString(request,PARM_NEW_NAME,"").trim();
                newDriverID = newDriverID.toLowerCase();
                if (StringTools.isBlank(newDriverID)) {
                    m = i18n.getString("DriverInfo.enterNewID","Please enter a valid new Driver ID."); // UserErrMsg
                    error = true;
                    newDriver = false;
                } else
                if (!WebPageAdaptor.isValidID(reqState,/*PrivateLabel.PROP_DriverInfo_validateNewIDs,*/newDriverID)) {
                    m = i18n.getString("DriverInfo.invalidIDChar","ID contains invalid characters"); // UserErrMsg
                    error = true;
                    newDriver = false;
                }
            }
        } else
        if (updateDriver) {
            if (!allowEdit) {
                // not authorized to update drivers
                updateDriver = false;
            } else
            if (!SubmitMatch(submitChange,i18n.getString("DriverInfo.change","Change"))) {
                updateDriver = false;
            } else
            if (selDriver == null) {
                // should not occur
                m = i18n.getString("DriverInfo.unableToUpdate","Unable to update Driver, ID not found"); // UserErrMsg
                error = true;
                updateDriver = false;
            }
        } else
        if (selectDriver) {
            if (SubmitMatch(submitDelete,i18n.getString("DriverInfo.delete","Delete"))) {
                if (!allowDelete) {
                    deleteDriver = false; // not authorized
                } else
                if (selDriver == null) {
                    m = i18n.getString("DriverInfo.pleaseSelectDriver","Please select a Driver"); // UserErrMsg
                    error = true;
                    deleteDriver = false; // not selected
                } else {
                    deleteDriver = true;
                }
            } else
            if (SubmitMatch(submitEdit,i18n.getString("DriverInfo.edit","Edit"))) {
                if (!allowEdit) {
                    uiEdit = false; // not authorized
                } else
                if (selDriver == null) {
                    m = i18n.getString("DriverInfo.pleaseSelectDriver","Please select a Driver"); // UserErrMsg
                    error = true;
                    uiEdit = false; // not selected
                } else {
                    uiEdit = true;
                }
            } else
            if (SubmitMatch(submitView,i18n.getString("DriverInfo.view","View"))) {
                if (!allowView) {
                    uiView = false; // not authorized
                } else
                if (selDriver == null) {
                    m = i18n.getString("DriverInfo.pleaseSelectDriver","Please select a Driver"); // UserErrMsg
                    error = true;
                    uiView = false; // not selected
                } else {
                    uiView = true;
                }
            } else {
                uiList = true;
            }
        } else {
            uiList = true;
        }

        /* delete Driver? */
        if (deleteDriver) {
            try {
                Driver.Key driverKey = (Driver.Key)selDriver.getRecordKey();
                Print.logWarn("Deleting Driver: " + driverKey);
                driverKey.delete(true); // will also delete dependencies
                selDriverID = "";
                selDriver = null;
                // select another driver
                driverList = Driver.getDriverIDsForAccount(currAcctID);
                if (!ListTools.isEmpty(driverList)) {
                    selDriverID = driverList.get(0);
                    try {
                        selDriver = !selDriverID.equals("")? Driver.getDriver(currAcct,selDriverID) : null; // may still be null
                    } catch (DBException dbe) {
                        // ignore
                    }
                }
            } catch (DBException dbe) {
                Print.logException("Deleting Driver", dbe);
                m = i18n.getString("DriverInfo.errorDelete","Internal error deleting Driver"); // UserErrMsg
                error = true;
            }
            uiList = true;
        }

        /* new Driver? */
        if (newDriver) {
            boolean createDriverOK = true;
            for (int u = 0; u < driverList.size(); u++) {
                if (newDriverID.equalsIgnoreCase(driverList.get(u))) {
                    // case-insensitive duplicate check against existing IDs
                    m = i18n.getString("DriverInfo.alreadyExists","This Driver already exists"); // UserErrMsg
                    error = true;
                    createDriverOK = false;
                    break;
                }
            }
            if (createDriverOK) {
                try {
                    Driver driver = Driver.createNewDriver(currAcct, newDriverID); // already saved
                    driverList = Driver.getDriverIDsForAccount(currAcctID);
                    selDriver = driver;
                    selDriverID = driver.getDriverID();
                    Print.logInfo("Created driver '%s'", selDriverID);
                    m = i18n.getString("DriverInfo.createdDriver","New Driver has been created"); // UserErrMsg
                } catch (DBException dbe) {
                    Print.logException("Creating Driver", dbe);
                    m = i18n.getString("DriverInfo.errorCreate","Internal error creating Driver"); // UserErrMsg
                    error = true;
                }
            }
            uiList = true;
        }

        /* change/update the Driver info? */
        if (updateDriver) {
            selDriver.clearChanged();
            String driverDesc    = AttributeTools.getRequestString(request, PARM_DRIVER_NAME   ,"");
            String nickName      = AttributeTools.getRequestString(request, PARM_INFORMAL_NAME ,"");
            String contactPhone  = AttributeTools.getRequestString(request, PARM_CONTACT_PHONE ,"");
            String contactEmail  = AttributeTools.getRequestString(request, PARM_CONTACT_EMAIL ,"");
            String licenseType   = AttributeTools.getRequestString(request, PARM_LICENSE_TYPE  ,"");
            String licenseNumber = AttributeTools.getRequestString(request, PARM_LICENSE_NUMBER,"");
            String licenseExpire = AttributeTools.getRequestString(request, PARM_LICENSE_EXPIRE,"");
            String badgeID       = AttributeTools.getRequestString(request, PARM_BADGE_ID      ,"");
            String fullAddress   = AttributeTools.getRequestString(request, PARM_FULL_ADDRESS  ,"");
            String deviceID      = showDeviceID? AttributeTools.getRequestString(request, PARM_DEVICE_ID, null) : null;
          //String birthDate     = AttributeTools.getRequestString(request, PARM_BIRTHDATE     ,"");
            try {
                boolean saveOK = true;
                // only set fields that actually changed (keeps clearChanged accurate)
                // description
                if (!driverDesc.equals(selDriver.getDescription())) {
                    selDriver.setDescription(driverDesc);
                }
                // nickname
                if (!nickName.equals(selDriver.getDisplayName())) {
                    selDriver.setDisplayName(nickName);
                }
                // contact
                if (!contactPhone.equals(selDriver.getContactPhone())) {
                    selDriver.setContactPhone(contactPhone);
                }
                if (!contactEmail.equals(selDriver.getContactEmail())) {
                    selDriver.setContactEmail(contactEmail);
                }
                // license
                if (!licenseType.equals(selDriver.getLicenseType())) {
                    selDriver.setLicenseType(licenseType);
                }
                if (!licenseNumber.equals(selDriver.getLicenseNumber())) {
                    selDriver.setLicenseNumber(licenseNumber);
                }
                // -1 (unparsable date) is deliberately skipped
                long licenseExpDN = GetDayNumber(licenseExpire);
                if ((licenseExpDN >= 0L) && (licenseExpDN != selDriver.getLicenseExpire())) {
                    selDriver.setLicenseExpire(licenseExpDN);
                }
                // badge
                if (!badgeID.equals(selDriver.getBadgeID())) {
                    selDriver.setBadgeID(badgeID);
                }
                // address
                if (!fullAddress.equals(selDriver.getAddress())) {
                    selDriver.setAddress(fullAddress);
                }
                // deviceID
                if (showDeviceID && (deviceID != null) && !deviceID.equals(selDriver.getDeviceID())) {
                    selDriver.setDeviceID(deviceID);
                }
                // save
                if (saveOK) {
                    selDriver.save();
                    m = i18n.getString("DriverInfo.driverUpdated","Driver information updated"); // UserErrMsg
                } else {
                    // should stay on this page
                }
            } catch (Throwable t) {
                Print.logException("Updating Driver", t);
                m = i18n.getString("DriverInfo.errorUpdate","Internal error updating Driver"); // UserErrMsg
                error = true;
              //return;
            }
            uiList = true;
        }

        /* Style */
        HTMLOutput HTML_CSS = new HTMLOutput() {
            public void write(PrintWriter out) throws IOException {
                String cssDir = DriverInfo.this.getCssDirectory();
                WebPageAdaptor.writeCssLink(out, reqState, "DriverInfo.css", cssDir);
            }
        };

        /* JavaScript */
        HTMLOutput HTML_JS = new HTMLOutput() {
            public void write(PrintWriter out) throws IOException {
                MenuBar.writeJavaScript(out, pageName, reqState);
                JavaScriptTools.writeJSInclude(out, JavaScriptTools.qualifyJSFileRef(SORTTABLE_JS), request);
            }
        };

        /* Content */
        final OrderedSet<String> _driverList = driverList;
        final Driver  _selDriver   = selDriver;
        final boolean _allowEdit   = allowEdit;
        final boolean _allowView   = allowView;
        final boolean _allowDelete = allowDelete;
        final boolean _allowNew    = allowNew;
        final boolean _uiEdit      = _allowEdit && uiEdit;
        final boolean _uiView      = _uiEdit || uiView;
        final boolean _uiList      = uiList || (!_uiEdit && !_uiView);
        HTMLOutput HTML_CONTENT = null;
        if (_uiList) {
            // default the selection to the first driver when none was chosen
            final String _selDriverID = (selDriverID.equals("") && (driverList.size() > 0))? driverList.get(0) : selDriverID;
            HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
                public void write(PrintWriter out) throws IOException {
                    String pageName  = DriverInfo.this.getPageName();
                    // frame header
                    String menuURL   = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
                    String editURL   = DriverInfo.this.encodePageURL(reqState);
                    String selectURL = DriverInfo.this.encodePageURL(reqState);
                    String newURL    = DriverInfo.this.encodePageURL(reqState);
                    String frameTitle = _allowEdit?
                        i18n.getString("DriverInfo.viewEditDriver","View/Edit Driver Information") :
                        i18n.getString("DriverInfo.viewDriver","View Driver Information");
                    out.write("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>\n");
                    out.write("<hr>\n");

                    // DriverInfo selection table (Select, DriverInfo ID, DriverInfo Name)
                    out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("DriverInfo.selectDriver","Select a Driver")+":</h1>\n");
                    out.write("<div style='margin-left:25px;'>\n");
                    out.write("<form name='"+FORM_DRIVER_SELECT+"' method='post' action='"+selectURL+"' target='_self'>"); // target='_top'
                    out.write("<input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_SELECT+"'/>");
                    out.write("<table class='"+CommonServlet.CSS_ADMIN_SELECT_TABLE+"' cellspacing=0 cellpadding=0 border=0>\n");
                    out.write(" <thead>\n");
                    out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_ROW+"'>\n");
                    out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL_SEL+"' nowrap>"+i18n.getString("DriverInfo.select","Select")+"</th>\n");
                    out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+i18n.getString("DriverInfo.driverID","Driver ID")+"</th>\n");
                    out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+i18n.getString("DriverInfo.description","Description")+"</th>\n");
                    out.write(" </tr>\n");
                    out.write(" </thead>\n");
                    out.write(" <tbody>\n");
                    for (int u = 0; u < _driverList.size(); u++) {
                        String drid = _driverList.get(u);
                        // alternate row styling
                        if ((u & 1) == 0) {
                            out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_ODD+"'>\n");
                        } else {
                            out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_EVEN+"'>\n");
                        }
                        try {
                            Driver driver = Driver.getDriver(currAcct, drid);
                            if (driver != null) {
                                String driverID   = driver.getDriverID();
                                String driverDesc = Filter(driver.getDescription());
                                String checked    = _selDriverID.equals(driver.getDriverID())? "checked" : "";
                                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL_SEL+"' "+SORTTABLE_SORTKEY+"='"+u+"'><input type='radio' name='"+PARM_DRIVER_SELECT+"' id='"+driverID+"' value='"+driverID+"' "+checked+"></td>\n");
                                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap><label for='"+driverID+"'>"+driverID+"</label></td>\n");
                                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+driverDesc+"</td>\n");
                            }
                        } catch (DBException dbe) {
                            // skip drivers that fail to load
                        }
                        out.write(" </tr>\n");
                    }
                    out.write(" </tbody>\n");
                    out.write("</table>\n");
                    out.write("<table cellpadding='0' cellspacing='0' border='0' style='width:95%; margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
                    out.write("<tr>\n");
                    if (_allowView ) {
                        out.write("<td style='padding-left:5px;'>");
                        out.write("<input type='submit' name='"+PARM_SUBMIT_VIEW+"' value='"+i18n.getString("DriverInfo.view","View")+"'>");
                        out.write("</td>\n");
                    }
                    if (_allowEdit ) {
                        out.write("<td style='padding-left:5px;'>");
                        out.write("<input type='submit' name='"+PARM_SUBMIT_EDIT+"' value='"+i18n.getString("DriverInfo.edit","Edit")+"'>");
                        out.write("</td>\n");
                    }
                    out.write("<td style='width:100%; text-align:right; padding-right:10px;'>");
                    if (_allowDelete) {
                        out.write("<input type='submit' name='"+PARM_SUBMIT_DEL+"' value='"+i18n.getString("DriverInfo.delete","Delete")+"' "+Onclick_ConfirmDelete(locale)+">");
                    } else {
                        out.write("&nbsp;");
                    }
                    out.write("</td>\n");
                    out.write("</tr>\n");
                    out.write("</table>\n");
                    out.write("</form>\n");
                    out.write("</div>\n");
                    out.write("<hr>\n");

                    /* new Driver */
                    if (_allowNew) {
                        out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("DriverInfo.createNewDriver","Create a new Driver")+":</h1>\n");
                        out.write("<div style='margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
                        out.write("<form name='"+FORM_DRIVER_NEW+"' method='post' action='"+newURL+"' target='_self'>"); // target='_top'
                        out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_NEW+"'/>");
                        out.write(i18n.getString("DriverInfo.driverID","Driver ID")+": <input type='text' class='"+CommonServlet.CSS_TEXT_INPUT+"' name='"+PARM_NEW_NAME+"' value='' size='32' maxlength='32'><br>\n");
                        out.write(" <input type='submit' name='"+PARM_SUBMIT_NEW+"' value='"+i18n.getString("DriverInfo.new","New")+"' style='margin-top:5px; margin-left:10px;'>\n");
                        out.write("</form>\n");
                        out.write("</div>\n");
                        out.write("<hr>\n");
                    }

                }
            };
        } else
        if (_uiEdit || _uiView) {
            final String _selDriverID = selDriverID;
            HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
                public void write(PrintWriter out) throws IOException {
                    String pageName  = DriverInfo.this.getPageName();
                    // frame header
                    String menuURL   = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
                    String editURL   = DriverInfo.this.encodePageURL(reqState);
                    String selectURL = DriverInfo.this.encodePageURL(reqState);
                    String newURL    = DriverInfo.this.encodePageURL(reqState);
                    String frameTitle = _allowEdit?
                        i18n.getString("DriverInfo.viewEditDriver","View/Edit Driver Information") :
                        i18n.getString("DriverInfo.viewDriver","View Driver Information");
                    out.write("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>\n");
                    out.write("<hr>\n");

                    /* start of form */
                    out.write("<form name='"+FORM_DRIVER_EDIT+"' method='post' action='"+editURL+"' target='_self'>\n"); // target='_top'
                    out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_UPDATE+"'/>\n");

                    /* Driver fields */
                    // (fixed typo: was "callpadding")
                    out.println("<table class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE+"' cellspacing='0' cellpadding='0' border='0'>");
                    out.println(FormRow_TextField(PARM_DRIVER_SELECT  , false  , i18n.getString("DriverInfo.driverID","Driver ID")+":"              , _selDriverID, 8, 8));
                    out.println(FormRow_TextField(PARM_DRIVER_NAME    , _uiEdit, i18n.getString("DriverInfo.driverName","Driver Name")+":"          , (_selDriver!=null)?_selDriver.getDescription()  :"", 50, 80));
                    out.println(FormRow_TextField(PARM_INFORMAL_NAME  , _uiEdit, i18n.getString("DriverInfo.informalName","Nickname")+":"           , (_selDriver!=null)?_selDriver.getDisplayName()  :"", 15, 40));
                    out.println(FormRow_TextField(PARM_CONTACT_PHONE  , _uiEdit, i18n.getString("DriverInfo.contactPhone","Contact Phone")+":"      , (_selDriver!=null)?_selDriver.getContactPhone() :"", 24, 32));
                    out.println(FormRow_TextField(PARM_CONTACT_EMAIL  , _uiEdit, i18n.getString("DriverInfo.contactEmail","Contact EMail")+":"      , (_selDriver!=null)?_selDriver.getContactEmail() :"", 40, 80));
                    out.println(FormRow_TextField(PARM_BADGE_ID       , _uiEdit, i18n.getString("DriverInfo.badgeID","Badge ID")+":"                , (_selDriver!=null)?_selDriver.getBadgeID()      :"", 32, 32));
                    out.println(FormRow_Separator());
                    out.println(FormRow_TextField(PARM_LICENSE_TYPE   , _uiEdit, i18n.getString("DriverInfo.licenseType","License Type")+":"        , (_selDriver!=null)?_selDriver.getLicenseType()  :"", 24, 24));
                    out.println(FormRow_TextField(PARM_LICENSE_NUMBER , _uiEdit, i18n.getString("DriverInfo.licenseNumber","License Number")+":"    , (_selDriver!=null)?_selDriver.getLicenseNumber():"", 32, 32));
                    out.println(FormRow_TextField(PARM_LICENSE_EXPIRE , _uiEdit, i18n.getString("DriverInfo.licenseExpire","License Expiration")+":", (_selDriver!=null)?FormatDayNumber(_selDriver.getLicenseExpire()):"",13,13, i18n.getString("DriverInfo.dateYMD","(yyyy/mm/dd)")));
                    out.println(FormRow_Separator());
                    out.println(FormRow_TextField(PARM_FULL_ADDRESS   , _uiEdit, i18n.getString("DriverInfo.fullAddress","Address")+":"             , (_selDriver!=null)?_selDriver.getAddress()      :"", 60, 90));
                    if (showDeviceID) {
                        String devTitles[] = reqState.getDeviceTitles();
                        String selDevID = (_selDriver != null)? _selDriver.getDeviceID() : "";
                        ComboMap devMap = new ComboMap(reqState.createDeviceDescriptionMap(true/*includeID*/));
                        devMap.insert("", i18n.getString("DriverInfo.noDevice","None",devTitles));
                        out.println(FormRow_Separator());
                        out.println(FormRow_ComboBox (PARM_DEVICE_ID, _uiEdit, i18n.getString("DriverInfo.deviceID","{0} ID",devTitles)+":", selDevID, devMap, "", -1));
                    }
                    out.println("</table>");

                    /* end of form */
                    out.write("<hr style='margin-bottom:5px;'>\n");
                    out.write("<span style='padding-left:10px'>&nbsp;</span>\n");
                    if (_uiEdit) {
                        out.write("<input type='submit' name='"+PARM_SUBMIT_CHG+"' value='"+i18n.getString("DriverInfo.change","Change")+"'>\n");
                        out.write("<span style='padding-left:10px'>&nbsp;</span>\n");
                        out.write("<input type='button' name='"+PARM_BUTTON_CANCEL+"' value='"+i18n.getString("DriverInfo.cancel","Cancel")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
                    } else {
                        out.write("<input type='button' name='"+PARM_BUTTON_BACK+"' value='"+i18n.getString("DriverInfo.back","Back")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
                    }
                    out.write("</form>\n");

                }
            };
        }

        /* write frame */
        String onload = error? JS_alert(true,m) : null;
        CommonServlet.writePageFrame(
            reqState,
            onload,null,                // onLoad/onUnload
            HTML_CSS,                   // Style sheets
            HTML_JS,                    // Javascript
            null,                       // Navigation
            HTML_CONTENT);              // Content

    }

    // ------------------------------------------------------------------------
}
/**
 */
package etlMetaModel.impl;

import etlMetaModel.EolAdditiveExpression;
import etlMetaModel.EolChainedAdditiveExpression;
import etlMetaModel.EolMultiplicativeExpression;

import etlMetaModel.EtlMetaModelPackage;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Eol Additive Expression</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link etlMetaModel.impl.EolAdditiveExpressionImpl#getMultiplicativeExpression <em>Multiplicative Expression</em>}</li>
 *   <li>{@link etlMetaModel.impl.EolAdditiveExpressionImpl#getChainedAdditiveExpression <em>Chained Additive Expression</em>}</li>
 * </ul>
 * </p>
 *
 * NOTE(review): EMF-generated class. Code must not be hand-edited; methods
 * marked "@generated" will be overwritten on regeneration. Both features are
 * containment references, hence the basicSet*/eInverseRemove notification
 * plumbing below.
 *
 * @generated
 */
public class EolAdditiveExpressionImpl extends MinimalEObjectImpl.Container implements EolAdditiveExpression {
	/**
	 * The cached value of the '{@link #getMultiplicativeExpression() <em>Multiplicative Expression</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMultiplicativeExpression()
	 * @generated
	 * @ordered
	 */
	protected EolMultiplicativeExpression multiplicativeExpression;

	/**
	 * The cached value of the '{@link #getChainedAdditiveExpression() <em>Chained Additive Expression</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getChainedAdditiveExpression()
	 * @generated
	 * @ordered
	 */
	protected EolChainedAdditiveExpression chainedAdditiveExpression;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EolAdditiveExpressionImpl() {
		super();
	}

	/**
	 * Returns the static EMF metaclass for this model object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return EtlMetaModelPackage.Literals.EOL_ADDITIVE_EXPRESSION;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EolMultiplicativeExpression getMultiplicativeExpression() {
		return multiplicativeExpression;
	}

	/**
	 * Installs the new containment value and queues a SET notification
	 * without triggering inverse bookkeeping (caller handles that).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetMultiplicativeExpression(EolMultiplicativeExpression newMultiplicativeExpression, NotificationChain msgs) {
		EolMultiplicativeExpression oldMultiplicativeExpression = multiplicativeExpression;
		multiplicativeExpression = newMultiplicativeExpression;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION, oldMultiplicativeExpression, newMultiplicativeExpression);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * Public setter: detaches the old contained value, attaches the new one,
	 * then dispatches the accumulated notifications.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMultiplicativeExpression(EolMultiplicativeExpression newMultiplicativeExpression) {
		if (newMultiplicativeExpression != multiplicativeExpression) {
			NotificationChain msgs = null;
			if (multiplicativeExpression != null)
				msgs = ((InternalEObject)multiplicativeExpression).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION, null, msgs);
			if (newMultiplicativeExpression != null)
				msgs = ((InternalEObject)newMultiplicativeExpression).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION, null, msgs);
			msgs = basicSetMultiplicativeExpression(newMultiplicativeExpression, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION, newMultiplicativeExpression, newMultiplicativeExpression));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EolChainedAdditiveExpression getChainedAdditiveExpression() {
		return chainedAdditiveExpression;
	}

	/**
	 * Installs the new containment value and queues a SET notification
	 * without triggering inverse bookkeeping (caller handles that).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetChainedAdditiveExpression(EolChainedAdditiveExpression newChainedAdditiveExpression, NotificationChain msgs) {
		EolChainedAdditiveExpression oldChainedAdditiveExpression = chainedAdditiveExpression;
		chainedAdditiveExpression = newChainedAdditiveExpression;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION, oldChainedAdditiveExpression, newChainedAdditiveExpression);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * Public setter: detaches the old contained value, attaches the new one,
	 * then dispatches the accumulated notifications.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setChainedAdditiveExpression(EolChainedAdditiveExpression newChainedAdditiveExpression) {
		if (newChainedAdditiveExpression != chainedAdditiveExpression) {
			NotificationChain msgs = null;
			if (chainedAdditiveExpression != null)
				msgs = ((InternalEObject)chainedAdditiveExpression).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION, null, msgs);
			if (newChainedAdditiveExpression != null)
				msgs = ((InternalEObject)newChainedAdditiveExpression).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION, null, msgs);
			msgs = basicSetChainedAdditiveExpression(newChainedAdditiveExpression, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION, newChainedAdditiveExpression, newChainedAdditiveExpression));
	}

	/**
	 * Routes containment removal of either feature to its basicSet* helper.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION:
				return basicSetMultiplicativeExpression(null, msgs);
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION:
				return basicSetChainedAdditiveExpression(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * Reflective feature read.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION:
				return getMultiplicativeExpression();
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION:
				return getChainedAdditiveExpression();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective feature write.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION:
				setMultiplicativeExpression((EolMultiplicativeExpression)newValue);
				return;
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION:
				setChainedAdditiveExpression((EolChainedAdditiveExpression)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Reflective feature reset (null is the default for references).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION:
				setMultiplicativeExpression((EolMultiplicativeExpression)null);
				return;
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION:
				setChainedAdditiveExpression((EolChainedAdditiveExpression)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * Reports whether a feature holds a non-default (non-null) value.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__MULTIPLICATIVE_EXPRESSION:
				return multiplicativeExpression != null;
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION__CHAINED_ADDITIVE_EXPRESSION:
				return chainedAdditiveExpression != null;
		}
		return super.eIsSet(featureID);
	}

} //EolAdditiveExpressionImpl
/*
 * Copyright 2014 Hieu Rocker
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.rockerhieu.emojicon.emoji;

/**
 * Static table of the "Symbols" emoji category, in the order they are shown
 * in the picker.
 *
 * <p>Entries are constructed three different ways depending on how the emoji
 * is encoded in UTF-16:
 * <ul>
 * <li>{@code Emojicon.fromChars(String)} — multi-char sequences, e.g. keycaps
 *     (an ASCII digit or '#' followed by U+20E3 COMBINING ENCLOSING KEYCAP);</li>
 * <li>{@code Emojicon.fromCodePoint(int)} — supplementary-plane code points
 *     (U+10000 and above), which require a surrogate pair;</li>
 * <li>{@code Emojicon.fromChar(char)} — single BMP code units.</li>
 * </ul>
 *
 * @author Hieu Rocker (rockerhieu@gmail.com)
 */
public class Symbols {
    // NOTE(review): order is significant — it is the display order in the UI.
    public static final Emojicon[] DATA = new Emojicon[]{
            // Keycap digits 1-9 and 0 (digit + U+20E3).
            Emojicon.fromChars("\u0031\u20e3"),
            Emojicon.fromChars("\u0032\u20e3"),
            Emojicon.fromChars("\u0033\u20e3"),
            Emojicon.fromChars("\u0034\u20e3"),
            Emojicon.fromChars("\u0035\u20e3"),
            Emojicon.fromChars("\u0036\u20e3"),
            Emojicon.fromChars("\u0037\u20e3"),
            Emojicon.fromChars("\u0038\u20e3"),
            Emojicon.fromChars("\u0039\u20e3"),
            Emojicon.fromChars("\u0030\u20e3"),
            Emojicon.fromCodePoint(0x1f51f), // KEYCAP TEN
            Emojicon.fromCodePoint(0x1f522), // INPUT SYMBOL FOR NUMBERS
            Emojicon.fromChars("\u0023\u20e3"), // keycap '#'
            Emojicon.fromCodePoint(0x1f523), // INPUT SYMBOL FOR SYMBOLS
            // Arrows and input symbols.
            Emojicon.fromChar((char) 0x2b06),
            Emojicon.fromChar((char) 0x2b07),
            Emojicon.fromChar((char) 0x2b05),
            Emojicon.fromChar((char) 0x27a1),
            Emojicon.fromCodePoint(0x1f520),
            Emojicon.fromCodePoint(0x1f521),
            Emojicon.fromCodePoint(0x1f524),
            Emojicon.fromChar((char) 0x2197),
            Emojicon.fromChar((char) 0x2196),
            Emojicon.fromChar((char) 0x2198),
            Emojicon.fromChar((char) 0x2199),
            Emojicon.fromChar((char) 0x2194),
            Emojicon.fromChar((char) 0x2195),
            Emojicon.fromCodePoint(0x1f504),
            Emojicon.fromChar((char) 0x25c0),
            Emojicon.fromChar((char) 0x25b6),
            Emojicon.fromCodePoint(0x1f53c),
            Emojicon.fromCodePoint(0x1f53d),
            Emojicon.fromChar((char) 0x21a9),
            Emojicon.fromChar((char) 0x21aa),
            Emojicon.fromChar((char) 0x2139),
            Emojicon.fromChar((char) 0x23ea),
            Emojicon.fromChar((char) 0x23e9),
            Emojicon.fromChar((char) 0x23eb),
            Emojicon.fromChar((char) 0x23ec),
            Emojicon.fromChar((char) 0x2935),
            Emojicon.fromChar((char) 0x2934),
            // Squared-Latin and Japanese "squared" signage symbols.
            Emojicon.fromCodePoint(0x1f197),
            Emojicon.fromCodePoint(0x1f500),
            Emojicon.fromCodePoint(0x1f501),
            Emojicon.fromCodePoint(0x1f502),
            Emojicon.fromCodePoint(0x1f195),
            Emojicon.fromCodePoint(0x1f199),
            Emojicon.fromCodePoint(0x1f192),
            Emojicon.fromCodePoint(0x1f193),
            Emojicon.fromCodePoint(0x1f196),
            Emojicon.fromCodePoint(0x1f4f6),
            Emojicon.fromCodePoint(0x1f3a6),
            Emojicon.fromCodePoint(0x1f201),
            Emojicon.fromCodePoint(0x1f22f),
            Emojicon.fromCodePoint(0x1f233),
            Emojicon.fromCodePoint(0x1f235),
            Emojicon.fromCodePoint(0x1f234),
            Emojicon.fromCodePoint(0x1f232),
            Emojicon.fromCodePoint(0x1f250),
            Emojicon.fromCodePoint(0x1f239),
            Emojicon.fromCodePoint(0x1f23a),
            Emojicon.fromCodePoint(0x1f236),
            Emojicon.fromCodePoint(0x1f21a),
            // Public-facility signage.
            Emojicon.fromCodePoint(0x1f6bb),
            Emojicon.fromCodePoint(0x1f6b9),
            Emojicon.fromCodePoint(0x1f6ba),
            Emojicon.fromCodePoint(0x1f6bc),
            Emojicon.fromCodePoint(0x1f6be),
            Emojicon.fromCodePoint(0x1f6b0),
            Emojicon.fromCodePoint(0x1f6ae),
            Emojicon.fromCodePoint(0x1f17f),
            Emojicon.fromChar((char) 0x267f),
            Emojicon.fromCodePoint(0x1f6ad),
            Emojicon.fromCodePoint(0x1f237),
            Emojicon.fromCodePoint(0x1f238),
            Emojicon.fromCodePoint(0x1f202),
            Emojicon.fromChar((char) 0x24c2),
            Emojicon.fromCodePoint(0x1f6c2),
            Emojicon.fromCodePoint(0x1f6c4),
            Emojicon.fromCodePoint(0x1f6c5),
            Emojicon.fromCodePoint(0x1f6c3),
            Emojicon.fromCodePoint(0x1f251),
            Emojicon.fromChar((char) 0x3299),
            Emojicon.fromChar((char) 0x3297),
            Emojicon.fromCodePoint(0x1f191),
            Emojicon.fromCodePoint(0x1f198),
            Emojicon.fromCodePoint(0x1f194),
            // Prohibition signs.
            Emojicon.fromCodePoint(0x1f6ab),
            Emojicon.fromCodePoint(0x1f51e),
            Emojicon.fromCodePoint(0x1f4f5),
            Emojicon.fromCodePoint(0x1f6af),
            Emojicon.fromCodePoint(0x1f6b1),
            Emojicon.fromCodePoint(0x1f6b3),
            Emojicon.fromCodePoint(0x1f6b7),
            Emojicon.fromCodePoint(0x1f6b8),
            Emojicon.fromChar((char) 0x26d4),
            Emojicon.fromChar((char) 0x2733),
            Emojicon.fromChar((char) 0x2747),
            Emojicon.fromChar((char) 0x274e),
            Emojicon.fromChar((char) 0x2705),
            Emojicon.fromChar((char) 0x2734),
            Emojicon.fromCodePoint(0x1f49f),
            Emojicon.fromCodePoint(0x1f19a),
            Emojicon.fromCodePoint(0x1f4f3),
            Emojicon.fromCodePoint(0x1f4f4),
            Emojicon.fromCodePoint(0x1f170),
            Emojicon.fromCodePoint(0x1f171),
            Emojicon.fromCodePoint(0x1f18e),
            Emojicon.fromCodePoint(0x1f17e),
            Emojicon.fromCodePoint(0x1f4a0),
            Emojicon.fromChar((char) 0x27bf),
            Emojicon.fromChar((char) 0x267b),
            // Zodiac signs U+2648..U+2653, plus Ophiuchus.
            Emojicon.fromChar((char) 0x2648),
            Emojicon.fromChar((char) 0x2649),
            Emojicon.fromChar((char) 0x264a),
            Emojicon.fromChar((char) 0x264b),
            Emojicon.fromChar((char) 0x264c),
            Emojicon.fromChar((char) 0x264d),
            Emojicon.fromChar((char) 0x264e),
            Emojicon.fromChar((char) 0x264f),
            Emojicon.fromChar((char) 0x2650),
            Emojicon.fromChar((char) 0x2651),
            Emojicon.fromChar((char) 0x2652),
            Emojicon.fromChar((char) 0x2653),
            Emojicon.fromChar((char) 0x26ce),
            Emojicon.fromCodePoint(0x1f52f),
            Emojicon.fromCodePoint(0x1f3e7),
            Emojicon.fromCodePoint(0x1f4b9),
            Emojicon.fromCodePoint(0x1f4b2),
            Emojicon.fromCodePoint(0x1f4b1),
            // Intentionally disabled upstream: plain (c)/(r) signs.
            // Emoji.fromChar((char)0x00a9),
            // Emoji.fromChar((char)0x00ae),
            // NOTE(review): 0xE24E/0xE24F are in a Private Use Area — presumably
            // legacy carrier emoji; rendering depends on the installed font.
            Emojicon.fromChar((char) 0xe24e),
            Emojicon.fromChar((char) 0xe24f),
            Emojicon.fromChar((char) 0x2122),
            // Punctuation-style emoji.
            Emojicon.fromChar((char) 0x274c),
            Emojicon.fromChar((char) 0x203c),
            Emojicon.fromChar((char) 0x2049),
            Emojicon.fromChar((char) 0x2757),
            Emojicon.fromChar((char) 0x2753),
            Emojicon.fromChar((char) 0x2755),
            Emojicon.fromChar((char) 0x2754),
            Emojicon.fromChar((char) 0x2b55),
            Emojicon.fromCodePoint(0x1f51d),
            Emojicon.fromCodePoint(0x1f51a),
            Emojicon.fromCodePoint(0x1f519),
            Emojicon.fromCodePoint(0x1f51b),
            Emojicon.fromCodePoint(0x1f51c),
            Emojicon.fromCodePoint(0x1f503),
            // Clock faces (on-the-hour and half-past variants interleaved).
            Emojicon.fromCodePoint(0x1f55b),
            Emojicon.fromCodePoint(0x1f567),
            Emojicon.fromCodePoint(0x1f550),
            Emojicon.fromCodePoint(0x1f55c),
            Emojicon.fromCodePoint(0x1f551),
            Emojicon.fromCodePoint(0x1f55d),
            Emojicon.fromCodePoint(0x1f552),
            Emojicon.fromCodePoint(0x1f55e),
            Emojicon.fromCodePoint(0x1f553),
            Emojicon.fromCodePoint(0x1f55f),
            Emojicon.fromCodePoint(0x1f554),
            Emojicon.fromCodePoint(0x1f560),
            Emojicon.fromCodePoint(0x1f555),
            Emojicon.fromCodePoint(0x1f556),
            Emojicon.fromCodePoint(0x1f557),
            Emojicon.fromCodePoint(0x1f558),
            Emojicon.fromCodePoint(0x1f559),
            Emojicon.fromCodePoint(0x1f55a),
            Emojicon.fromCodePoint(0x1f561),
            Emojicon.fromCodePoint(0x1f562),
            Emojicon.fromCodePoint(0x1f563),
            Emojicon.fromCodePoint(0x1f564),
            Emojicon.fromCodePoint(0x1f565),
            Emojicon.fromCodePoint(0x1f566),
            // Math operators and card suits.
            Emojicon.fromChar((char) 0x2716),
            Emojicon.fromChar((char) 0x2795),
            Emojicon.fromChar((char) 0x2796),
            Emojicon.fromChar((char) 0x2797),
            Emojicon.fromChar((char) 0x2660),
            Emojicon.fromChar((char) 0x2665),
            Emojicon.fromChar((char) 0x2663),
            Emojicon.fromChar((char) 0x2666),
            Emojicon.fromCodePoint(0x1f4ae),
            Emojicon.fromCodePoint(0x1f4af),
            Emojicon.fromChar((char) 0x2714),
            Emojicon.fromChar((char) 0x2611),
            Emojicon.fromCodePoint(0x1f518),
            Emojicon.fromCodePoint(0x1f517),
            Emojicon.fromChar((char) 0x27b0),
            Emojicon.fromChar((char) 0x3030),
            Emojicon.fromChar((char) 0x303d),
            Emojicon.fromCodePoint(0x1f531),
            // Geometric shapes.
            Emojicon.fromChar((char) 0x25fc),
            Emojicon.fromChar((char) 0x25fb),
            Emojicon.fromChar((char) 0x25fe),
            Emojicon.fromChar((char) 0x25fd),
            Emojicon.fromChar((char) 0x25aa),
            Emojicon.fromChar((char) 0x25ab),
            Emojicon.fromCodePoint(0x1f53a),
            Emojicon.fromCodePoint(0x1f532),
            Emojicon.fromCodePoint(0x1f533),
            Emojicon.fromChar((char) 0x26ab),
            Emojicon.fromChar((char) 0x26aa),
            Emojicon.fromCodePoint(0x1f534),
            Emojicon.fromCodePoint(0x1f535),
            Emojicon.fromCodePoint(0x1f53b),
            Emojicon.fromChar((char) 0x2b1c),
            Emojicon.fromChar((char) 0x2b1b),
            Emojicon.fromCodePoint(0x1f536),
            Emojicon.fromCodePoint(0x1f537),
            Emojicon.fromCodePoint(0x1f538),
            Emojicon.fromCodePoint(0x1f539),
    };
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cli;

import java.util.HashMap;
import java.util.Map;

import com.google.common.base.Joiner;
import org.apache.commons.cli.*;

import org.apache.cassandra.config.EncryptionOptions;
import org.apache.cassandra.thrift.ITransportFactory;
import org.apache.cassandra.thrift.SSLTransportFactory;

/**
 * Used to process, and act upon, the arguments passed to the CLI.
 *
 * <p>Parsed values are written into the supplied {@link CliSessionState};
 * this class itself holds no per-invocation state.
 */
public class CliOptions
{
    // Shared registry of all recognized options; populated once in the
    // static initializer below.
    private static final CLIOptions options;

    /** Name of the command line tool (used in usage/error messages). */
    private static final String TOOL_NAME = "cassandra-cli";

    // Long names of the command line options.
    private static final String HOST_OPTION = "host";
    private static final String PORT_OPTION = "port";
    private static final String TRANSPORT_FACTORY = "transport-factory";
    private static final String DEBUG_OPTION = "debug";
    private static final String USERNAME_OPTION = "username";
    private static final String PASSWORD_OPTION = "password";
    private static final String KEYSPACE_OPTION = "keyspace";
    private static final String BATCH_OPTION = "batch";
    private static final String HELP_OPTION = "help";
    private static final String FILE_OPTION = "file";
    private static final String JMX_PORT_OPTION = "jmxport";
    private static final String JMX_USERNAME_OPTION = "jmxusername";
    private static final String JMX_PASSWORD_OPTION = "jmxpassword";
    private static final String VERBOSE_OPTION = "verbose";
    private static final String SSL_TRUSTSTORE = "truststore";
    private static final String SSL_TRUSTSTORE_PW = "truststore-password";
    private static final String SSL_PROTOCOL = "ssl-protocol";
    private static final String SSL_ALGORITHM = "ssl-alg";
    private static final String SSL_STORE_TYPE = "store-type";
    private static final String SSL_CIPHER_SUITES = "ssl-ciphers";

    // Default values for optional command line arguments.
    private static final String DEFAULT_HOST = "127.0.0.1";
    private static final int DEFAULT_THRIFT_PORT = 9160;

    // Register the command line options and their properties (such as
    // whether they take an extra argument, etc.)
    static
    {
        options = new CLIOptions();

        // Options that take an argument.
        options.addOption("h",  HOST_OPTION,     "HOSTNAME", "cassandra server's host name");
        options.addOption("p",  PORT_OPTION,     "PORT",     "cassandra server's thrift port");
        options.addOption("u",  USERNAME_OPTION, "USERNAME", "user name for cassandra authentication");
        options.addOption("pw", PASSWORD_OPTION, "PASSWORD", "password for cassandra authentication");
        options.addOption("k",  KEYSPACE_OPTION, "KEYSPACE", "cassandra keyspace user is authenticated against");
        options.addOption("f",  FILE_OPTION,     "FILENAME", "load statements from the specific file");
        options.addOption(null, JMX_PORT_OPTION, "JMX-PORT", "JMX service port");
        options.addOption(null, JMX_USERNAME_OPTION, "JMX-USERNAME", "JMX service username");
        options.addOption(null, JMX_PASSWORD_OPTION, "JMX-PASSWORD", "JMX service password");
        options.addOption("tf", TRANSPORT_FACTORY, "TRANSPORT-FACTORY",
                          "Fully-qualified ITransportFactory class name for creating a connection to cassandra");

        // SSL connection-related options.
        options.addOption("ts",      SSL_TRUSTSTORE,    "TRUSTSTORE", "SSL: full path to truststore");
        options.addOption("tspw",    SSL_TRUSTSTORE_PW, "TRUSTSTORE-PASSWORD", "SSL: password of the truststore");
        options.addOption("prtcl",   SSL_PROTOCOL,      "PROTOCOL", "SSL: connections protocol to use (default: TLS)");
        options.addOption("alg",     SSL_ALGORITHM,     "ALGORITHM", "SSL: algorithm (default: SunX509)");
        options.addOption("st",      SSL_STORE_TYPE,    "STORE-TYPE", "SSL: type of store");
        options.addOption("ciphers", SSL_CIPHER_SUITES, "CIPHER-SUITES", "SSL: comma-separated list of encryption suites to use");

        // Options without an argument.
        // (fixed typos in the two descriptions below: "enabled" -> "enable",
        //  "strack-traces" -> "stack-traces")
        options.addOption("B", BATCH_OPTION, "enable batch mode (suppress output; errors are fatal)");
        options.addOption(null, DEBUG_OPTION, "display stack-traces (NOTE: We print stack-traces in the places where it makes sense even without --debug)");
        options.addOption("?", HELP_OPTION, "usage help");
        options.addOption("v", VERBOSE_OPTION, "verbose output when using batch mode");
    }

    /** Prints the tool's usage summary (all registered options) to stdout. */
    private static void printUsage()
    {
        new HelpFormatter().printHelp(TOOL_NAME, options);
    }

    /**
     * Parses {@code args} and stores the results in the given session state.
     *
     * <p>Side effects: calls {@code System.exit(1)} on parse errors, on
     * unrecognized trailing arguments, and after printing {@code --help}
     * (non-zero exit for help is preserved for backward compatibility).
     *
     * @param css  session state to populate
     * @param args raw command line arguments
     */
    public void processArgs(CliSessionState css, String[] args)
    {
        CommandLineParser parser = new GnuParser();

        try
        {
            CommandLine cmd = parser.parse(options, args, false);

            if (cmd.hasOption(HOST_OPTION))
            {
                css.hostName = cmd.getOptionValue(HOST_OPTION);
            }
            else
            {
                css.hostName = DEFAULT_HOST;
            }

            if (cmd.hasOption(DEBUG_OPTION))
            {
                css.debug = true;
            }

            // Look for optional args.
            if (cmd.hasOption(PORT_OPTION))
            {
                css.thriftPort = Integer.parseInt(cmd.getOptionValue(PORT_OPTION));
            }
            else
            {
                css.thriftPort = DEFAULT_THRIFT_PORT;
            }

            // Look for authentication credentials (username and password).
            if (cmd.hasOption(USERNAME_OPTION))
            {
                css.username = cmd.getOptionValue(USERNAME_OPTION);
            }
            if (cmd.hasOption(PASSWORD_OPTION))
            {
                css.password = cmd.getOptionValue(PASSWORD_OPTION);
            }

            // Look for keyspace.
            if (cmd.hasOption(KEYSPACE_OPTION))
            {
                css.keyspace = cmd.getOptionValue(KEYSPACE_OPTION);
            }

            if (cmd.hasOption(BATCH_OPTION))
            {
                css.batch = true;
            }

            if (cmd.hasOption(FILE_OPTION))
            {
                css.filename = cmd.getOptionValue(FILE_OPTION);
            }

            if (cmd.hasOption(JMX_PORT_OPTION))
            {
                css.jmxPort = Integer.parseInt(cmd.getOptionValue(JMX_PORT_OPTION));
            }

            if (cmd.hasOption(JMX_USERNAME_OPTION))
            {
                css.jmxUsername = cmd.getOptionValue(JMX_USERNAME_OPTION);
            }

            if (cmd.hasOption(JMX_PASSWORD_OPTION))
            {
                css.jmxPassword = cmd.getOptionValue(JMX_PASSWORD_OPTION);
            }

            if (cmd.hasOption(HELP_OPTION))
            {
                printUsage();
                System.exit(1);
            }

            if (cmd.hasOption(VERBOSE_OPTION))
            {
                css.verbose = true;
            }

            // SSL-related settings are copied straight into the shared
            // EncryptionOptions carried by the session state.
            if (cmd.hasOption(SSL_TRUSTSTORE))
            {
                css.encOptions.truststore = cmd.getOptionValue(SSL_TRUSTSTORE);
            }

            if (cmd.hasOption(SSL_TRUSTSTORE_PW))
            {
                css.encOptions.truststore_password = cmd.getOptionValue(SSL_TRUSTSTORE_PW);
            }

            if (cmd.hasOption(SSL_PROTOCOL))
            {
                css.encOptions.protocol = cmd.getOptionValue(SSL_PROTOCOL);
            }

            if (cmd.hasOption(SSL_ALGORITHM))
            {
                css.encOptions.algorithm = cmd.getOptionValue(SSL_ALGORITHM);
            }

            if (cmd.hasOption(SSL_STORE_TYPE))
            {
                css.encOptions.store_type = cmd.getOptionValue(SSL_STORE_TYPE);
            }

            if (cmd.hasOption(SSL_CIPHER_SUITES))
            {
                css.encOptions.cipher_suites = cmd.getOptionValue(SSL_CIPHER_SUITES).split(",");
            }

            if (cmd.hasOption(TRANSPORT_FACTORY))
            {
                css.transportFactory = validateAndSetTransportFactory(cmd.getOptionValue(TRANSPORT_FACTORY));
                configureTransportFactory(css.transportFactory, css.encOptions);
            }

            // Abort if there are any unrecognized arguments left.
            if (cmd.getArgs().length > 0)
            {
                System.err.printf("Unknown argument: %s%n", cmd.getArgs()[0]);
                System.err.println();
                printUsage();
                System.exit(1);
            }
        }
        catch (ParseException e)
        {
            System.err.println(e.getMessage());
            System.err.println();
            printUsage();
            System.exit(1);
        }
    }

    /**
     * Thin extension of commons-cli {@link Options} adding convenience
     * overloads that also set the argument name.
     */
    private static class CLIOptions extends Options
    {
        /**
         * Add option with argument and argument name
         * @param opt shortcut for option name
         * @param longOpt complete option name
         * @param argName argument name
         * @param description description of the option
         * @return updated Options object
         */
        public Options addOption(String opt, String longOpt, String argName, String description)
        {
            Option option = new Option(opt, longOpt, true, description);
            option.setArgName(argName);
            return addOption(option);
        }

        /**
         * Add option without argument
         * @param opt shortcut for option name
         * @param longOpt complete option name
         * @param description description of the option
         * @return updated Options object
         */
        public Options addOption(String opt, String longOpt, String description)
        {
            return addOption(new Option(opt, longOpt, false, description));
        }
    }

    /**
     * Reflectively loads and instantiates the named transport factory,
     * verifying it implements {@link ITransportFactory}.
     *
     * @param transportFactory fully-qualified class name supplied by the user
     * @return a new factory instance
     * @throws IllegalArgumentException if the class cannot be loaded,
     *         instantiated, or does not implement ITransportFactory
     */
    private static ITransportFactory validateAndSetTransportFactory(String transportFactory)
    {
        try
        {
            Class<?> factory = Class.forName(transportFactory);

            if (!ITransportFactory.class.isAssignableFrom(factory))
                throw new IllegalArgumentException(String.format("transport factory '%s' " +
                                                                 "not derived from ITransportFactory", transportFactory));

            return (ITransportFactory) factory.newInstance();
        }
        catch (Exception e)
        {
            throw new IllegalArgumentException(String.format("Cannot create a transport factory '%s'.", transportFactory), e);
        }
    }

    /**
     * Passes SSL settings (and any matching system properties) down to the
     * transport factory, but only for option keys the factory declares it
     * supports.
     */
    private static void configureTransportFactory(ITransportFactory transportFactory, EncryptionOptions encOptions)
    {
        Map<String, String> options = new HashMap<>();

        // If the supplied factory supports the same set of options as our SSL impl, set those.
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE))
            options.put(SSLTransportFactory.TRUSTSTORE, encOptions.truststore);
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE_PASSWORD))
            options.put(SSLTransportFactory.TRUSTSTORE_PASSWORD, encOptions.truststore_password);
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.PROTOCOL))
            options.put(SSLTransportFactory.PROTOCOL, encOptions.protocol);
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.CIPHER_SUITES))
            options.put(SSLTransportFactory.CIPHER_SUITES, Joiner.on(',').join(encOptions.cipher_suites));

        // Keystore settings only matter when client auth is required.
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE)
                && encOptions.require_client_auth)
            options.put(SSLTransportFactory.KEYSTORE, encOptions.keystore);
        if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE_PASSWORD)
                && encOptions.require_client_auth)
            options.put(SSLTransportFactory.KEYSTORE_PASSWORD, encOptions.keystore_password);

        // Now check if any of the factory's supported options are set as system properties.
        for (String optionKey : transportFactory.supportedOptions())
            if (System.getProperty(optionKey) != null)
                options.put(optionKey, System.getProperty(optionKey));

        transportFactory.setOptions(options);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.FakeTimer;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;

/**
 * Tests for the positive and negative group-lookup caches in {@link Groups}.
 *
 * <p>Time-dependent behavior is driven through a {@link FakeTimer} so the
 * tests never have to wait for real cache expiry; the backing group-mapping
 * service is stubbed by {@link FakeGroupMapping}.
 */
public class TestGroupsCaching {
  public static final Log LOG = LogFactory.getLog(TestGroupsCaching.class);
  // Groups returned for every non-blacklisted user by FakeGroupMapping.
  private static String[] myGroups = {"grp1", "grp2"};
  private Configuration conf;

  @Before
  public void setup() {
    // Reset the shared static counters so tests don't see each other's
    // request counts, and install FakeGroupMapping as the lookup backend.
    FakeGroupMapping.resetRequestCount();
    ExceptionalGroupMapping.resetRequestCount();

    conf = new Configuration();
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
      FakeGroupMapping.class,
      ShellBasedUnixGroupsMapping.class);
  }

  /**
   * Stub group mapping: every user maps to the same static group set,
   * unless blacklisted (then an empty list is returned, which Groups
   * treats as "no groups found"). Tracks how many lookups hit this
   * backend and can simulate slow lookups.
   */
  public static class FakeGroupMapping extends ShellBasedUnixGroupsMapping {
    // any to n mapping
    private static Set<String> allGroups = new HashSet<String>();
    private static Set<String> blackList = new HashSet<String>();
    // Number of getGroups() calls that reached this backend (i.e. cache misses).
    private static int requestCount = 0;
    // Artificial per-call latency, used by the concurrency tests.
    private static long getGroupsDelayMs = 0;

    @Override
    public List<String> getGroups(String user) throws IOException {
      LOG.info("Getting groups for " + user);
      requestCount++;

      delayIfNecessary();

      if (blackList.contains(user)) {
        return new LinkedList<String>();
      }

      return new LinkedList<String>(allGroups);
    }

    // Sleeps for the configured delay; InterruptedException is rethrown
    // unchecked because the signature has no throws clause.
    private void delayIfNecessary() {
      if (getGroupsDelayMs > 0) {
        try {
          Thread.sleep(getGroupsDelayMs);
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
    }

    @Override
    public void cacheGroupsRefresh() throws IOException {
      LOG.info("Cache is being refreshed.");
      clearBlackList();
      return;
    }

    public static void clearBlackList() throws IOException {
      LOG.info("Clearing the blacklist");
      blackList.clear();
    }

    @Override
    public void cacheGroupsAdd(List<String> groups) throws IOException {
      LOG.info("Adding " + groups + " to groups.");
      allGroups.addAll(groups);
    }

    public static void addToBlackList(String user) throws IOException {
      LOG.info("Adding " + user + " to the blacklist");
      blackList.add(user);
    }

    public static int getRequestCount() {
      return requestCount;
    }

    public static void resetRequestCount() {
      requestCount = 0;
    }

    public static void setGetGroupsDelayMs(long delayMs) {
      getGroupsDelayMs = delayMs;
    }
  }

  /**
   * Stub backend that always fails, used to verify that backend exceptions
   * are NOT stored in the negative cache.
   */
  public static class ExceptionalGroupMapping extends ShellBasedUnixGroupsMapping {
    private static int requestCount = 0;

    @Override
    public List<String> getGroups(String user) throws IOException {
      requestCount++;
      throw new IOException("For test");
    }

    public static int getRequestCount() {
      return requestCount;
    }

    public static void resetRequestCount() {
      requestCount = 0;
    }
  }

  /** Basic positive-cache behavior: hits are served from cache even after
   *  the backend starts failing for that user. */
  @Test
  public void testGroupsCaching() throws Exception {
    // Disable negative cache.
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0);
    Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();
    FakeGroupMapping.addToBlackList("user1");

    // regular entry
    assertTrue(groups.getGroups("me").size() == 2);

    // this must be cached. blacklisting should have no effect.
    FakeGroupMapping.addToBlackList("me");
    assertTrue(groups.getGroups("me").size() == 2);

    // ask for a negative entry
    try {
      LOG.error("We are not supposed to get here."
          + groups.getGroups("user1").toString());
      fail();
    } catch (IOException ioe) {
      if (!ioe.getMessage().startsWith("No groups found")) {
        LOG.error("Got unexpected exception: " + ioe.getMessage());
        fail();
      }
    }

    // this shouldn't be cached. remove from the black list and retry.
    FakeGroupMapping.clearBlackList();
    assertTrue(groups.getGroups("user1").size() == 2);
  }

  /**
   * Records whether a lookup ever reached the backend; used to prove that
   * statically-configured users never trigger a backend lookup.
   * (Note: name keeps the existing "Fakeun..." spelling used by callers.)
   */
  public static class FakeunPrivilegedGroupMapping extends FakeGroupMapping {
    private static boolean invoked = false;

    @Override
    public List<String> getGroups(String user) throws IOException {
      invoked = true;
      return super.getGroups(user);
    }
  }

  /*
   * Group lookup should not happen for static users
   */
  @Test
  public void testGroupLookupForStaticUsers() throws Exception {
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
        FakeunPrivilegedGroupMapping.class, ShellBasedUnixGroupsMapping.class);
    // Static overrides: "me" has no groups, "user1"/"user2" have fixed groups.
    conf.set(CommonConfigurationKeys.HADOOP_USER_GROUP_STATIC_OVERRIDES,
        "me=;user1=group1;user2=group1,group2");
    Groups groups = new Groups(conf);
    List<String> userGroups = groups.getGroups("me");
    assertTrue("non-empty groups for static user", userGroups.isEmpty());
    assertFalse("group lookup done for static user",
        FakeunPrivilegedGroupMapping.invoked);

    List<String> expected = new ArrayList<String>();
    expected.add("group1");

    FakeunPrivilegedGroupMapping.invoked = false;
    userGroups = groups.getGroups("user1");
    assertTrue("groups not correct", expected.equals(userGroups));
    assertFalse("group lookup done for unprivileged user",
        FakeunPrivilegedGroupMapping.invoked);

    expected.add("group2");
    FakeunPrivilegedGroupMapping.invoked = false;
    userGroups = groups.getGroups("user2");
    assertTrue("groups not correct", expected.equals(userGroups));
    assertFalse("group lookup done for unprivileged user",
        FakeunPrivilegedGroupMapping.invoked);
  }

  /** A "no groups" result stays in the negative cache until its TTL (2s here,
   *  driven by FakeTimer) elapses, even if the backend recovers earlier. */
  @Test
  public void testNegativeGroupCaching() throws Exception {
    final String user = "negcache";
    final String failMessage = "Did not throw IOException: ";
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2);
    FakeTimer timer = new FakeTimer();
    Groups groups = new Groups(conf, timer);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.addToBlackList(user);

    // In the first attempt, the user will be put in the negative cache.
    try {
      groups.getGroups(user);
      fail(failMessage + "Failed to obtain groups from FakeGroupMapping.");
    } catch (IOException e) {
      // Expects to raise exception for the first time. But the user will be
      // put into the negative cache
      GenericTestUtils.assertExceptionContains("No groups found for user", e);
    }

    // The second time, the user is in the negative cache.
    try {
      groups.getGroups(user);
      fail(failMessage + "The user is in the negative cache.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("No groups found for user", e);
    }

    // Brings back the backend user-group mapping service.
    FakeGroupMapping.clearBlackList();

    // It should still get groups from the negative cache.
    try {
      groups.getGroups(user);
      fail(failMessage + "The user is still in the negative cache, even "
          + "FakeGroupMapping has resumed.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("No groups found for user", e);
    }

    // Let the elements in the negative cache expire.
    timer.advance(4 * 1000);

    // The groups for the user is expired in the negative cache, a new copy of
    // groups for the user is fetched.
    assertEquals(Arrays.asList(myGroups), groups.getGroups(user));
  }

  /** Second lookup for the same user must be served from cache (request
   *  count to the backend does not increase). */
  @Test
  public void testCachePreventsImplRequest() throws Exception {
    // Disable negative cache.
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0);
    Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();

    assertEquals(0, FakeGroupMapping.getRequestCount());

    // First call hits the wire
    assertTrue(groups.getGroups("me").size() == 2);
    assertEquals(1, FakeGroupMapping.getRequestCount());

    // Second count hits cache
    assertTrue(groups.getGroups("me").size() == 2);
    assertEquals(1, FakeGroupMapping.getRequestCount());
  }

  /** Backend exceptions must NOT be negatively cached: each lookup retries
   *  the backend even with a large negative-cache TTL configured. */
  @Test
  public void testExceptionsFromImplNotCachedInNegativeCache() {
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
        ExceptionalGroupMapping.class, ShellBasedUnixGroupsMapping.class);
    conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 10000);
    Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    assertEquals(0, ExceptionalGroupMapping.getRequestCount());

    // First call should hit the wire
    try {
      groups.getGroups("anything");
      fail("Should have thrown");
    } catch (IOException e) {
      // okay
    }
    assertEquals(1, ExceptionalGroupMapping.getRequestCount());

    // Second call should hit the wire (no negative caching)
    try {
      groups.getGroups("anything");
      fail("Should have thrown");
    } catch (IOException e) {
      // okay
    }
    assertEquals(2, ExceptionalGroupMapping.getRequestCount());
  }

  /** With a cold cache and a slow backend, concurrent lookups for the same
   *  user must collapse into a single backend request. */
  @Test
  public void testOnlyOneRequestWhenNoEntryIsCached() throws Exception {
    // Disable negative cache.
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0);
    final Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();
    // 100 ms lookup latency widens the race window so all threads start
    // while the cache is still empty.
    FakeGroupMapping.setGetGroupsDelayMs(100);

    ArrayList<Thread> threads = new ArrayList<Thread>();
    for (int i = 0; i < 10; i++) {
      threads.add(new Thread() {
        public void run() {
          try {
            assertEquals(2, groups.getGroups("me").size());
          } catch (IOException e) {
            fail("Should not happen");
          }
        }
      });
    }

    // We start a bunch of threads who all see no cached value
    for (Thread t : threads) {
      t.start();
    }

    for (Thread t : threads) {
      t.join();
    }

    // But only one thread should have made the request
    assertEquals(1, FakeGroupMapping.getRequestCount());
  }

  /** Like the previous test, but with an already-expired entry: only one
   *  extra backend request should be made to refresh it. */
  @Test
  public void testOnlyOneRequestWhenExpiredEntryExists() throws Exception {
    conf.setLong(
      CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1);
    FakeTimer timer = new FakeTimer();
    final Groups groups = new Groups(conf, timer);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();
    FakeGroupMapping.setGetGroupsDelayMs(100);

    // We make an initial request to populate the cache
    groups.getGroups("me");
    int startingRequestCount = FakeGroupMapping.getRequestCount();

    // Then expire that entry
    timer.advance(400 * 1000);
    Thread.sleep(100);

    ArrayList<Thread> threads = new ArrayList<Thread>();
    for (int i = 0; i < 10; i++) {
      threads.add(new Thread() {
        public void run() {
          try {
            assertEquals(2, groups.getGroups("me").size());
          } catch (IOException e) {
            fail("Should not happen");
          }
        }
      });
    }

    // We start a bunch of threads who all see the cached value
    for (Thread t : threads) {
      t.start();
    }

    for (Thread t : threads) {
      t.join();
    }

    // Only one extra request is made
    assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
  }

  /** An expired positive-cache entry forces a fresh backend fetch. */
  @Test
  public void testCacheEntriesExpire() throws Exception {
    conf.setLong(
      CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1);
    FakeTimer timer = new FakeTimer();
    final Groups groups = new Groups(conf, timer);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();

    // We make an entry
    groups.getGroups("me");
    int startingRequestCount = FakeGroupMapping.getRequestCount();

    timer.advance(20 * 1000);

    // Cache entry has expired so it results in a new fetch
    groups.getGroups("me");
    assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
  }

  /** refresh() must clear the negative cache, so a blacklisted user is
   *  re-queried at the backend afterwards. */
  @Test
  public void testNegativeCacheClearedOnRefresh() throws Exception {
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 100);
    final Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();
    FakeGroupMapping.addToBlackList("dne");

    try {
      groups.getGroups("dne");
      fail("Should have failed to find this group");
    } catch (IOException e) {
      // pass
    }

    int startingRequestCount = FakeGroupMapping.getRequestCount();

    groups.refresh();
    FakeGroupMapping.addToBlackList("dne");

    try {
      List<String> g = groups.getGroups("dne");
      fail("Should have failed to find this group");
    } catch (IOException e) {
      // pass
    }

    // Exactly one new backend request proves "dne" was evicted from the
    // negative cache by refresh().
    assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
  }

  /** Negative-cache entries expire independently, each after its own TTL. */
  @Test
  public void testNegativeCacheEntriesExpire() throws Exception {
    conf.setLong(
       CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2);
    FakeTimer timer = new FakeTimer();
    // Ensure that stale entries are removed from negative cache every 2 seconds
    Groups groups = new Groups(conf, timer);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    // Add both these users to blacklist so that they
    // can be added to negative cache
    FakeGroupMapping.addToBlackList("user1");
    FakeGroupMapping.addToBlackList("user2");

    // Put user1 in negative cache.
    try {
      groups.getGroups("user1");
      fail("Did not throw IOException : Failed to obtain groups"
          + " from FakeGroupMapping.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("No groups found for user", e);
    }
    // Check if user1 exists in negative cache
    assertTrue(groups.getNegativeCache().contains("user1"));

    // Advance fake timer
    timer.advance(1000);
    // Put user2 in negative cache
    try {
      groups.getGroups("user2");
      fail("Did not throw IOException : Failed to obtain groups"
          + " from FakeGroupMapping.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("No groups found for user", e);
    }
    // Check if user2 exists in negative cache
    assertTrue(groups.getNegativeCache().contains("user2"));

    // Advance timer. Only user2 should be present in negative cache.
    timer.advance(1100);
    assertFalse(groups.getNegativeCache().contains("user1"));
    assertTrue(groups.getNegativeCache().contains("user2"));

    // Advance timer. Even user2 should not be present in negative cache.
    timer.advance(1000);
    assertFalse(groups.getNegativeCache().contains("user2"));
  }
}
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.actions;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Action.MiddlemanType;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.shell.ShellUtils;
import com.google.devtools.build.lib.syntax.Label;
import com.google.devtools.build.lib.syntax.Printer;
import com.google.devtools.build.lib.syntax.SkylarkCallable;
import com.google.devtools.build.lib.syntax.SkylarkModule;
import com.google.devtools.build.lib.syntax.SkylarkValue;
import com.google.devtools.build.lib.util.FileType;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;

import javax.annotation.Nullable;

/**
 * An Artifact represents a file used by the build system, whether it's a source
 * file or a derived (output) file. Not all Artifacts have a corresponding
 * FileTarget object in the <code>build.packages</code> API: for example,
 * low-level intermediaries internal to a given rule, such as a Java class files
 * or C++ object files. However all FileTargets have a corresponding Artifact.
 *
 * <p>In any given call to Builder#buildArtifacts(), no two Artifacts in the
 * action graph may refer to the same path.
 *
 * <p>Artifacts generally fall into two classifications, source and derived, but
 * there exist a few other cases that are fuzzy and difficult to classify. The
 * following cases exist:
 * <ul>
 * <li>Well-formed source Artifacts will have null generating Actions and a root
 * that is orthogonal to execRoot. (With the root coming from the package path.)
 * <li>Well-formed derived Artifacts will have non-null generating Actions, and
 * a root that is below execRoot.
 * <li>Symlinked include source Artifacts under the output/include tree will
 * appear to be derived artifacts with null generating Actions.
 * <li>Some derived Artifacts, mostly in the genfiles tree and mostly discovered
 * during include validation, will also have null generating Actions.
 * </ul>
 *
 * <p>This class is "theoretically" final; it should not be subclassed except by
 * {@link SpecialArtifact}.
 */
@Immutable
@SkylarkModule(name = "File",
    doc = "This type represents a file used by the build system. It can be "
        + "either a source file or a derived file produced by a rule.")
public class Artifact implements FileType.HasFilename, ActionInput, SkylarkValue {

  /**
   * Compares artifact according to their exec paths. Sorts null values first.
   */
  public static final Comparator<Artifact> EXEC_PATH_COMPARATOR =
      new Comparator<Artifact>() {
        @Override
        public int compare(Artifact a, Artifact b) {
          if (a == b) {
            return 0;
          } else if (a == null) {
            return -1;
          } else if (b == null) {
            // BUG FIX: previously returned -1 here as well, which violated the
            // Comparator contract (compare(x, null) and compare(null, x) were
            // both negative) and contradicted "sorts null values first". A
            // non-null artifact must compare greater than null.
            return 1;
          } else {
            return a.execPath.compareTo(b.execPath);
          }
        }
      };

  /** An object that can expand middleman artifacts. */
  public interface MiddlemanExpander {

    /**
     * Expands the middleman artifact "mm", and populates "output" with the result.
     *
     * <p>{@code mm.isMiddlemanArtifact()} must be true. Only aggregating middlemen are expanded.
     */
    void expand(Artifact mm, Collection<? super Artifact> output);
  }

  public static final ImmutableList<Artifact> NO_ARTIFACTS = ImmutableList.of();

  /**
   * A Predicate that evaluates to true if the Artifact is not a middleman artifact.
   */
  public static final Predicate<Artifact> MIDDLEMAN_FILTER = new Predicate<Artifact>() {
    @Override
    public boolean apply(Artifact input) {
      return !input.isMiddlemanArtifact();
    }
  };

  private final Path path;
  private final Root root;
  private final PathFragment execPath;
  private final PathFragment rootRelativePath;
  // Non-final only for use when dealing with deserialized artifacts.
  private ArtifactOwner owner;

  /**
   * Constructs an artifact for the specified path, root and execPath. The root must be an ancestor
   * of path, and execPath must be a non-absolute tail of path. Outside of testing, this method
   * should only be called by ArtifactFactory. The ArtifactOwner may be null.
   *
   * <p>In a source Artifact, the path tail after the root will be identical to the execPath, but
   * the root will be orthogonal to execRoot.
   * <pre>
   *  [path] == [/root/][execPath]
   * </pre>
   *
   * <p>In a derived Artifact, the execPath will overlap with part of the root, which in turn will
   * be below of the execRoot.
   * <pre>
   *  [path] == [/root][pathTail] == [/execRoot][execPath] == [/execRoot][rootPrefix][pathTail]
   * </pre>
   */
  @VisibleForTesting
  public Artifact(Path path, Root root, PathFragment execPath, ArtifactOwner owner) {
    if (root == null || !path.startsWith(root.getPath())) {
      throw new IllegalArgumentException(root + ": illegal root for " + path
          + " (execPath: " + execPath + ")");
    }
    if (execPath == null || execPath.isAbsolute() || !path.asFragment().endsWith(execPath)) {
      throw new IllegalArgumentException(execPath + ": illegal execPath for "
          + path + " (root: " + root + ")");
    }
    this.path = path;
    this.root = root;
    this.execPath = execPath;
    // These two lines establish the invariant that
    // execPath == rootRelativePath <=> execPath.equals(rootRelativePath)
    // This is important for isSourceArtifact.
    PathFragment rootRel = path.relativeTo(root.getPath());
    if (!execPath.endsWith(rootRel)) {
      throw new IllegalArgumentException(execPath + ": illegal execPath doesn't end with "
          + rootRel + " at " + path + " with root " + root);
    }
    this.rootRelativePath = rootRel.equals(execPath) ? execPath : rootRel;
    this.owner = Preconditions.checkNotNull(owner, path);
  }

  /**
   * Constructs an artifact for the specified path, root and execPath. The root must be an ancestor
   * of path, and execPath must be a non-absolute tail of path. Should only be called for testing.
   *
   * <p>In a source Artifact, the path tail after the root will be identical to the execPath, but
   * the root will be orthogonal to execRoot.
   * <pre>
   *  [path] == [/root/][execPath]
   * </pre>
   *
   * <p>In a derived Artifact, the execPath will overlap with part of the root, which in turn will
   * be below of the execRoot.
   * <pre>
   *  [path] == [/root][pathTail] == [/execRoot][execPath] == [/execRoot][rootPrefix][pathTail]
   * </pre>
   */
  @VisibleForTesting
  public Artifact(Path path, Root root, PathFragment execPath) {
    this(path, root, execPath, ArtifactOwner.NULL_OWNER);
  }

  /**
   * Constructs a source or derived Artifact for the specified path and specified root. The root
   * must be an ancestor of the path.
   */
  @VisibleForTesting  // Only exists for testing.
  public Artifact(Path path, Root root) {
    this(path, root, root.getExecPath().getRelative(path.relativeTo(root.getPath())),
        ArtifactOwner.NULL_OWNER);
  }

  /**
   * Constructs a source or derived Artifact for the specified root-relative path and root.
   */
  @VisibleForTesting  // Only exists for testing.
  public Artifact(PathFragment rootRelativePath, Root root) {
    this(root.getPath().getRelative(rootRelativePath), root,
        root.getExecPath().getRelative(rootRelativePath), ArtifactOwner.NULL_OWNER);
  }

  /**
   * Returns the location of this Artifact on the filesystem.
   */
  public final Path getPath() {
    return path;
  }

  /**
   * Returns the directory name of this artifact, similar to dirname(1).
   *
   * <p> The directory name is always a relative path to the execution directory.
   */
  @SkylarkCallable(name = "dirname", structField = true,
      doc = "The name of the directory containing this file.")
  public final String getDirname() {
    PathFragment parent = getExecPath().getParentDirectory();
    return (parent == null) ? "/" : parent.getSafePathString();
  }

  /**
   * Returns the base file name of this artifact, similar to basename(1).
   */
  @Override
  @SkylarkCallable(name = "basename", structField = true,
      doc = "The base file name of this file.")
  public final String getFilename() {
    return getExecPath().getBaseName();
  }

  /**
   * Returns the artifact owner. May be null.
   */
  @Nullable public final Label getOwner() {
    return owner.getLabel();
  }

  /**
   * Get the {@code LabelAndConfiguration} of the {@code ConfiguredTarget} that owns this artifact,
   * if it was set. Otherwise, this should be a dummy value -- either {@link
   * ArtifactOwner#NULL_OWNER} or a dummy owner set in tests. Such a dummy value should only occur
   * for source artifacts if created without specifying the owner, or for special derived artifacts,
   * such as target completion middleman artifacts, build info artifacts, and the like.
   *
   * <p>When deserializing artifacts we end up with a dummy owner. In that case,
   * it must be set using {@link #setArtifactOwner} before this method is called.
   */
  public final ArtifactOwner getArtifactOwner() {
    Preconditions.checkState(owner != DESERIALIZED_MARKER_OWNER, this);
    return owner;
  }

  /**
   * Sets the artifact owner of this artifact. Should only be called for artifacts that were created
   * through deserialization, and so their owner was unknown at the time of creation.
   */
  public final void setArtifactOwner(ArtifactOwner owner) {
    if (this.owner == DESERIALIZED_MARKER_OWNER) {
      // We tolerate multiple calls of this method to accommodate shared actions.
      this.owner = Preconditions.checkNotNull(owner, this);
    }
  }

  /**
   * Returns the root beneath which this Artifact resides, if any. This may be one of the
   * package-path entries (for source Artifacts), or one of the bin, genfiles or includes dirs
   * (for derived Artifacts). It will always be an ancestor of getPath().
   */
  public final Root getRoot() {
    return root;
  }

  /**
   * Returns the exec path of this Artifact. The exec path is a relative path
   * that is suitable for accessing this artifact relative to the execution
   * directory for this build.
   */
  public final PathFragment getExecPath() {
    return execPath;
  }

  /**
   * Returns true iff this is a source Artifact as determined by its path and
   * root relationships. Note that this will report all Artifacts in the output
   * tree, including in the include symlink tree, as non-source.
   */
  public final boolean isSourceArtifact() {
    // Reference equality is deliberate: the constructor interns
    // rootRelativePath to the execPath object exactly when they are equal.
    return execPath == rootRelativePath;
  }

  /**
   * Returns true iff this is a middleman Artifact as determined by its root.
   */
  public final boolean isMiddlemanArtifact() {
    return getRoot().isMiddlemanRoot();
  }

  /**
   * Returns whether the artifact represents a Fileset.
   */
  public boolean isFileset() {
    return false;
  }

  /**
   * Returns true iff metadata cache must return constant metadata for the
   * given artifact.
   */
  public boolean isConstantMetadata() {
    return false;
  }

  /**
   * Special artifact types.
   *
   * @see SpecialArtifact
   */
  static enum SpecialArtifactType {
    FILESET,
    CONSTANT_METADATA,
  }

  /**
   * A special kind of artifact that either is a fileset or needs special metadata caching behavior.
   *
   * <p>We subclass {@link Artifact} instead of storing the special attributes inside in order
   * to save memory. The proportion of artifacts that are special is very small, and by not having
   * to keep around the attribute for the rest we save some memory.
   */
  @Immutable
  @VisibleForTesting
  public static final class SpecialArtifact extends Artifact {
    private final SpecialArtifactType type;

    SpecialArtifact(Path path, Root root, PathFragment execPath, ArtifactOwner owner,
        SpecialArtifactType type) {
      super(path, root, execPath, owner);
      this.type = type;
    }

    @Override
    public final boolean isFileset() {
      return type == SpecialArtifactType.FILESET;
    }

    @Override
    public boolean isConstantMetadata() {
      return type == SpecialArtifactType.CONSTANT_METADATA;
    }
  }

  /**
   * Returns the relative path to this artifact relative to its root. (Useful
   * when deriving output filenames from input files, etc.)
   */
  public final PathFragment getRootRelativePath() {
    return rootRelativePath;
  }

  /**
   * Returns this.getExecPath().getPathString().
   */
  @Override
  @SkylarkCallable(name = "path", structField = true,
      doc = "The execution path of this file, relative to the execution directory. It consists of "
          + "two parts, an optional first part called the <i>root</i> (see also the <a "
          + "href=\"root.html\">root</a> module), and the second part which is the "
          + "<code>short_path</code>. The root may be empty, which it usually is for non-generated "
          + "files. For generated files it usually contains a configuration-specific path fragment that"
          + " encodes things like the target CPU architecture that was used while building said file.")
  public final String getExecPathString() {
    return getExecPath().getPathString();
  }

  /**
   * Returns getExecPathString escaped for potential use in a shell command.
   */
  public final String getShellEscapedExecPathString() {
    return ShellUtils.shellEscape(getExecPathString());
  }

  @SkylarkCallable(name = "short_path", structField = true,
      doc = "The path of this file relative to its root. This excludes the aforementioned "
          + "<i>root</i>, i.e. configuration-specific fragments of the path. This is also the path "
          + "under which the file is mapped if its in the runfiles of a binary.")
  public final String getRootRelativePathString() {
    return getRootRelativePath().getPathString();
  }

  /**
   * Returns a pretty string representation of the path denoted by this artifact, suitable for use
   * in user error messages. Artifacts beneath a root will be printed relative to that root; other
   * artifacts will be printed as an absolute path.
   *
   * <p>(The toString method is intended for developer messages since its more informative.)
   */
  public final String prettyPrint() {
    // toDetailString would probably be more useful to users, but lots of tests rely on the
    // current values.
    return rootRelativePath.toString();
  }

  @Override
  public final boolean equals(Object other) {
    if (!(other instanceof Artifact)) {
      return false;
    }
    // We don't bother to check root in the equivalence relation, because we
    // assume that no root is an ancestor of another one.
    Artifact that = (Artifact) other;
    return this.path.equals(that.path);
  }

  @Override
  public final int hashCode() {
    return path.hashCode();
  }

  @Override
  public final String toString() {
    return "Artifact:" + toDetailString();
  }

  /**
   * Returns the root-part of a given path by trimming off the end specified by
   * a given tail. Assumes that the tail is known to match, and simply relies on
   * the segment lengths.
   */
  private static PathFragment trimTail(PathFragment path, PathFragment tail) {
    return path.subFragment(0, path.segmentCount() - tail.segmentCount());
  }

  /**
   * Returns a string representing the complete artifact path information.
   */
  public final String toDetailString() {
    if (isSourceArtifact()) {
      // Source Artifact: relPath == execPath, & real path is not under execRoot
      return "[" + root + "]" + rootRelativePath;
    } else {
      // Derived Artifact: path and root are under execRoot
      PathFragment execRoot = trimTail(path.asFragment(), execPath);
      return "[[" + execRoot + "]" + root.getPath().asFragment().relativeTo(execRoot) + "]"
          + rootRelativePath;
    }
  }

  /**
   * Serializes this artifact to a string that has enough data to reconstruct the artifact.
   */
  public final String serializeToString() {
    // In theory, it should be enough to serialize execPath and rootRelativePath (which is a suffix
    // of execPath). However, in practice there is code around that uses other attributes which
    // needs cleaning up.
    String result = execPath + " /" + rootRelativePath.toString().length();
    if (getOwner() != null) {
      result += " " + getOwner();
    }
    return result;
  }

  //---------------------------------------------------------------------------
  // Static methods to assist in working with Artifacts

  /**
   * Formatter for execPath PathFragment output.
   */
  private static final Function<Artifact, PathFragment> EXEC_PATH_FORMATTER =
      new Function<Artifact, PathFragment>() {
        @Override
        public PathFragment apply(Artifact input) {
          return input.getExecPath();
        }
      };

  private static final Function<Artifact, String> ROOT_RELATIVE_PATH_STRING =
      new Function<Artifact, String>() {
        @Override
        public String apply(Artifact artifact) {
          return artifact.getRootRelativePath().getPathString();
        }
      };

  /**
   * Converts a collection of artifacts into execution-time path strings, and
   * adds those to a given collection. Middleman artifacts are ignored by this
   * method.
   */
  public static void addExecPaths(Iterable<Artifact> artifacts, Collection<String> output) {
    addNonMiddlemanArtifacts(artifacts, output, ActionInputHelper.EXEC_PATH_STRING_FORMATTER);
  }

  /**
   * Converts a collection of artifacts into the outputs computed by
   * outputFormatter and adds them to a given collection. Middleman artifacts
   * are ignored.
   */
  static <E> void addNonMiddlemanArtifacts(Iterable<Artifact> artifacts,
      Collection<? super E> output, Function<? super Artifact, E> outputFormatter) {
    for (Artifact artifact : artifacts) {
      if (MIDDLEMAN_FILTER.apply(artifact)) {
        output.add(outputFormatter.apply(artifact));
      }
    }
  }

  /**
   * Lazily converts artifacts into root-relative path strings. Middleman artifacts are ignored by
   * this method.
   */
  public static Iterable<String> toRootRelativePaths(Iterable<Artifact> artifacts) {
    return Iterables.transform(
        Iterables.filter(artifacts, MIDDLEMAN_FILTER),
        ROOT_RELATIVE_PATH_STRING);
  }

  /**
   * Lazily converts artifacts into execution-time path strings. Middleman artifacts are ignored by
   * this method.
   */
  public static Iterable<String> toExecPaths(Iterable<Artifact> artifacts) {
    return ActionInputHelper.toExecPaths(Iterables.filter(artifacts, MIDDLEMAN_FILTER));
  }

  /**
   * Converts a collection of artifacts into execution-time path strings, and
   * returns those as an immutable list. Middleman artifacts are ignored by this method.
   */
  public static List<String> asExecPaths(Iterable<Artifact> artifacts) {
    return ImmutableList.copyOf(toExecPaths(artifacts));
  }

  /**
   * Renders a collection of artifacts as execution-time paths and joins
   * them into a single string. Middleman artifacts are ignored by this method.
   */
  public static String joinExecPaths(String delimiter, Iterable<Artifact> artifacts) {
    return Joiner.on(delimiter).join(toExecPaths(artifacts));
  }

  /**
   * Renders a collection of artifacts as root-relative paths and joins
   * them into a single string. Middleman artifacts are ignored by this method.
   */
  public static String joinRootRelativePaths(String delimiter, Iterable<Artifact> artifacts) {
    return Joiner.on(delimiter).join(toRootRelativePaths(artifacts));
  }

  /**
   * Adds a collection of artifacts to a given collection, with
   * {@link MiddlemanType#AGGREGATING_MIDDLEMAN} middleman actions expanded once.
   */
  public static void addExpandedArtifacts(Iterable<Artifact> artifacts,
      Collection<? super Artifact> output, MiddlemanExpander middlemanExpander) {
    addExpandedArtifacts(artifacts, output, Functions.<Artifact>identity(), middlemanExpander);
  }

  /**
   * Converts a collection of artifacts into execution-time path strings, and
   * adds those to a given collection. Middleman artifacts for
   * {@link MiddlemanType#AGGREGATING_MIDDLEMAN} middleman actions are expanded
   * once.
   */
  @VisibleForTesting
  public static void addExpandedExecPathStrings(Iterable<Artifact> artifacts,
                                                Collection<String> output,
                                                MiddlemanExpander middlemanExpander) {
    addExpandedArtifacts(artifacts, output, ActionInputHelper.EXEC_PATH_STRING_FORMATTER,
        middlemanExpander);
  }

  /**
   * Converts a collection of artifacts into execution-time path fragments, and
   * adds those to a given collection. Middleman artifacts for
   * {@link MiddlemanType#AGGREGATING_MIDDLEMAN} middleman actions are expanded
   * once.
   */
  public static void addExpandedExecPaths(Iterable<Artifact> artifacts,
      Collection<PathFragment> output, MiddlemanExpander middlemanExpander) {
    addExpandedArtifacts(artifacts, output, EXEC_PATH_FORMATTER, middlemanExpander);
  }

  /**
   * Converts a collection of artifacts into the outputs computed by
   * outputFormatter and adds them to a given collection. Middleman artifacts
   * are expanded once.
   */
  private static <E> void addExpandedArtifacts(Iterable<Artifact> artifacts,
      Collection<? super E> output, Function<? super Artifact, E> outputFormatter,
      MiddlemanExpander middlemanExpander) {
    for (Artifact artifact : artifacts) {
      if (artifact.isMiddlemanArtifact()) {
        expandMiddlemanArtifact(artifact, output, outputFormatter, middlemanExpander);
      } else {
        output.add(outputFormatter.apply(artifact));
      }
    }
  }

  // Expands a single aggregating middleman and formats each resulting artifact
  // into "output".
  private static <E> void expandMiddlemanArtifact(Artifact middleman,
      Collection<? super E> output, Function<? super Artifact, E> outputFormatter,
      MiddlemanExpander middlemanExpander) {
    Preconditions.checkArgument(middleman.isMiddlemanArtifact());
    List<Artifact> artifacts = new ArrayList<>();
    middlemanExpander.expand(middleman, artifacts);
    for (Artifact artifact : artifacts) {
      output.add(outputFormatter.apply(artifact));
    }
  }

  /**
   * Converts a collection of artifacts into execution-time path strings, and
   * returns those as a list. Middleman artifacts are expanded once. The
   * returned list is mutable.
   */
  public static List<String> asExpandedExecPathStrings(Iterable<Artifact> artifacts,
      MiddlemanExpander middlemanExpander) {
    List<String> result = new ArrayList<>();
    addExpandedExecPathStrings(artifacts, result, middlemanExpander);
    return result;
  }

  /**
   * Converts a collection of artifacts into execution-time path fragments, and
   * returns those as a list. Middleman artifacts are expanded once. The
   * returned list is mutable.
   */
  public static List<PathFragment> asExpandedExecPaths(Iterable<Artifact> artifacts,
      MiddlemanExpander middlemanExpander) {
    List<PathFragment> result = new ArrayList<>();
    addExpandedExecPaths(artifacts, result, middlemanExpander);
    return result;
  }

  /**
   * Converts a collection of artifacts into execution-time path strings with
   * the root-break delimited with a colon ':', and adds those to a given list.
   * <pre>
   * Source: sourceRoot/rootRelative => :rootRelative
   * Derived: execRoot/rootPrefix/rootRelative => rootPrefix:rootRelative
   * </pre>
   */
  public static void addRootPrefixedExecPaths(Iterable<Artifact> artifacts,
      List<String> output) {
    for (Artifact artifact : artifacts) {
      output.add(asRootPrefixedExecPath(artifact));
    }
  }

  /**
   * Convenience method to filter the files to build for a certain filetype.
   *
   * @param artifacts the files to filter
   * @param allowedType the allowed filetype
   * @return all members of filesToBuild that are of one of the
   *     allowed filetypes
   */
  public static List<Artifact> filterFiles(Iterable<Artifact> artifacts, FileType allowedType) {
    List<Artifact> filesToBuild = new ArrayList<>();
    for (Artifact artifact : artifacts) {
      if (allowedType.matches(artifact.getFilename())) {
        filesToBuild.add(artifact);
      }
    }
    return filesToBuild;
  }

  @VisibleForTesting
  static String asRootPrefixedExecPath(Artifact artifact) {
    PathFragment execPath = artifact.getExecPath();
    PathFragment rootRel = artifact.getRootRelativePath();
    if (execPath.equals(rootRel)) {
      return ":" + rootRel.getPathString();
    } else {
      // execPath is known to end with rootRel here (class invariant).
      PathFragment rootPrefix = trimTail(execPath, rootRel);
      return rootPrefix.getPathString() + ":" + rootRel.getPathString();
    }
  }

  /**
   * Converts artifacts into their exec paths. Returns an immutable list.
   */
  public static List<PathFragment> asPathFragments(Iterable<Artifact> artifacts) {
    return ImmutableList.copyOf(Iterables.transform(artifacts, EXEC_PATH_FORMATTER));
  }

  // Placeholder owner used for artifacts created via deserialization; see
  // getArtifactOwner/setArtifactOwner.
  static final ArtifactOwner DESERIALIZED_MARKER_OWNER = new ArtifactOwner() {
    @Override
    public Label getLabel() {
      return null;
    }};

  @Override
  public boolean isImmutable() {
    return true;
  }

  @Override
  public void write(Appendable buffer, char quotationMark) {
    Printer.append(buffer, toString());  // TODO(bazel-team): implement a readable representation
  }
}
/* HttpCListener.java */ package org.xlattice.httpd; import java.io.IOException; import java.nio.ByteBuffer; import org.xlattice.transport.ConnectionListener; import org.xlattice.transport.SchedulableConnection; import org.xlattice.util.NonBlockingLog; /** * @author Jim Dixon */ import org.xlattice.CryptoException; // DEBUG import org.xlattice.crypto.SHA1Digest; // DEBUG import org.xlattice.util.StringLib; // DEBUG /** * Constructs an HTTP client connection listener. This will * normally be created by a SchedulableConnector. * * XXX THERE IS A KNOWN PROBLEM WITH MESSAGES OVER * XXX SchedulableTcpConnection.CNX_BUFSIZE, currently 64KB * * @author <A HREF="mailto:jddixon@users.sourceforge.net">Jim Dixon</A> */ public class HttpCListener extends HttpParser implements ConnectionListener { // PRIVATE MEMBERS ////////////////////////////////////////////// private int method; private String uri; // private boolean inHeaderSection = true; // CONSTRUCTORS ///////////////////////////////////////////////// public HttpCListener (HttpRequest request) throws MessageFormatException { this(request, HttpParser.HTTP_BUFSIZE, -1); } public HttpCListener (HttpRequest request, int maxBytes, int debugIndex ) { // XXX super("error.log", "debug.log"); index = debugIndex; // instance index; was counter++ if (request == null) throw new IllegalArgumentException("mull HttpRequest"); this.request = request; uri = request.getURI(); version = request.getHttpVersion(); if (maxBytes <= 0) throw new IllegalArgumentException("negative maxBytes"); // XXX FUDGE FACTOR - this crudely allows for a header dataIn = ByteBuffer.allocate(maxBytes + 512); dataIn.clear(); parserState = START_HEAD; DEBUG_MSG(" constructor: maxBytes = " + maxBytes + ", request:\n" + request.toString()); } // LOGGING ////////////////////////////////////////////////////// protected void DEBUG_MSG(String msg) { if (debugLog != null) debugLog.message("HttpCListener[" + index + "]" + msg); } protected void ERROR_MSG(String msg) { if 
(errorLog != null) errorLog.message("HttpCListener[" + index + "]" + msg); } // INTERFACE ConnectionListener ///////////////////////////////// public void setConnection (SchedulableConnection cnx, ByteBuffer buffer) { if (cnx == null || buffer == null) throw new IllegalArgumentException ("null connection or buffer"); this.cnx = cnx; cnxInBuf = buffer; dataOut = ByteBuffer.wrap(request.getByteArray()); cnx.sendData(dataOut); DEBUG_MSG(".setConnection: data sent to server"); } public void dataSent () { cnx.initiateReading(); } /** * If version 0.9, the entire response is the entityBody. * * Otherwise, the first part of the message must be parsed, * then everything after the blank line is the entity body. * * XXX For the moment assume that if version 1.x, then everything * XXX up to the blank line must be in the first packet. */ public void dataReceived () { cnx.getKey().interestOps(0); // quiet, please cnxInBuf.flip(); dataIn.clear(); // WRONG, but we're desperate DEBUG_MSG(".dataReceived, version " + VERSIONS[version] + "\n cnxInBuf.position = " + cnxInBuf.position() + "\n cnxInBuf.limit = " + cnxInBuf.limit() + "\n starts with " + firstTen(cnxInBuf.array()) + "\n dataIn.position = " + dataIn.position() + "\n dataIn.limit = " + dataIn.limit() + "\n dataIn.remaining = " + dataIn.remaining() + "\n dataIn.capacity = " + dataIn.capacity() ); int spaceAvail = dataIn.capacity() - dataIn.position(); if (cnxInBuf.limit() <= spaceAvail) { DEBUG_MSG(".dataReceived (a), put " + cnxInBuf.limit() + " bytes"); // hangs at the next statement unless we do a dataIn.clear() dataIn.put(cnxInBuf); DEBUG_MSG(".dataReceived (a), about to clear cnxInBuf"); cnxInBuf.clear(); DEBUG_MSG(".dataReceived (a), cnxInBuf cleared"); } else { DEBUG_MSG(".dataReceived (b), put " + dataIn.remaining() + " bytes"); dataIn.put(cnxInBuf.array(), 0, dataIn.remaining()); cnxInBuf.position(dataIn.remaining()); cnxInBuf.compact(); DEBUG_MSG(".dataReceived (b), after compacting" + "\n cnxInBuf.position = " + 
cnxInBuf.position() + "\n cnxInBuf.limit = " + cnxInBuf.limit() + "\n cnxInBuf.remaining = " + cnxInBuf.remaining() + "\n cnxInBuf.capacity = " + cnxInBuf.capacity() + "\n starts with " + firstTen(cnxInBuf.array()) ); } DEBUG_MSG(".dataReceived, after copying " + "\n dataIn.position = " + dataIn.position() + "\n dataIn.limit = " + dataIn.limit() + "\n dataIn.remaining = " + dataIn.remaining() + "\n dataIn.capacity = " + dataIn.capacity() + "\n starts with " + firstTen(dataIn.array()) ); dataIn.flip(); DEBUG_MSG(".dataReceived, after flipping " + "\n dataIn.position = " + dataIn.position() + "\n dataIn.limit = " + dataIn.limit() + "\n dataIn.remaining = " + dataIn.remaining() + "\n dataIn.capacity = " + dataIn.capacity() + "\n starts with " + firstTen(dataIn.array()) ); DEBUG_MSG(".dataReceived, top of switch, state " + STATES[parserState]); switch (parserState) { case START_HEAD: DEBUG_MSG(".dataReceived: START_HEAD"); case IN_HEAD: parseHeaderSection(); DEBUG_MSG(" after parseHeadSection, state is " + STATES[parserState]); if (parserState == IN_HEAD) { cnx.initiateReading(); return; } case START_ENTITY: DEBUG_MSG(".dataReceived: START_ENTITY"); if (contentLength > dataIn.capacity()) { ERROR_MSG( "INTERNAL ERROR: contentLength = " + contentLength + " but buffer capacity is only " + dataIn.capacity() ); parserState = ABORT_PARSE; _close(); return; } case IN_ENTITY: DEBUG_MSG(".dataReceived: IN_ENTITY" + "\n contentLength = " + contentLength + "\n dataIn.limit = " + dataIn.limit() ); if (version != V0_9 && dataIn.limit() < contentLength) { DEBUG_MSG(".dataReceived, IN_ENTITY, need more data"); parserState = IN_ENTITY; cnx.initiateReading(); return; } DEBUG_MSG(".dataReceived, collecting entity"); try { // collect the entity collectEntity(response, dataIn); // does the flip() DEBUG_MSG( ".dataReceived; entity collected\n starts with " + firstTen(response.getEntity())); } catch (MessageFormatException mfe) { ERROR_MSG(".dataReceived, from collectEntity: " + mfe); 
parserState = ABORT_PARSE; _close(); return; } case END_ENTITY: DEBUG_MSG(".dataReceived: END_ENTITY"); dataIn.clear(); _close(); return; case ABORT_PARSE: ERROR_MSG(".dataReceived, unexpected ABORT_PARSE"); _close(); return; default: ERROR_MSG(" INTERNAL ERROR: illegal parser state " + parserState); _close(); return; } // if (version == V0_9) { // // HTTP/0.9 /////////////////////////////////// // contentLength = -1; // yes, I'm neurotic // if (dataIn.position() >= dataIn.capacity()) { // DEBUG_MSG(".dataReceived: dataIn filled"); // try { // response = new HttpResponse(); // response.setEntity(dataIn.array()); // } catch (MessageFormatException mfe) { // /* assumed to be impossible */ // ERROR_MSG(" internal error? " + mfe); // } // _close(); // } // } else { // // HTTP/1.x /////////////////////////////////// // if (inHeaderSection) { // try { // DEBUG_MSG(".dataReceived, parsing status line"); // response = parseStatusLine(dataIn); // DEBUG_MSG(".dataReceived, status line is:\n " // + response.getStatusLine()); // // collect headers // collectHeaders(response, dataIn); // DEBUG_MSG(".dataReceived, content length = " // + contentLength); // skipEOL(dataIn); // adjusts position // dataIn.compact(); // DEBUG_MSG(".dataReceived, after compacting" // + "\n begins with " + firstTen(dataIn.array()) // + "\n position is " + dataIn.position() // ); // inHeaderSection = false; // // DEBUG_MSG(".dataReceived, have the entity"); // } catch (MessageFormatException mfe) { // ERROR_MSG(": " + mfe); // _close(); // return; // GEEP // } // } else { // DEBUG_MSG(".dataReceived, collecting entity"); // try { // // collect the entity // collectEntity(response, dataIn); // does the flip() // DEBUG_MSG( // ".dataReceived; entity collected\n starts with " // + firstTen(response.getEntity())); // } catch (MessageFormatException mfe) { // ERROR_MSG(".dataReceived, from collectEntity: " // + mfe); // } // DEBUG_MSG(".dataReceived, calling _close()"); // _close(); // } // } // GEEP } /** 
 * Consume the status line and header section now sitting at the front
 * of dataIn, leaving the buffer positioned at the entity bytes.
 * This may alter parserState. */
public void parseHeaderSection() {
    if (version == V0_9) {
        // HTTP/0.9: no status line or headers -- the whole stream is
        // the entity, so its length cannot be known up front.
        contentLength = -1;
        parserState = START_ENTITY;
        try {
            response = new HttpResponse();
        } catch (MessageFormatException mfe) {
            // no-arg constructor is not expected to throw
            ERROR_MSG("impossible exception: " + mfe);
        }
    } else {
        // HTTP/1.x ///////////////////////////////////
        // a better version would handle an incomplete
        // header section properly, setting IN_HEAD
        try {
            response = parseStatusLine(dataIn);
            DEBUG_MSG(".parseHeaderSection, status line is:\n " + response.getStatusLine());
            // collect headers
            collectHeaders(response, dataIn);
            DEBUG_MSG(".parseHeaderSection, content length = " + contentLength);
            skipEOL(dataIn);            // adjusts position
            DEBUG_MSG(".parseHeaderSection, before compacting" + "\n position = " + dataIn.position() + "\n limit = " + dataIn.limit() );
            // Shift any entity bytes already received to the front of
            // the buffer, then flip so they are readable again.
            dataIn.compact();           // sets limit to capacity
            dataIn.flip();
            DEBUG_MSG(".parseHeaderSection, after compacting" + "\n position = " + dataIn.position() + "\n limit = " + dataIn.limit() + "\n remaining = " + dataIn.remaining() + "\n contentLength = " + contentLength );
            parserState = START_ENTITY;
        } catch (MessageFormatException mfe) {
            // Malformed status line or header section: abandon parsing
            // and tear the connection down.
            ERROR_MSG(": " + mfe);
            parserState = ABORT_PARSE;
            _close();
            return;
        }
    }
}

/** Peer disconnected unexpectedly; just tear the connection down. */
public void reportDisconnect () {
    DEBUG_MSG(": unexpected disconnection");
    _close();
}

/** Unexpected I/O exception on the connection; tear it down. */
public void reportException (Exception exc) {
    DEBUG_MSG(": unexpected exception: " + exc);
    _close();
}

// PARSER METHODS ///////////////////////////////////////////////
/**
 * Parse the first part of an HTTP response, creating the data
 * structure. Any of CR, LF, or CRLF is a line ending.
 *
 * On success the buffer's position is advanced past the status line
 * (including its line ending).
 *
 * @param inBuf reference to dataIn, a convenience for testing
 * @return reference to the HttpResponse created
 * @throws MessageFormatException if the version or status code is malformed
 * @throws IllegalArgumentException if inBuf is null
 */
public HttpResponse parseStatusLine(ByteBuffer inBuf) throws MessageFormatException {
    if (inBuf == null)
        throw new IllegalArgumentException ("null in buffer");
    int curByte = 0;
    byte[] b = inBuf.array();
    int replyVersion;
    // status line: ///////////////////////////////////
    // HTML/1.x NNN response-phrase EOL
    // white space should be interpreted liberally
    curByte = expect("HTTP/1.", b, curByte);
    // the minor-version digit selects the protocol constant
    char c = castByte(b[curByte++]);
    if (c == '1')
        replyVersion = V1_1;
    else if (c == '0')
        replyVersion = V1_0;
    else
        throw new MessageFormatException("unsupported HTTP version 1." + c);
    curByte = skipW (b, curByte);
    // three-digit status code
    int code = expect3Digits(b, curByte);
    curByte += 3;
    curByte = skipW (b, curByte);   // expect and skip white space
    // everything from here to end of line is the reason phrase
    int start = curByte;
    curByte = skipToEOL (b, start);
    // NOTE(review): uses the platform default charset; reason phrases
    // are ASCII in practice, but confirm if non-ASCII replies matter.
    String s = new String (b, start, curByte - start);
    curByte = skipEOL (b, curByte); // expect and skip
    inBuf.position(curByte);        // leave buffer at the first header byte
    return new HttpResponse(replyVersion, code, s);
}

// PROPERTIES ///////////////////////////////////////////////////
SchedulableConnection getConnection() {
    return cnx;
}

/**
 * Only useful for test purposes.
 */
byte[] getDataIn() {
    if (dataIn == null)
        return null;
    else {
        // // DEBUG
        // SHA1Digest sha1 = null;
        // try {sha1 = new SHA1Digest(); } catch (CryptoException ce){};
        // sha1.update(dataIn.array());
        // byte [] hash = sha1.digest();
        // DEBUG_MSG(": " + uri +
        // " DataIn hash = " + StringLib.byteArrayToHex(hash));
        // // END
        // Returns the backing array directly, not a copy.
        return dataIn.array();
    }
}

/**
 * Only useful for test purposes.
 */
byte[] getDataOut() {
    if (dataOut == null)
        return null;
    else
        return dataOut.array();
}

// OTHER METHODS ////////////////////////////////////////////////
/** Close the underlying channel; selector-key cancellation is implicit. */
private void _close() {
    try {
        cnx.getChannel().close();   // cancels the key
    } catch (IOException e) { /* ignore */ }
}
}
/*
 * Copyright 2010-2017 Boxfuse GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flywaydb.maven.largetest;

import org.flywaydb.core.internal.util.StringUtils;
import org.junit.Test;
import org.flywaydb.core.internal.util.FileCopyUtils;
import org.w3c.dom.Document;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Large Test for the Flyway Maven Plugin.
 *
 * Each test invokes a real Maven build (installed under installDir) against
 * a test POM and asserts on the captured standard output.
 */
@SuppressWarnings({"JavaDoc"})
public abstract class MavenTestCase {
    // Root of the large-test working area; overridable via -DinstallDir.
    private String installDir = System.getProperty("installDir", "flyway-maven-plugin-largetest/target");

    /**
     * The installation directory for the test POMs.
     */
    private String pomInstallDir = new File(installDir + "/test-classes").getAbsolutePath();

    @Test
    public void regular() throws Exception {
        String stdOut = runMaven(0, "regular", "clean", "compile", "flyway:baseline", "flyway:info", "-Dflyway.initVersion=0.1", "-Dflyway.user=SA");
        assertTrue(stdOut.contains("<< Flyway Baseline >>"));
    }

    @Test
    public void migrate() throws Exception {
        String stdOut = runMaven(0, "regular", "clean", "compile", "flyway:migrate", "-Dflyway.user=SA");
        assertTrue(stdOut.contains("Successfully applied 2 migrations"));
        // The build must not trip any deprecation warnings.
        assertFalse(stdOut.contains("deprecated"));
    }

    @Test
    public void executions() throws Exception {
        String stdOut = runMaven(0, "executions", "clean", "install", "-Dflyway.user=SA");
        assertTrue(stdOut.contains("[INFO] Successfully cleaned schema \"PUBLIC\""));
        assertTrue(stdOut.contains("[echo] Property: flyway.current = 1.1"));
    }

    @Test
    public void sample() throws Exception {
        String stdOut = runMaven(0, "sample", "clean", "compile", "flyway:clean", "flyway:migrate");
        assertTrue(stdOut.contains("Successfully applied 5 migrations"));
    }

    @Test
    public void configFile() throws Exception {
        String stdOut = runMaven(0, "configfile", "clean", "compile", "flyway:clean", "flyway:migrate");
        assertTrue(stdOut.contains("Successfully applied 5 migrations"));
    }

    @Test
    public void configFileInvalid() throws Exception {
        // Expect a non-zero exit code: the referenced config file is missing.
        String stdOut = runMaven(1, "configfile", "-Dflyway.configFile=test.properties", "flyway:info");
        assertTrue(stdOut.contains("Unable to read config file"));
    }

    @Test
    public void settings() throws Exception {
        String stdOut = runMaven(0, "settings", "clean", "compile", "flyway:baseline", "flyway:info", "-s", pomInstallDir + "/settings/settings.xml");
        assertTrue(stdOut.contains("<< Flyway Baseline >>"));
    }

    /**
     * Tests the use of settings.xml with a default server id.
     */
    @Test
    public void settingsDefault() throws Exception {
        String stdOut = runMaven(0, "settings-default", "clean", "compile", "flyway:baseline", "flyway:info", "-s", pomInstallDir + "/settings-default/settings.xml");
        assertTrue(stdOut.contains("<< Flyway Baseline >>"));
    }

    /**
     * Tests the use of settings.xml with an encrypted password.
     */
    @Test
    public void settingsEncrypted() throws Exception {
        String dir = pomInstallDir + "/settings-encrypted";
        String stdOut = runMaven(0, "settings-encrypted", "clean", "sql:execute", "flyway:baseline",
                "-s=" + dir + "/settings.xml",
                "-Dsettings.security=" + dir + "/settings-security.xml");
        assertTrue(stdOut.contains("Successfully baselined schema with version: 1"));
    }

    @Test
    public void locationsElements() throws Exception {
        String stdOut = runMaven(0, "locations-elements", "clean", "compile", "flyway:migrate");
        assertTrue(stdOut.contains("Successfully applied 2 migrations"));
    }

    @Test
    public void locationsProperty() throws Exception {
        String stdOut = runMaven(0, "locations-property", "clean", "compile", "flyway:migrate");
        assertTrue(stdOut.contains("Successfully applied 2 migrations"));
    }

    @Test
    public void callbacksProperty() throws Exception {
        String stdOut = runMaven(0, "callbacks-property", "clean", "compile", "flyway:info");
        assertTrue(stdOut.contains("beforeInfo"));
        assertTrue(stdOut.contains("afterInfo"));
    }

    @Test
    public void skip() throws Exception {
        String stdOut = runMaven(0, "skip", "flyway:migrate");
        assertTrue(stdOut.contains("Skipping Flyway execution"));
    }

    /**
     * Runs Maven in this directory with these extra arguments.
     *
     * @param expectedReturnCode The expected return code for this invocation.
     * @param dir                The directory below src/test/resources to run maven in.
     * @param extraArgs          The extra arguments (if any) for Maven.
     * @return The standard output.
     * @throws Exception When the execution failed.
     */
    private String runMaven(int expectedReturnCode, String dir, String... extraArgs) throws Exception {
        String flywayVersion = System.getProperty("flywayVersion", getPomVersion());

        // Windows needs the .bat launcher; everything else uses the shell script.
        String extension = "";
        if (System.getProperty("os.name").startsWith("Windows")) {
            extension = ".bat";
        }

        String mavenHome = installDir + "/install/apache-maven-" + getMavenVersion();

        List<String> args = new ArrayList<String>();
        args.add(mavenHome + "/bin/mvn" + extension);
        args.add("-Dflyway.version=" + flywayVersion);
        args.add("-X");
        args.addAll(Arrays.asList(extraArgs));

        // stderr is merged into stdout so a single stream captures everything.
        ProcessBuilder builder = new ProcessBuilder(args);
        builder.directory(new File(pomInstallDir + "/" + dir));
        builder.redirectErrorStream(true);
        builder.environment().put("M2_HOME", mavenHome);

        System.out.println("Executing: " + StringUtils.collectionToDelimitedString(builder.command(), " "));
        Process process = builder.start();
        // Drain stdout fully before waiting, to avoid the child blocking on a full pipe.
        String stdOut = FileCopyUtils.copyToString(new InputStreamReader(process.getInputStream(), "UTF-8"));
        int returnCode = process.waitFor();

        System.out.print(stdOut);

        assertEquals("Unexpected return code", expectedReturnCode, returnCode);

        return stdOut;
    }

    /**
     * Retrieves the version embedded in the project pom. Useful for running these tests in IntelliJ.
     *
     * @return The POM version.
     */
    private String getPomVersion() {
        try {
            File pom = new File("pom.xml");
            if (!pom.exists()) {
                // No pom.xml in the working directory (e.g. run from an odd cwd).
                return "unknown";
            }

            XPath xPath = XPathFactory.newInstance().newXPath();
            DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
            documentBuilderFactory.setNamespaceAware(false);
            Document document = documentBuilderFactory.newDocumentBuilder().parse(pom);
            return xPath.evaluate("/project/version", document);
        } catch (Exception e) {
            throw new IllegalStateException("Unable to read POM version", e);
        }
    }

    /**
     * @return The Maven version to test against.
     */
    protected abstract String getMavenVersion();
}
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.server.admin.client.mvp.view.widget;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.gwt.dom.client.Element;
import com.google.gwt.editor.client.IsEditor;
import com.google.gwt.editor.client.adapters.TakesValueEditor;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.text.shared.Renderer;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Focusable;
import com.google.gwt.user.client.ui.HasEnabled;
import com.google.gwt.user.client.ui.HasValue;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.view.client.ProvidesKey;
import com.google.gwt.view.client.SimpleKeyProvider;

/**
 * A multi-select {@link ListBox} whose value is the list of currently
 * selected items. Items are rendered via a {@link Renderer} and identified
 * by a {@link ProvidesKey}, so value objects need not implement equals().
 */
public class MultiValueListBox<T> extends Composite implements Focusable, HasValue<List<T>>,
        HasEnabled, IsEditor<TakesValueEditor<List<T>>> {

    /** All acceptable values, in list-box item order. */
    private final List<T> values = new ArrayList<T>();
    /** Maps each value's key to its index in {@link #values} / the list box. */
    private final Map<Object, Integer> valueKeyToIndex = new HashMap<Object, Integer>();
    private final Renderer<T> renderer;
    private final ProvidesKey<T> keyProvider;

    private TakesValueEditor<List<T>> editor;
    /** The currently selected values; may be null before the first setValue. */
    private List<T> value;

    public MultiValueListBox(Renderer<T> renderer) {
        this(renderer, new SimpleKeyProvider<T>());
    }

    public MultiValueListBox(Renderer<T> renderer, ProvidesKey<T> keyProvider) {
        this.keyProvider = keyProvider;
        this.renderer = renderer;
        ListBox listBox = new ListBox();
        listBox.setMultipleSelect(true);
        initWidget(listBox);
        // Mirror user selection changes back into the value and fire events.
        getListBox().addChangeHandler(new ChangeHandler() {
            public void onChange(ChangeEvent event) {
                List<T> newValue = new ArrayList<>();
                for (int i = 0; i < values.size(); i++) {
                    if (getListBox().isItemSelected(i)) {
                        newValue.add(values.get(i));
                    }
                }
                setValue(newValue, true);
            }
        });
    }

    public HandlerRegistration addValueChangeHandler(ValueChangeHandler<List<T>> handler) {
        return addHandler(handler, ValueChangeEvent.getType());
    }

    /**
     * Returns a {@link TakesValueEditor} backed by the ValueListBox.
     */
    public TakesValueEditor<List<T>> asEditor() {
        if (editor == null) {
            editor = TakesValueEditor.of(this);
        }
        return editor;
    }

    @Override
    public int getTabIndex() {
        return getListBox().getTabIndex();
    }

    public List<T> getValue() {
        return value;
    }

    @Override
    public boolean isEnabled() {
        return getListBox().isEnabled();
    }

    /**
     * Replaces the set of acceptable values and rebuilds the list box,
     * then re-applies the current selection.
     */
    public void setAcceptableValues(Collection<T> newValues) {
        values.clear();
        valueKeyToIndex.clear();
        ListBox listBox = getListBox();
        listBox.clear();
        for (T nextNewValue : newValues) {
            addValue(nextNewValue);
        }
        updateListBox();
    }

    @Override
    public void setAccessKey(char key) {
        getListBox().setAccessKey(key);
    }

    @Override
    public void setEnabled(boolean enabled) {
        getListBox().setEnabled(enabled);
    }

    @Override
    public void setFocus(boolean focused) {
        getListBox().setFocus(focused);
    }

    @Override
    public void setTabIndex (int index) {
        getListBox().setTabIndex(index);
    }

    /**
     * Set the value and display it in the select element. Add the value to the
     * acceptable set if it is not already there.
     */
    public void setValue(List<T> value) {
        setValue(value, false);
    }

    public void setValue(List<T> value, boolean fireEvents) {
        if (value == this.value || (this.value != null && this.value.equals(value))) {
            return;
        }

        List<T> before = this.value;
        this.value = value;
        updateListBox();

        if (fireEvents) {
            ValueChangeEvent.fireIfNotEqual(this, before, value);
        }
    }

    public List<T> getValues() {
        return values;
    }

    /**
     * Registers a new acceptable value and appends it to the list box.
     *
     * @throws IllegalArgumentException if a value with the same key exists
     */
    private void addValue(T value) {
        Object key = keyProvider.getKey(value);
        if (valueKeyToIndex.containsKey(key)) {
            throw new IllegalArgumentException("Duplicate value: " + value);
        }

        valueKeyToIndex.put(key, values.size());
        values.add(value);
        addListBoxItem(renderer.render(value));
        assert values.size() == getListBox().getItemCount();
    }

    private void addListBoxItem(String item) {
        addItemWithTitle(getListBox().getElement(), item, item);
    }

    // Adds an <option> with a tooltip (title) equal to its text; plain
    // ListBox.addItem offers no way to set the title attribute.
    private static native void addItemWithTitle(Element element, String name, String value) /*-{
        var opt = $doc.createElement("option");
        opt.title = name;
        opt.text = name;
        opt.value = value;
        element.options.add(opt);
    }-*/;

    private ListBox getListBox() {
        return (ListBox) getWidget();
    }

    /**
     * Synchronizes the list box selection with {@link #value}.
     *
     * BUG FIX: the previous implementation only selected the items present in
     * the new value and never deselected anything, so items removed from the
     * value (or selections left over from a prior value) stayed highlighted
     * and the widget's display disagreed with getValue(). We now clear the
     * selection first, then select exactly the items in the current value.
     */
    private void updateListBox() {
        ListBox listBox = getListBox();
        // Deselect everything before applying the new selection.
        for (int i = 0; i < listBox.getItemCount(); i++) {
            listBox.setItemSelected(i, false);
        }
        if (value != null) {
            for (T item : value) {
                Object key = keyProvider.getKey(item);
                Integer index = valueKeyToIndex.get(key);
                if (index == null) {
                    // Unknown value: add it to the acceptable set on the fly.
                    addValue(item);
                    index = valueKeyToIndex.get(key);
                }
                listBox.setItemSelected(index, true);
            }
        }
    }
}
/* * Copyright (c) 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 6303187 * @summary Test that no locks are held when a monitor attribute is sampled * or notif delivered. 
 * @author Eamonn McManus
 * @run clean CounterMonitorDeadlockTest
 * @run build CounterMonitorDeadlockTest
 * @run main CounterMonitorDeadlockTest 1
 * @run main CounterMonitorDeadlockTest 2
 * @run main CounterMonitorDeadlockTest 3
 * @run main CounterMonitorDeadlockTest 4
 */

import java.lang.management.ManagementFactory;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.Attribute;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;
import javax.management.monitor.CounterMonitor;
import javax.management.monitor.CounterMonitorMBean;

public class CounterMonitorDeadlockTest {

    // Entry point for jtreg: the single argument selects one of the
    // four scenarios in testCases (1-based).
    public static void main(String[] args) throws Exception {
        if (args.length != 1)
            throw new Exception("Arg should be test number");
        int testNo = Integer.parseInt(args[0]) - 1;
        TestCase test = testCases[testNo];
        System.out.println("Test: " + test.getDescription());
        test.run();
        System.out.println("Test passed");
    }

    // Where the "sensitive" monitor operation is performed: from within
    // the monitored getAttribute call, or from a threshold notification.
    private static enum When {IN_GET_ATTRIBUTE, IN_NOTIFY};

    private static abstract class TestCase {
        TestCase(String description, When when) {
            this.description = description;
            this.when = when;
        }

        void run() throws Exception {
            final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            final ObjectName observedName = new ObjectName("a:b=c");
            final ObjectName monitorName = new ObjectName("a:type=Monitor");
            mbs.registerMBean(new CounterMonitor(), monitorName);
            final CounterMonitorMBean monitorProxy =
                JMX.newMBeanProxy(mbs, monitorName, CounterMonitorMBean.class);
            final TestMBean observedProxy =
                JMX.newMBeanProxy(mbs, observedName, TestMBean.class);

            // The subclass-defined operation that would deadlock if the
            // monitor sampled the attribute (or delivered the notif)
            // while holding its internal lock.
            final Runnable sensitiveThing = new Runnable() {
                public void run() {
                    doSensitiveThing(monitorProxy, observedName);
                }
            };

            final Runnable nothing = new Runnable() {
                public void run() {}
            };

            final Runnable withinGetAttribute =
                (when == When.IN_GET_ATTRIBUTE) ? sensitiveThing : nothing;

            mbs.registerMBean(new Test(withinGetAttribute), observedName);
            monitorProxy.addObservedObject(observedName);
            monitorProxy.setObservedAttribute("Thing");
            monitorProxy.setInitThreshold(100);
            monitorProxy.setGranularityPeriod(10L); // 10 ms
            monitorProxy.setNotify(true);
            monitorProxy.start();

            // Poll until the monitor has sampled the attribute at least
            // once, for at most ~5 seconds; no progress implies deadlock.
            final int initGetCount = observedProxy.getGetCount();
            int getCount = initGetCount;
            for (int i = 0; i < 500; i++) { // 500 * 10 = 5 seconds
                getCount = observedProxy.getGetCount();
                if (getCount != initGetCount)
                    break;
                Thread.sleep(10);
            }

            if (getCount <= initGetCount)
                throw new Exception("Test failed: presumable deadlock");
            // This won't show up as a deadlock in CTRL-\ or in
            // ThreadMXBean.findDeadlockedThreads(), because they don't
            // see that thread A is waiting for thread B (B.join()), and
            // thread B is waiting for a lock held by thread A

            // Now we know the monitor has observed the initial value,
            // so if we want to test notify behaviour we can trigger by
            // exceeding the threshold.
            if (when == When.IN_NOTIFY) {
                final AtomicInteger notifCount = new AtomicInteger();
                final NotificationListener listener = new NotificationListener() {
                    public void handleNotification(Notification n, Object h) {
                        // Run the sensitive operation on a separate thread
                        // and join it, so the notif-delivery thread blocks
                        // on it; a held monitor lock would deadlock here.
                        Thread t = new Thread(sensitiveThing);
                        t.start();
                        try {
                            t.join();
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                        notifCount.incrementAndGet();
                    }
                };
                mbs.addNotificationListener(monitorName, listener, null, null);
                // Exceed the threshold of 100 to trigger a notification.
                observedProxy.setThing(1000);
                for (int i = 0; i < 500 && notifCount.get() == 0; i++)
                    Thread.sleep(10);
                if (notifCount.get() == 0)
                    throw new Exception("Test failed: presumable deadlock");
            }
        }

        abstract void doSensitiveThing(CounterMonitorMBean monitorProxy,
                                       ObjectName observedName);

        String getDescription() {
            return description;
        }

        private final String description;
        private final When when;
    }

    // The four scenarios, selected by the command-line argument in main.
    private static final TestCase[] testCases = {
        new TestCase("Remove monitored MBean within monitored getAttribute",
                     When.IN_GET_ATTRIBUTE) {
            @Override
            void doSensitiveThing(CounterMonitorMBean monitorProxy,
                                  ObjectName observedName) {
                monitorProxy.removeObservedObject(observedName);
            }
        },
        new TestCase("Stop monitor within monitored getAttribute",
                     When.IN_GET_ATTRIBUTE) {
            @Override
            void doSensitiveThing(CounterMonitorMBean monitorProxy,
                                  ObjectName observedName) {
                monitorProxy.stop();
            }
        },
        new TestCase("Remove monitored MBean within threshold listener",
                     When.IN_NOTIFY) {
            @Override
            void doSensitiveThing(CounterMonitorMBean monitorProxy,
                                  ObjectName observedName) {
                monitorProxy.removeObservedObject(observedName);
            }
        },
        new TestCase("Stop monitor within threshold listener",
                     When.IN_NOTIFY) {
            @Override
            void doSensitiveThing(CounterMonitorMBean monitorProxy,
                                  ObjectName observedName) {
                monitorProxy.stop();
            }
        },
    };

    public static interface TestMBean {
        public int getThing();
        public void setThing(int thing);
        public int getGetCount();
    }

    public static class Test implements TestMBean {
        public Test(Runnable runWithinGetAttribute) {
            this.runWithinGetAttribute = runWithinGetAttribute;
        }

        // Sampled by the monitor; runs the (possibly sensitive) action on
        // a separate thread and joins it before returning the value.
        public int getThing() {
            Thread t = new Thread(runWithinGetAttribute);
            t.start();
            try {
                t.join();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            getCount++;
            return thing;
        }

        public void setThing(int thing) {
            this.thing = thing;
        }

        public int getGetCount() {
            return getCount;
        }

        private final Runnable runWithinGetAttribute;
        private volatile int getCount;
        private volatile int thing;
    }
}
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.clustercontroller.core;

import com.yahoo.jrt.Supervisor;
import com.yahoo.jrt.Transport;
import com.yahoo.vdslib.state.Node;
import com.yahoo.vdslib.state.NodeState;
import com.yahoo.vdslib.state.NodeType;
import com.yahoo.vdslib.state.State;
import com.yahoo.vespa.clustercontroller.core.database.DatabaseHandler;
import com.yahoo.vespa.clustercontroller.core.database.ZooKeeperDatabaseFactory;
import com.yahoo.vespa.clustercontroller.utils.util.NoMetricReporter;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.mapOf;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.setOf;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.usage;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.createResourceUsageJson;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

// Tests that cluster-level feed blocking engages/disengages based on
// per-node resource usage reports and the configured limits.
public class ClusterFeedBlockTest extends FleetControllerTest {

    private static final int NODE_COUNT = 3;

    // TODO dedupe fixture and setup stuff with other tests
    private Supervisor supervisor;
    private FleetController ctrl;
    private DummyCommunicator communicator;

    @Before
    public void setUp() {
        supervisor = new Supervisor(new Transport());
    }

    // Wires up a FleetController with a DummyCommunicator and ticks it
    // until all nodes are reported Up.
    private void initialize(FleetControllerOptions options) throws Exception {
        List<Node> nodes = new ArrayList<>();
        for (int i = 0; i < options.nodes.size(); ++i) {
            nodes.add(new Node(NodeType.STORAGE, i));
            nodes.add(new Node(NodeType.DISTRIBUTOR, i));
        }

        var context = new TestFleetControllerContext(options);
        communicator = new DummyCommunicator(nodes, timer);
        var metricUpdater = new MetricUpdater(new NoMetricReporter(), options.fleetControllerIndex, options.clusterName);
        var eventLog = new EventLog(timer, metricUpdater);
        var cluster = new ContentCluster(options.clusterName, options.nodes, options.storageDistribution);
        var stateGatherer = new NodeStateGatherer(timer, timer, eventLog);
        var database = new DatabaseHandler(context, new ZooKeeperDatabaseFactory(context), timer, options.zooKeeperServerAddress, timer);
        var stateGenerator = new StateChangeHandler(context, timer, eventLog);
        var stateBroadcaster = new SystemStateBroadcaster(context, timer, timer);
        var masterElectionHandler = new MasterElectionHandler(context, options.fleetControllerIndex, options.fleetControllerCount, timer, timer);
        ctrl = new FleetController(context, timer, eventLog, cluster, stateGatherer, communicator, null, null, communicator, database, stateGenerator, stateBroadcaster, masterElectionHandler, metricUpdater, options);

        ctrl.tick();
        markAllNodesAsUp(options);
        ctrl.tick();
    }

    private void markAllNodesAsUp(FleetControllerOptions options) throws Exception {
        for (int i = 0; i < options.nodes.size(); ++i) {
            communicator.setNodeState(new Node(NodeType.STORAGE, i), State.UP, "");
            communicator.setNodeState(new Node(NodeType.DISTRIBUTOR, i), State.UP, "");
        }
        ctrl.tick();
    }

    // NOTE(review): this overrides the base-class tearDown without
    // @Override/@After; in JUnit 4 an overriding method does not inherit
    // the superclass's @After annotation -- confirm FleetControllerTest
    // still gets this invoked after each test.
    public void tearDown() throws Exception {
        if (supervisor != null) {
            supervisor.transport().shutdown().join();
            supervisor = null;
        }
        super.tearDown();
    }

    // Builds options for a flat NODE_COUNT cluster with the given
    // resource-type -> limit map and hysteresis noise level.
    private static FleetControllerOptions createOptions(Map<String, Double> feedBlockLimits, double clusterFeedBlockNoiseLevel) {
        FleetControllerOptions options = defaultOptions("mycluster");
        options.setStorageDistribution(DistributionBuilder.forFlatCluster(NODE_COUNT));
        options.nodes = new HashSet<>(DistributionBuilder.buildConfiguredNodes(NODE_COUNT));
        options.clusterFeedBlockEnabled = true;
        options.clusterFeedBlockLimit = Map.copyOf(feedBlockLimits);
        options.clusterFeedBlockNoiseLevel = clusterFeedBlockNoiseLevel;
        return options;
    }

    private static FleetControllerOptions createOptions(Map<String, Double> feedBlockLimits) {
        return createOptions(feedBlockLimits, 0.0);
    }

    // Sends a host-info report with the given resource usages from one
    // storage node and ticks the controller so it is processed.
    private void reportResourceUsageFromNode(int nodeIndex, State nodeState, Set<FeedBlockUtil.UsageDetails> resourceUsages) throws Exception {
        String hostInfo = createResourceUsageJson(resourceUsages);
        communicator.setNodeState(new Node(NodeType.STORAGE, nodeIndex), new NodeState(NodeType.STORAGE, nodeState), hostInfo);
        ctrl.tick();
    }

    private void reportResourceUsageFromNode(int nodeIndex, Set<FeedBlockUtil.UsageDetails> resourceUsages) throws Exception {
        reportResourceUsageFromNode(nodeIndex, State.UP, resourceUsages);
    }

    @Test
    public void cluster_feed_can_be_blocked_and_unblocked_by_single_node() throws Exception {
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        // Too much cheese in use, must block feed!
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
        assertTrue(ctrl.getClusterStateBundle().clusterFeedIsBlocked()); // TODO check desc?

        // Wine usage has gone up too, we should remain blocked
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.5)));
        assertTrue(ctrl.getClusterStateBundle().clusterFeedIsBlocked()); // TODO check desc?

        // Back to normal wine and cheese levels
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.6), usage("wine", 0.3)));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
    }

    @Test
    public void cluster_feed_block_state_is_recomputed_when_options_are_updated() throws Exception {
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
        assertTrue(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        // Increase cheese allowance. Should now automatically unblock since reported usage is lower.
        ctrl.updateOptions(createOptions(mapOf(usage("cheese", 0.9), usage("wine", 0.4))));
        ctrl.tick(); // Options propagation
        ctrl.tick(); // State recomputation
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
    }

    @Test
    public void cluster_feed_block_state_is_recomputed_when_resource_block_set_differs() throws Exception {
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
        var bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());

        // Adding a second exhausted resource changes the block set, so the
        // description must be recomputed to include both.
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.5)));
        bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700), " +
                     "wine on node 1 [unknown hostname] (0.500 > 0.400)",
                     bundle.getFeedBlock().get().getDescription());
    }

    @Test
    public void cluster_feed_block_state_is_not_recomputed_when_only_resource_usage_levels_differ() throws Exception {
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
        var bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());

        // 80% -> 90%, should not trigger new state.
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.9), usage("wine", 0.3)));
        bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        // Description still reflects the original 0.800 sample.
        assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());
    }

    @Test
    public void cluster_feed_block_state_is_recomputed_when_usage_enters_hysteresis_range() throws Exception {
        // Noise level 0.1 gives an effective unblock limit 0.1 below each block limit.
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4)), 0.1));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.75), usage("wine", 0.3)));
        var bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.750 > 0.700)", bundle.getFeedBlock().get().getDescription());

        // Dropping to 0.68 is still within the hysteresis band (> 0.6), so feed stays blocked.
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.68), usage("wine", 0.3)));
        bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        // FIXME Effective limit is modified by hysteresis but due to how we check state deltas this
        //  is not discovered here. Still correct in terms of what resources are blocked or not, but
        //  the description is not up to date here.
        assertEquals("cheese on node 1 [unknown hostname] (0.750 > 0.700)", bundle.getFeedBlock().get().getDescription());

        // Trigger an explicit recompute by adding a separate resource exhaustion
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.67), usage("wine", 0.5)));
        bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.670 > 0.600), " +
                     "wine on node 1 [unknown hostname] (0.500 > 0.400)", // Not under hysteresis
                     bundle.getFeedBlock().get().getDescription());

        // Wine usage drops beyond hysteresis range, should be unblocked immediately.
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.61), usage("wine", 0.2)));
        bundle = ctrl.getClusterStateBundle();
        assertTrue(bundle.clusterFeedIsBlocked());
        assertEquals("cheese on node 1 [unknown hostname] (0.610 > 0.600)", bundle.getFeedBlock().get().getDescription());

        // Cheese now drops below hysteresis range, should be unblocked as well.
        reportResourceUsageFromNode(1, setOf(usage("cheese", 0.59), usage("wine", 0.2)));
        bundle = ctrl.getClusterStateBundle();
        assertFalse(bundle.clusterFeedIsBlocked());
    }

    @Test
    public void unavailable_nodes_are_not_considered_when_computing_feed_blocked_state() throws Exception {
        initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4)), 0.1));
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());

        reportResourceUsageFromNode(1, State.DOWN, setOf(usage("cheese", 0.8), usage("wine", 0.5)));
        // Not blocked, node with exhaustion is marked as Down
        assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
    }

    // FIXME implicit changes in limits due to hysteresis adds spurious exhaustion remove+add node event pair
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.roots.ui.configuration.libraries;

import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LibraryOrderEntry;
import com.intellij.openapi.roots.ModuleRootModel;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.impl.ModuleLibraryTable;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.impl.libraries.LibraryImpl;
import com.intellij.openapi.roots.impl.libraries.LibraryTableImplUtil;
import com.intellij.openapi.roots.libraries.*;
import com.intellij.openapi.roots.ui.configuration.classpath.ClasspathPanel;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesModifiableModel;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureValidator;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.ArchiveFileSystem;
import com.intellij.util.ParameterizedRunnable;
import consulo.ide.settings.impl.ProjectStructureSettingsUtil;
import consulo.logging.Logger;
import consulo.roots.ui.configuration.ModulesConfigurator;
import consulo.ui.image.Image;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;

/**
 * Static helpers for creating, copying and attaching libraries in the project-structure UI.
 *
 * @author nik
 */
public class LibraryEditingUtil {
  private static final Logger LOG = Logger.getInstance(LibraryEditingUtil.class);

  private LibraryEditingUtil() {
  }

  /**
   * Returns {@code true} if {@code table} already contains a library named {@code libraryName}.
   * For a {@link LibrariesModifiableModel} the (possibly edited, not yet committed) name from the
   * library editor is compared instead of the committed library name.
   */
  public static boolean libraryAlreadyExists(LibraryTable.ModifiableModel table, String libraryName) {
    for (Iterator<Library> it = table.getLibraryIterator(); it.hasNext(); ) {
      final Library library = it.next();
      final String libName;
      if (table instanceof LibrariesModifiableModel) {
        // Pending renames live in the editor, not in the library itself.
        libName = ((LibrariesModifiableModel)table).getLibraryEditor(library).getName();
      }
      else {
        libName = library.getName();
      }
      if (libraryName.equals(libName)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Produces a library name that does not clash with any library in {@code table} by appending
   * an increasing numeric suffix to {@code baseName} ("lib", "lib1", "lib2", ...).
   */
  public static String suggestNewLibraryName(LibraryTable.ModifiableModel table, final String baseName) {
    String candidateName = baseName;
    int idx = 1;
    while (libraryAlreadyExists(table, candidateName)) {
      candidateName = baseName + (idx++);
    }
    return candidateName;
  }

  /**
   * Returns a condition accepting only libraries that are not yet referenced by {@code rootModel}.
   * Both the library itself and its "source" library (for module-level copies) are considered,
   * so a library is rejected if either form is already in the module's order entries.
   */
  public static Condition<Library> getNotAddedLibrariesCondition(final ModuleRootModel rootModel) {
    final OrderEntry[] orderEntries = rootModel.getOrderEntries();
    final Set<Library> result = new HashSet<>(orderEntries.length);
    for (OrderEntry orderEntry : orderEntries) {
      if (orderEntry instanceof LibraryOrderEntry && orderEntry.isValid()) {
        final LibraryImpl library = (LibraryImpl)((LibraryOrderEntry)orderEntry).getLibrary();
        if (library != null) {
          // Prefer the source library so project/global libraries match their module-level copies.
          final Library source = library.getSource();
          result.add(source != null ? source : library);
        }
      }
    }
    return library -> {
      if (result.contains(library)) return false;
      if (library instanceof LibraryImpl) {
        final Library source = ((LibraryImpl)library).getSource();
        if (source != null && result.contains(source)) return false;
      }
      return true;
    };
  }

  /**
   * Copies the properties and all roots of {@code from} into {@code target}, remapping local root
   * paths through {@code rootMapping} (key: local path, value: replacement path). Paths inside
   * archives (after the {@code !/} separator) are preserved; unmapped roots keep their original URL.
   */
  public static void copyLibrary(LibraryEx from, Map<String, String> rootMapping, LibraryEx.ModifiableModelEx target) {
    target.setProperties(from.getProperties());
    for (OrderRootType type : OrderRootType.getAllTypes()) {
      final String[] urls = from.getUrls(type);
      for (String url : urls) {
        final String protocol = VirtualFileManager.extractProtocol(url);
        if (protocol == null) continue;
        final String fullPath = VirtualFileManager.extractPath(url);
        final int sep = fullPath.indexOf(ArchiveFileSystem.ARCHIVE_SEPARATOR);
        String localPath;
        String pathInJar;
        if (sep != -1) {
          // Split "path/to/file.jar!/entry" into the on-disk part and the in-archive part.
          localPath = fullPath.substring(0, sep);
          pathInJar = fullPath.substring(sep);
        }
        else {
          localPath = fullPath;
          pathInJar = "";
        }
        final String targetPath = rootMapping.get(localPath);
        String targetUrl = targetPath != null ? VirtualFileManager.constructUrl(protocol, targetPath + pathInJar) : url;
        if (from.isJarDirectory(url, type)) {
          target.addJarDirectory(targetUrl, false, type);
        }
        else {
          target.addRoot(targetUrl, type);
        }
      }
    }
  }

  /**
   * Returns the presentation for the library table identified by {@code level}
   * (module level or one of the registered project/application tables).
   */
  public static LibraryTablePresentation getLibraryTablePresentation(@Nonnull Project project, @Nonnull String level) {
    if (level.equals(LibraryTableImplUtil.MODULE_LEVEL)) {
      return ModuleLibraryTable.MODULE_LIBRARY_TABLE_PRESENTATION;
    }
    final LibraryTable table = LibraryTablesRegistrar.getInstance().getLibraryTableByLevel(level, project);
    LOG.assertTrue(table != null, level);
    return table.getPresentation();
  }

  /**
   * Returns the library types applicable to {@code classpathPanel}'s module.
   * The list always starts with {@code null}, which represents the default (untyped) library.
   */
  public static List<LibraryType> getSuitableTypes(ClasspathPanel classpathPanel) {
    List<LibraryType> suitableTypes = new ArrayList<>();
    suitableTypes.add(null); // null == plain/default library type
    for (LibraryType libraryType : LibraryType.EP_NAME.getExtensionList()) {
      if (libraryType.getCreateActionName() != null && libraryType.isAvailable(classpathPanel.getRootModel())) {
        suitableTypes.add(libraryType);
      }
    }
    return suitableTypes;
  }

  /** Returns {@code true} if any non-default library type is available for {@code panel}. */
  public static boolean hasSuitableTypes(ClasspathPanel panel) {
    // getSuitableTypes always contains the null (default) entry, hence "> 1".
    return getSuitableTypes(panel).size() > 1;
  }

  /**
   * Builds a popup step letting the user pick a library type (or the default type, shown for the
   * {@code null} entry) and invoking {@code action} with the selection.
   */
  public static BaseListPopupStep<LibraryType> createChooseTypeStep(final ClasspathPanel classpathPanel,
                                                                    final ParameterizedRunnable<LibraryType> action) {
    return new BaseListPopupStep<LibraryType>(IdeBundle.message("popup.title.select.library.type"), getSuitableTypes(classpathPanel)) {
      @Nonnull
      @Override
      public String getTextFor(LibraryType value) {
        String createActionName = value != null ? value.getCreateActionName() : null;
        return createActionName != null ? createActionName : IdeBundle.message("create.default.library.type.action.name");
      }

      @Override
      public Image getIconFor(LibraryType aValue) {
        return aValue != null ? aValue.getIcon() : AllIcons.Nodes.PpLib;
      }

      @Override
      public PopupStep onChosen(final LibraryType selectedValue, boolean finalChoice) {
        return doFinalStep(() -> action.run(selectedValue));
      }
    };
  }

  /**
   * Returns the modules to which {@code library} (of optional {@code kind}) could still be added:
   * modules whose root model supports the library type and does not already reference the library.
   */
  public static List<Module> getSuitableModules(@Nonnull Project project, final @Nullable LibraryKind kind, @Nullable Library library) {
    final List<Module> modules = new ArrayList<>();
    LibraryType type = kind == null ? null : LibraryType.findByKind(kind);
    ProjectStructureSettingsUtil util = (ProjectStructureSettingsUtil)ShowSettingsUtil.getInstance();

    ModulesConfigurator modulesModel = util.getModulesModel(project);

    for (Module module : modulesModel.getModules()) {
      final ModuleRootModel rootModel = modulesModel.getRootModel(module);
      if (type != null && !type.isAvailable(rootModel)) {
        continue;
      }

      if (library != null) {
        if (!getNotAddedLibrariesCondition(rootModel).value(library)) {
          continue;
        }
      }

      modules.add(module);
    }
    return modules;
  }

  /** Delegates to {@link ProjectStructureValidator} to show the "add to dependencies" dialog. */
  public static void showDialogAndAddLibraryToDependencies(@Nonnull Library library, @Nonnull Project project, boolean allowEmptySelection) {
    ProjectStructureValidator.showDialogAndAddLibraryToDependencies(library, project, allowEmptySelection);
  }
}
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.dempsy.cluster.local;

import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.dempsy.cluster.ClusterInfoException;
import net.dempsy.cluster.ClusterInfoSession;
import net.dempsy.cluster.ClusterInfoSessionFactory;
import net.dempsy.cluster.ClusterInfoWatcher;
import net.dempsy.cluster.DirMode;
import net.dempsy.cluster.DisruptibleSession;

/**
 * This class is for running all cluster management from within the same vm, and for the same vm. It's meant to mimic the Zookeeper implementation such that callbacks are not made to watchers registered to
 * sessions through which changes are made.
 */
public class LocalClusterSessionFactory implements ClusterInfoSessionFactory {
    private static Logger LOGGER = LoggerFactory.getLogger(LocalClusterSessionFactory.class);

    // All sessions created by any factory instance; guarded by synchronizing on the list itself.
    protected static List<LocalSession> currentSessions = new ArrayList<LocalSession>();
    // When true, the shared tree is reset once the last open session is stopped.
    protected final boolean cleanupAfterLastSession;

    public LocalClusterSessionFactory(final boolean cleanupAfterLastSession) {
        this.cleanupAfterLastSession = cleanupAfterLastSession;
    }

    public LocalClusterSessionFactory() {
        this(true);
    }

    // ====================================================================
    // This section pertains to the management of the tree information

    // The entire "filesystem": absolute path -> Entry. Guarded by the class monitor
    // (all mutating helpers are static synchronized or synchronize on the class).
    private static Map<String, Entry> entries = new HashMap<String, Entry>();
    static {
        reset();
    } /// initially add the root.

    /** Clears the tree and re-creates the root ("/") entry. */
    public static synchronized void reset() {
        entries.clear();
        entries.put("/", new Entry(null, null));
    }

    /**
     * Stops every still-open session (without notifying watchers) and resets the tree.
     * Logs an error if anything was still open, since that usually indicates a test leak.
     */
    public static synchronized void completeReset() {
        synchronized (currentSessions) {
            if (!isReset())
                LOGGER.error("LocalClusterSessionFactory beging reset with sessions or entries still open.");
            // Copy first: session.stop() mutates currentSessions.
            final List<LocalSession> sessions = new ArrayList<LocalSession>(currentSessions.size());
            sessions.addAll(currentSessions);
            currentSessions.clear();
            for (final LocalSession session : sessions)
                session.stop(false);
            reset();
        }
    }

    /** True when no sessions are open and only the root entry remains. */
    public static boolean isReset() {
        return currentSessions.size() == 0 && entries.size() == 1;
    }

    /**
     * Atomically drains (returns and clears) the requested watcher sets of {@code ths}.
     * Watchers are one-shot, as in ZooKeeper: once collected for firing they must re-register.
     */
    private static synchronized Set<LocalSession.WatcherProxy> ogatherWatchers(final Entry ths, final boolean node, final boolean child) {
        final Set<LocalSession.WatcherProxy> twatchers = new HashSet<LocalSession.WatcherProxy>();
        if (node) {
            twatchers.addAll(ths.nodeWatchers);
            ths.nodeWatchers = new HashSet<LocalSession.WatcherProxy>();
        }
        if (child) {
            twatchers.addAll(ths.childWatchers);
            ths.childWatchers = new HashSet<LocalSession.WatcherProxy>();
        }
        return twatchers;
    }

    /** A single node in the tree: its data, its registered watchers, and its children. */
    private static class Entry {
        // Node payload; AtomicReference so data reads/writes need no extra locking.
        private final AtomicReference<Object> data = new AtomicReference<Object>();
        // One-shot watchers for data/node changes; replaced wholesale when drained.
        private Set<LocalSession.WatcherProxy> nodeWatchers = new HashSet<LocalSession.WatcherProxy>();
        // One-shot watchers for child-list changes.
        private Set<LocalSession.WatcherProxy> childWatchers = new HashSet<LocalSession.WatcherProxy>();
        // Relative names of children (no leading path).
        private final Collection<String> children = new ArrayList<String>();
        // Per-child-path counters used to build SEQUENTIAL node suffixes.
        private final Map<String, AtomicLong> childSequences = new HashMap<String, AtomicLong>();

        // True while callWatchers is actively dispatching; guarded by processLock.
        private volatile boolean inProcess = false;
        private final Lock processLock = new ReentrantLock();
        private final DirMode mode;

        public Entry(final Object data, final DirMode mode) {
            this.data.set(data);
            this.mode = mode;
        }

        @Override
        public String toString() {
            return children.toString() + " " + valueOf(data.get());
        }

        private Set<LocalSession.WatcherProxy> gatherWatchers(final boolean node, final boolean child) {
            return ogatherWatchers(this, node, child);
        }

        // Watchers that fired while a dispatch was already in progress; drained by the
        // in-progress dispatcher's do/while loop below. Guarded by processLock.
        private final Set<LocalSession.WatcherProxy> toCallQueue = new HashSet<LocalSession.WatcherProxy>();

        /**
         * Fires the drained node/child watchers for this entry. Only one thread dispatches at a
         * time: a re-entrant or concurrent call just queues its watchers on toCallQueue and
         * returns, and the active dispatcher keeps looping until the queue is empty. The lock is
         * deliberately released around each watcher.process() call (alien code that may itself
         * modify the tree) and re-acquired afterwards — the exact unlock/lock ordering here is
         * what prevents both deadlock and lost notifications.
         */
        private void callWatchers(final boolean node, final boolean child) {
            Set<LocalSession.WatcherProxy> twatchers = gatherWatchers(node, child);

            processLock.lock();
            try {
                if (inProcess) {
                    toCallQueue.addAll(twatchers);
                    return;
                }

                do {
                    inProcess = true;

                    // remove everything in twatchers from the toCallQueue
                    // since we are about to call them all. If some end up back
                    // on here then when we're done the toCallQueue will not be empty
                    // and we'll run it again.
                    toCallQueue.removeAll(twatchers);

                    for (final LocalSession.WatcherProxy watcher : twatchers) {
                        try {
                            processLock.unlock();
                            watcher.process();
                        } catch (final RuntimeException e) {
                            LOGGER.error("Failed to handle process for watcher " + objectDescription(watcher), e);
                        } finally {
                            processLock.lock();
                        }
                    }

                    // now we need to reset twatchers to any new toCallQueue
                    twatchers = new HashSet<LocalSession.WatcherProxy>();
                    twatchers.addAll(toCallQueue); // in case we run again

                } while (toCallQueue.size() > 0);
                inProcess = false;
            } finally {
                processLock.unlock();
            }
        }
    }

    // Pair returned by doomkdir: the parent entry (to notify) and the final path created.
    private static class EntryAndPath {
        public final Entry entry;
        public final String pathToUse;

        public EntryAndPath(final Entry entry, final String pathToUse) {
            this.entry = entry;
            this.pathToUse = pathToUse;
        }
    }

    /** Returns the parent path of {@code path}, normalizing Windows separators to '/'. */
    private static String parent(final String path) {
        final File f = new File(path);
        return f.getParent().replace('\\', '/');
    }

    // This should only be called from a static synchronized method on the LocalClusterSessionFactory
    private static Entry get(final String absolutePath, final LocalSession.WatcherProxy watcher, final boolean nodeWatch)
            throws ClusterInfoException.NoNodeException {
        Entry ret;
        ret = entries.get(absolutePath);
        if (ret == null)
            throw new ClusterInfoException.NoNodeException("Path \"" + absolutePath + "\" doesn't exists.");
        if (watcher != null) {
            if (nodeWatch) {
                ret.nodeWatchers.add(watcher);
                if (LOGGER.isTraceEnabled())
                    LOGGER.trace("Added [" + watcher.watcher + "] to " + ret + " at " + absolutePath);
            } else {
                ret.childWatchers.add(watcher);
                if (LOGGER.isTraceEnabled())
                    LOGGER.trace("Added [" + watcher.watcher + "] to " + ret + " at " + absolutePath);
            }
        }
        return ret;
    }

    /** Reads the data at {@code path}, optionally registering a node watcher. */
    private static synchronized Object ogetData(final String path, final LocalSession.WatcherProxy watcher) throws ClusterInfoException {
        final Entry e = get(path, watcher, true);
        return e.data.get();
    }

    /**
     * Sets the data at {@code path} and fires node watchers. Watchers are deliberately called
     * OUTSIDE the class lock to avoid deadlock with watcher callbacks that re-enter the tree.
     */
    private static void osetData(final String path, final Object data) throws ClusterInfoException {
        final Entry e;
        synchronized (LocalClusterSessionFactory.class) {
            e = get(path, null, true);
            e.data.set(data);
        }
        e.callWatchers(true, false);
    }

    /** Existence check; registers {@code watcher} as a node watcher when the node exists. */
    private static synchronized boolean oexists(final String path, final LocalSession.WatcherProxy watcher) {
        final Entry e = entries.get(path);
        if (e != null && watcher != null)
            e.nodeWatchers.add(watcher);
        return e != null;
    }

    /**
     * Creates a directory node, then notifies the parent's child watchers outside the class lock.
     * Returns the actual path created (differs from {@code path} for SEQUENTIAL modes) or null
     * if the node already existed.
     */
    private static String omkdir(final String path, final Object data, final DirMode mode) throws ClusterInfoException {
        final EntryAndPath results = doomkdir(path, data, mode);
        final Entry parent = results.entry;
        final String pathToUse = results.pathToUse;

        if (parent != null)
            parent.callWatchers(false, true);
        return pathToUse;
    }

    // The locked portion of omkdir: validates the parent, applies SEQUENTIAL numbering,
    // and inserts the new Entry into the tree.
    private static synchronized EntryAndPath doomkdir(final String path, final Object data, final DirMode mode) throws ClusterInfoException {
        if (oexists(path, null))
            return new EntryAndPath(null, null);

        final String parentPath = parent(path);

        final Entry parent = entries.get(parentPath);
        if (parent == null)
            throw new ClusterInfoException.NoParentException("No Parent for \"" + path + "\" which is expected to be \"" + parent(path) + "\"");

        // Mirrors ZooKeeper semantics: ephemeral nodes cannot have children.
        if (parent.mode != null && parent.mode.isEphemeral())
            throw new ClusterInfoException(
                    "Cannot add the subdirectory \"" + path + "\" to the EPHEMERAL parent directory \"" + parentPath
                            + ".\" EPHEMERAL directories can't have children.");

        long seq = -1;
        if (mode.isSequential()) {
            AtomicLong cseq = parent.childSequences.get(path);
            if (cseq == null)
                parent.childSequences.put(path, cseq = new AtomicLong(0));
            seq = cseq.getAndIncrement();
        }

        // SEQUENTIAL nodes get a zero-padded 10-digit suffix, like ZooKeeper.
        final String pathToUse = seq >= 0 ? (path + String.format("%010d", seq)) : path;

        entries.put(pathToUse, new Entry(data, mode));
        // find the relative path
        final int lastSlash = pathToUse.lastIndexOf('/');
        parent.children.add(pathToUse.substring(lastSlash + 1));
        return new EntryAndPath(parent, pathToUse);
    }

    private static void ormdir(final String path) throws ClusterInfoException {
        ormdir(path, true);
    }

    /**
     * Removes the node at {@code path}; optionally fires the parent's child watchers and the
     * removed node's own watchers (again, outside the class lock).
     */
    private static void ormdir(final String path, final boolean notifyWatchers) throws ClusterInfoException {
        final EntryAndParent results = doormdir(path);
        final Entry ths = results.entry;
        final Entry parent = results.parent;

        if (parent != null && notifyWatchers)
            parent.callWatchers(false, true);

        if (notifyWatchers)
            ths.callWatchers(true, true);
    }

    // Pair returned by doormdir: the removed entry and its parent (either may need notification).
    private static class EntryAndParent {
        public final Entry entry;
        public final Entry parent;

        public EntryAndParent(final Entry entry, final Entry parent) {
            this.entry = entry;
            this.parent = parent;
        }
    }

    // The locked portion of ormdir: removes the entry and unlinks it from its parent's children.
    private static synchronized EntryAndParent doormdir(final String path) throws ClusterInfoException {
        final Entry ths = entries.get(path);
        if (ths == null)
            throw new ClusterInfoException("rmdir of non existant node \"" + path + "\"");

        final Entry parent = entries.get(parent(path));
        entries.remove(path);

        if (parent != null) {
            final int lastSlash = path.lastIndexOf('/');
            parent.children.remove(path.substring(lastSlash + 1));
        }

        return new EntryAndParent(ths, parent);
    }

    /** Returns a snapshot of the child names of {@code path}, optionally adding a child watcher. */
    private static synchronized Collection<String> ogetSubdirs(final String path, final LocalSession.WatcherProxy watcher)
            throws ClusterInfoException {
        final Entry e = get(path, watcher, false);
        final Collection<String> ret = new ArrayList<String>(e.children.size());
        ret.addAll(e.children);
        return ret;
    }
    // ====================================================================

    @Override
    public ClusterInfoSession createSession() {
        synchronized (currentSessions) {
            final LocalSession ret = new LocalSession();
            currentSessions.add(ret);
            return ret;
        }
    }

    /**
     * A session over the shared in-VM tree. Tracks the ephemeral nodes it created so they can be
     * torn down on {@link #stop()} or {@link #disrupt()}, mimicking a ZooKeeper session expiring.
     */
    public class LocalSession implements ClusterInfoSession, DisruptibleSession {
        // Ephemeral paths created through this session; removed when it stops.
        private final List<String> localEphemeralDirs = new ArrayList<String>();
        // Once set, all operations throw and registered watchers become no-ops.
        private final AtomicBoolean stopping = new AtomicBoolean(false);

        /**
         * Wraps a user watcher so that (a) it is suppressed after the session stops and
         * (b) equality/hashing delegate to the wrapped watcher, preserving the one-shot
         * de-duplication semantics of the watcher sets.
         */
        private class WatcherProxy {
            private final ClusterInfoWatcher watcher;

            private WatcherProxy(final ClusterInfoWatcher watcher) {
                this.watcher = watcher;
            }

            private final void process() {
                if (!stopping.get())
                    watcher.process();
            }

            @Override
            public int hashCode() {
                return watcher.hashCode();
            }

            @Override
            public boolean equals(final Object o) {
                return watcher.equals(((WatcherProxy) o).watcher);
            }

            @Override
            public String toString() {
                return watcher.toString();
            }
        }

        private final WatcherProxy makeWatcher(final ClusterInfoWatcher watcher) {
            return watcher == null ? null : new WatcherProxy(watcher);
        }

        @Override
        public String mkdir(final String path, final Object data, final DirMode mode) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("mkdir called on stopped session.");

            final String ret = omkdir(path, data, mode);
            if (ret != null && mode.isEphemeral()) {
                synchronized (localEphemeralDirs) {
                    localEphemeralDirs.add(ret);
                }
            }
            return ret;
        }

        @Override
        public void rmdir(final String path) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("rmdir called on stopped session.");

            ormdir(path);
            synchronized (localEphemeralDirs) {
                localEphemeralDirs.remove(path);
            }
        }

        @Override
        public boolean exists(final String path, final ClusterInfoWatcher watcher) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("exists called on stopped session.");
            return oexists(path, makeWatcher(watcher));
        }

        @Override
        public Object getData(final String path, final ClusterInfoWatcher watcher) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("getData called on stopped session.");

            return ogetData(path, makeWatcher(watcher));
        }

        @Override
        public void setData(final String path, final Object data) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("setData called on stopped session.");
            osetData(path, data);
        }

        @Override
        public Collection<String> getSubdirs(final String path, final ClusterInfoWatcher watcher) throws ClusterInfoException {
            if (stopping.get())
                throw new ClusterInfoException("getSubdirs called on stopped session.");
            return ogetSubdirs(path, makeWatcher(watcher));
        }

        @Override
        public void stop() {
            stop(true);
        }

        // Tears down this session's ephemeral nodes (in reverse creation order), deregisters the
        // session, and resets the shared tree when this was the last session and cleanup is enabled.
        private void stop(final boolean notifyWatchers) {
            stopping.set(true);
            synchronized (localEphemeralDirs) {
                for (int i = localEphemeralDirs.size() - 1; i >= 0; i--) {
                    try {
                        if (LOGGER.isTraceEnabled())
                            LOGGER.trace("Removing ephemeral directory due to stopped session " + localEphemeralDirs.get(i));
                        ormdir(localEphemeralDirs.get(i), notifyWatchers);
                    } catch (final ClusterInfoException cie) {
                        // this can only happen in an odd race condition but
                        // it's ok if it does since it means the dir has already
                        // been removed from another thread.
                    }
                }
                localEphemeralDirs.clear();
            }

            synchronized (currentSessions) {
                currentSessions.remove(this);
                if (currentSessions.size() == 0 && cleanupAfterLastSession)
                    reset();
            }
        }

        @Override
        public void disrupt() {
            // first dump the ephemeral nodes
            final Set<String> parents = new HashSet<String>();
            synchronized (localEphemeralDirs) {
                for (int i = localEphemeralDirs.size() - 1; i >= 0; i--) {
                    try {
                        ormdir(localEphemeralDirs.get(i), false);
                    } catch (final ClusterInfoException cie) {
                        // this can only happen in an odd race condition but
                        // it's ok if it does since it means the dir has already
                        // been removed from another thread.
                    }
                }

                // go through all of the nodes that were just deleted and find all unique parents
                for (final String path : localEphemeralDirs)
                    parents.add(parent(path));

                localEphemeralDirs.clear();
            }
            // In some tests (and this method is only for tests) there is a race condition where
            // the test has a thread trying to grab a shard while disrupting the session in order
            // to knock out anyone currently holding the shard. On heavily loaded machines this
            // doesn't work because the callback is notified too quickly never giving the test
            // enough opportunity to obtain the shard. So here we are going to sleep for some
            // short amount of time before making the callback.
            try {
                Thread.sleep(200);
            } catch (final InterruptedException ie) {}

            for (final String path : parents) {
                try {
                    final Entry e = get(path, null, false);
                    e.callWatchers(false, true);
                } catch (final ClusterInfoException.NoNodeException e) {} // this is fine
            }
        }
    } // end session definition

    /** Null/exception-safe String.valueOf for logging. */
    private static String valueOf(final Object o) {
        try {
            return String.valueOf(o);
        } catch (final Throwable th) {
            LOGGER.warn("Failed to determine valueOf for given object", th);
        }
        return "[error]";
    }

    /** Null/exception-safe class-name lookup for logging. */
    private static String valueOfClass(final Object o) {
        try {
            final Class<?> clazz = o == null ? null : o.getClass();
            return clazz == null ? "[null object has no class]" : clazz.getName();
        } catch (final Throwable th) {
            LOGGER.warn("Failed to determine valueOf for given object", th);
        }
        return "[error]";
    }

    /** Formats an object with its class for diagnostic messages. */
    private static String objectDescription(final Object message) {
        return "\"" + valueOf(message) + (message != null ? "\" of type \"" + valueOfClass(message) : "") + "\"";
    }
}
package com.blueheronsresistance.stattracker;

import android.app.IntentService;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.support.v4.app.NotificationCompat;
import android.util.Log;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Service used for uploading image to the Stat Tracker and submitting stats
 */
public class ShareService extends IntentService {
    public ShareService() {
        super("ShareService");
    }

    private static final String TAG = "ShareService";
    private static final String ENCODING = "UTF-8";
    private static final int NOTIFY_PROGRESS_ID = 1;
    private static final int NOTIFY_STATUS_ID = 2;
    private static final int NOTIFY_UPLOAD_ERROR_ID = 3;
    static final int BUF_SIZE = 8 * 1024; // size of BufferedInput/OutputStream

    // Compiled once: matches "profile_YYYYMMDD_HHMMSS_n[.ext]" and captures year/month/day.
    private static final Pattern FILENAME_DATE_PATTERN =
            Pattern.compile("^profile_(\\d{4})(\\d{2})(\\d{2})_\\d{6}_\\d+(?:\\.\\S+)?$");

    private NotificationCompat.Builder mProgressBuilder;
    private NotificationManager mNotificationManager;

    /**
     * Runs the whole share pipeline for one intent: upload the screenshot for OCR, validate the
     * OCR result, submit the parsed stats, notify the user, and finally delete the cached image.
     */
    @Override
    protected void onHandleIntent(Intent workIntent) {
        mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        mProgressBuilder = getNotificationBuilder(getString(R.string.service_progress_notification_title),
                getString(R.string.service_progress_notification_start))
                .setCategory(NotificationCompat.CATEGORY_PROGRESS);
        // Foreground so the upload survives the app being backgrounded.
        startForeground(NOTIFY_PROGRESS_ID, mProgressBuilder.build());

        // Gets data from the incoming Intent
        String imageName = workIntent.getStringExtra(getString(R.string.intent_extra_image_name));
        File imageFile = new File(new File(getCacheDir(), getString(R.string.temp_share_directory)), imageName);
        String token = workIntent.getStringExtra(getString(R.string.intent_extra_token));
        String issuerUrl = workIntent.getStringExtra(getString(R.string.intent_extra_issuer_url));

        String uploadUrl = issuerUrl + getString(R.string.ocr_path, token);
        JSONObject json = uploadImage(uploadUrl, imageFile);
        if (json != null) {
            JSONObject stats = checkJson(json);
            if (stats != null) {
                String submitUrl = issuerUrl + getString(R.string.submit_path, token);
                String date = getDateFromFileName(imageFile.getName());
                JSONObject submitStatsResponse = submitStats(submitUrl, stats, date);
                if (submitStatsResponse != null) {
                    String response = submitStatsResponse(submitStatsResponse);
                    if (response != null) {
                        String dashboardUrl = issuerUrl + getString(R.string.dashboard_path);
                        statusNotification(getString(R.string.service_success_notification_finished_title),
                                getString(R.string.service_success_notification_finished_ap, response, stats.optInt("ap")),
                                dashboardUrl);
                    }
                }
            }
        }

        // The image is a temp copy in our cache dir; always clean it up, success or failure.
        if (imageFile.delete()) {
            Log.d(TAG, "Image deleted: " + imageFile.getPath());
        } else {
            Log.e(TAG, "Failed to delete image: " + imageFile.getPath());
        }

        stopForeground(true);
    }

    /**
     * POSTs {@code imageFile} to {@code uploadUrl}, streaming progress into the progress
     * notification, then incrementally parses the server's chunked OCR status response.
     *
     * @return the last JSON object read from the response, or null on any error (an error
     *         notification is raised before returning null).
     */
    private JSONObject uploadImage(String uploadUrl, File imageFile) {
        FileInputStream imageFIS;
        try {
            imageFIS = new FileInputStream(imageFile);
        } catch (FileNotFoundException ex) {
            uploadError(getString(R.string.service_error_upload_image_dne) + ex.getMessage());
            return null;
        }
        int imageSize;
        try {
            imageSize = (int) imageFIS.getChannel().size();
        } catch (IOException ex) {
            uploadError(getString(R.string.service_error_upload_image_size) + ex.getMessage());
            closeStream(imageFIS);
            return null;
        }
        Log.d(TAG, "Image size: " + imageSize);

        URL url;
        try {
            url = new URL(uploadUrl); // Url to upload to with auth code substituted in
        } catch (MalformedURLException ex) {
            uploadError(getString(R.string.service_error_upload_url_parse, uploadUrl, ex.getMessage()));
            closeStream(imageFIS);
            return null;
        }
        HttpURLConnection conn;
        try {
            conn = (HttpURLConnection) url.openConnection();
        } catch (IOException ex) {
            uploadError(getString(R.string.service_error_upload_url_connect, uploadUrl, ex.getMessage()));
            closeStream(imageFIS);
            return null;
        }
        conn.setDoInput(true); // We want to get the response data back
        conn.setDoOutput(true); // POST request
        conn.setUseCaches(false); // No cached data
        // Fixed-length mode streams the body without buffering it all in memory.
        conn.setFixedLengthStreamingMode(imageSize); // image size in bytes
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); // Upload type for POST just having image data as the payload and nothing else

        OutputStream connOut;
        try {
            connOut = conn.getOutputStream();
        } catch (IOException ex) {
            uploadError(getString(R.string.service_error_upload_output_stream) + ex.getMessage());
            conn.disconnect();
            closeStream(imageFIS);
            return null;
        }

        BufferedInputStream imageIn = new BufferedInputStream(imageFIS);
        byte[] buf = new byte[BUF_SIZE]; // size of BufferedInput/OutputStream
        int n; // number of bytes read
        mProgressBuilder.setContentText(getString(R.string.service_progress_notification_upload)); // Starting image upload
        uploadProgress(imageSize, 0);
        try {
            int totalUploaded = 0; // total bytes uploaded, used for calculating our percentage
            while ((n = imageIn.read(buf, 0, BUF_SIZE)) > 0) {
                totalUploaded += n;
                try {
                    connOut.write(buf, 0, n);
                } catch (IOException ex) {
                    uploadError(getString(R.string.service_error_upload_sending_image) + ex.getMessage());
                    closeStream(connOut);
                    closeStream(imageIn);
                    conn.disconnect();
                    return null;
                }
                uploadProgress(imageSize, totalUploaded);
            }
        } catch (IOException ex) {
            uploadError(getString(R.string.service_error_upload_reading_image) + ex.getMessage());
            closeStream(connOut);
            closeStream(imageIn);
            conn.disconnect();
            return null;
        }
        closeStream(connOut); // done writing, make sure output stream is fully flushed and close since we are done with it
        closeStream(imageIn);

        int resCode;
        try {
            resCode = conn.getResponseCode();
        } catch (IOException ex) {
            uploadError(getString(R.string.service_error_upload_response_code_fail) + ex.getMessage());
            conn.disconnect(); // was leaked on this path before
            return null;
        }
        if (resCode == 200) {
            Log.d(TAG, "Image upload 200 response");
            InputStreamReader connIn = null;
            try {
                connIn = new InputStreamReader(conn.getInputStream());
                StringBuilder response = new StringBuilder();
                JSONObject json = null;
                // Server streams status updates; switch the notification to indeterminate.
                mProgressBuilder.setProgress(0, 0, true);
                char[] cBuf = new char[BUF_SIZE]; // size of InputStreamReader buffer
                while ((n = connIn.read(cBuf, 0, BUF_SIZE)) > 0) {
                    response.append(cBuf, 0, n);
                    // Re-parse after each chunk; the last complete JSON object wins.
                    json = parseOCRResponse(response.toString());
                    ocrProgress(json);
                }
                return json;
            } catch (IOException ex) {
                uploadError(getString(R.string.service_error_upload_response_data) + ex.getMessage());
                return null;
            } finally {
                closeStream(connIn);
                conn.disconnect();
            }
        } else {
            uploadError(getString(R.string.service_error_upload_response_code_invalid) + resCode);
            conn.disconnect();
            return null;
        }
    }

    /**
     * Closes any stream, logging (but otherwise swallowing) failures.
     * Replaces three copy-paste overloads, one of which logged the wrong stream type.
     */
    private void closeStream(Closeable stream) {
        try {
            if (stream != null) {
                stream.close();
            }
        } catch (IOException ex) {
            Log.e(TAG, "Failed closing stream: " + ex.getMessage());
        }
    }

    /** Raises the standard upload-failure notification with {@code error} as the body. */
    private void uploadError(String error) {
        errorNotification(getString(R.string.service_error_upload_error_title), error);
    }

    /** Updates the progress notification with bytes-uploaded out of total. */
    private void uploadProgress(int max, int progress) {
        Log.d(TAG, String.format("Image upload progress: %.2f%%", ((double) progress / max) * 100));
        mProgressBuilder.setProgress(max, progress, false);
        mNotificationManager.notify(NOTIFY_PROGRESS_ID, mProgressBuilder.build());
    }

    /** Mirrors the server's streamed OCR "status" field into the progress notification. */
    private void ocrProgress(JSONObject json) {
        if (json.has("status")) {
            Log.d(TAG, json.optString("status"));
            mProgressBuilder.setContentText(json.optString("status"));
            mNotificationManager.notify(NOTIFY_PROGRESS_ID, mProgressBuilder.build());
        }
    }

    /**
     * Parses the newest complete JSON object out of the server's "\n\n"-delimited stream.
     * If the final segment is still being received (no trailing newline), falls back to the
     * previous segment. Returns an empty object when nothing parses yet.
     */
    private JSONObject parseOCRResponse(String response) {
        String[] split = response.split("\n\n");
        String jsonStr = split[split.length - 1];
        try {
            if (jsonStr.endsWith("\n")) {
                return new JSONObject(jsonStr.trim());
            } else if (split.length > 1) {
                return new JSONObject(split[split.length - 2].trim());
            } else {
                return new JSONObject();
            }
        } catch (JSONException e) {
            Log.e(TAG, e.toString());
            return new JSONObject();
        }
    }

    /**
     * Validates the final OCR response: returns its "stats" object on success, otherwise raises
     * the appropriate error notification and returns null.
     */
    private JSONObject checkJson(JSONObject json) { // check response is good then
        JSONObject stats;
        if ((stats = json.optJSONObject("stats")) != null) {
            Log.d(TAG, "Your screenshot has been processed, AP: " + stats.optInt("ap"));
            Log.d(TAG, stats.toString());
            mProgressBuilder.setProgress(0, 0, false)
                    .setContentText(getString(R.string.service_success_upload_ap, stats.optInt("ap")));
            mNotificationManager.notify(NOTIFY_PROGRESS_ID, mProgressBuilder.build());
            return stats;
        } else if (json.has("uploadError")) {
            uploadError(getString(R.string.service_error_upload_json_upload_error, json.optString("uploadError")));
        } else {
            if (json.has("session")) {
                uploadError(getString(R.string.service_error_upload_json_session) + json.optString("session"));
            } else {
                uploadError(getString(R.string.service_error_upload_json_no_session) + json.toString());
            }
        }
        return null;
    }

    /**
     * Extracts a "yyyy-MM-dd" date from an Ingress screenshot filename
     * (profile_YYYYMMDD_HHMMSS_n), falling back to today's date when it doesn't match.
     */
    private String getDateFromFileName(String fileName) {
        String date;
        Matcher regex = FILENAME_DATE_PATTERN.matcher(fileName);
        if (regex.matches()) {
            Log.d(TAG, "Found date in filename");
            date = String.format("%s-%s-%s", regex.group(1), regex.group(2), regex.group(3));
        } else {
            Log.d(TAG, "Using current date");
            date = new SimpleDateFormat("yyyy-MM-dd", Locale.US).format(new Date());
        }
        Log.d(TAG, date);
        return date;
    }

    /**
     * Form-POSTs the stats (plus the given date) to {@code submitUrl}.
     *
     * @return the parsed JSON response, or null on any error (an error notification is raised
     *         before returning null).
     */
    private JSONObject submitStats(String submitUrl, JSONObject stats, String date) {
        try {
            stats.put("date", date);
        } catch (JSONException e) {
            submitError(getString(R.string.service_error_submit_date) + e.toString());
            return null;
        }
        Log.d(TAG, "submitStats stats: " + stats.toString());

        URL url;
        try {
            url = new URL(submitUrl); // Url to upload to with auth code substituted in
        } catch (MalformedURLException ex) {
            submitError(getString(R.string.service_error_submit_url_parse, submitUrl, ex.getMessage()));
            return null;
        }
        HttpURLConnection conn;
        try {
            conn = (HttpURLConnection) url.openConnection();
        } catch (IOException ex) {
            submitError(getString(R.string.service_error_submit_url_connect, submitUrl, ex.getMessage()));
            return null;
        }
        conn.setDoInput(true); // We want to get the response data back
        conn.setDoOutput(true); // POST request
        conn.setUseCaches(false); // No cached data
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded; charset=" + ENCODING); // Upload type for POST just having image data as the payload and nothing else

        OutputStream connOut;
        try {
            connOut = conn.getOutputStream();
        } catch (IOException ex) {
            submitError(getString(R.string.service_error_submit_output_stream) + ex.getMessage());
            conn.disconnect();
            return null;
        }

        // Flatten the JSON stats into application/x-www-form-urlencoded key=value pairs.
        Map<String, String> params = new HashMap<>();
        StringBuilder encodedParams = new StringBuilder();
        String key;
        Iterator<String> iter = stats.keys();
        byte[] encodedByteParams;
        while (iter.hasNext()) {
            key = iter.next();
            params.put(key, stats.optString(key));
        }
        try {
            for (Map.Entry<String, String> entry : params.entrySet()) {
                if (encodedParams.length() > 0) {
                    encodedParams.append('&');
                }
                encodedParams.append(URLEncoder.encode(entry.getKey(), ENCODING));
                encodedParams.append('=');
                encodedParams.append(URLEncoder.encode(entry.getValue(), ENCODING));
            }
            Log.d(TAG, "submitStats body: " + encodedParams.toString());
            encodedByteParams = encodedParams.toString().getBytes(ENCODING);
        } catch (UnsupportedEncodingException ex) {
            submitError(getString(R.string.service_error_submit_encoding, ENCODING, ex.getMessage()));
            return null;
        }
        try {
            connOut.write(encodedByteParams);
        } catch (IOException ex) {
            submitError(getString(R.string.service_error_submit_sending_stats) + ex.getMessage());
            closeStream(connOut);
            conn.disconnect();
            return null;
        }
        closeStream(connOut); // done writing, make sure output stream is fully flushed and close since we are done with it

        int resCode;
        try {
            resCode = conn.getResponseCode();
        } catch (IOException ex) {
            submitError(getString(R.string.service_error_submit_response_code_fail) + ex.getMessage());
            conn.disconnect(); // was leaked on this path before
            return null;
        }
        if (resCode == 200) {
            Log.d(TAG, "Stat submission 200 response");
            InputStreamReader connIn = null;
            try {
                connIn = new InputStreamReader(conn.getInputStream());
                int n; // number of bytes read
                StringBuilder response = new StringBuilder();
                char[] cBuf = new char[BUF_SIZE]; // size of InputStreamReader buffer
                while ((n = connIn.read(cBuf, 0, BUF_SIZE)) > 0) {
                    response.append(cBuf, 0, n);
                }
                Log.d(TAG, "submitStats finished request");
                return new JSONObject(response.toString());
            } catch (IOException ex) {
                submitError(getString(R.string.service_error_submit_response_data) + ex.getMessage());
                return null;
            } catch (JSONException ex) {
                submitError(getString(R.string.service_error_submit_response_json) + ex.getMessage());
                return null;
            } finally {
                closeStream(connIn);
                conn.disconnect();
            }
        } else {
            submitError(getString(R.string.service_error_submit_response_code_invalid) + resCode);
            conn.disconnect();
            return null;
        }
    }

    /** Raises the standard submit-failure notification with {@code error} as the body. */
    private void submitError(String error) {
        errorNotification(getString(R.string.service_error_submit_error_title), error);
    }

    /**
     * Interprets the submit response: returns the success "message", or raises an error
     * notification and returns null.
     */
    private String submitStatsResponse(JSONObject response) {
        if (response.optBoolean("uploadError")) {
            if (response.has("message")) {
                submitError(getString(R.string.service_error_submit_json_upload_error_message) + response.optString("message"));
            } else {
                submitError(getString(R.string.service_error_submit_json_upload_error_no_message) + response.toString());
            }
        } else if (response.has("message")) {
            // We are done, hazaaa!!
            return response.optString("message");
        } else {
            submitError(getString(R.string.service_error_submit_json_unknown) + response.toString());
        }
        return null;
    }

    /** Posts a dismissible error notification (big-text style) with the given title/body. */
    private void errorNotification(String title, String text) {
        Log.e(TAG, text);
        NotificationCompat.Builder mBuilder = getNotificationBuilder(title, text)
                .setAutoCancel(true)
                .setContentIntent(PendingIntent.getActivity(this, 0, new Intent(), 0))
                .setCategory(NotificationCompat.CATEGORY_ERROR)
                .setStyle(new NotificationCompat.BigTextStyle().bigText(text));
        mNotificationManager.notify(NOTIFY_UPLOAD_ERROR_ID, mBuilder.build());
    }

    /** Posts a success/status notification that opens {@code url} when tapped. */
    private void statusNotification(String title, String text, String url) {
        Log.d(TAG, text);
        Uri uri = Uri.parse(url);
        NotificationCompat.Builder mBuilder = getNotificationBuilder(title, text)
                .setAutoCancel(true)
                .setContentIntent(PendingIntent.getActivity(this, 0, new Intent(Intent.ACTION_VIEW, uri), 0))
                .setCategory(NotificationCompat.CATEGORY_STATUS)
                .setStyle(new NotificationCompat.BigTextStyle().bigText(text));
        mNotificationManager.notify(NOTIFY_STATUS_ID, mBuilder.build());
    }

    /** Shared builder with the app icon, large icon, title and text pre-set. */
    private NotificationCompat.Builder getNotificationBuilder(String title, String text) {
        return new NotificationCompat.Builder(this)
                .setSmallIcon(R.drawable.ic_notification)
                .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher))
                .setContentTitle(title)
                .setContentText(text);
    }
}
package burlap.domain.singleagent.lunarlander.state; import burlap.mdp.core.oo.state.MutableOOState; import burlap.mdp.core.oo.state.OOStateUtilities; import burlap.mdp.core.oo.state.OOVariableKey; import burlap.mdp.core.oo.state.ObjectInstance; import burlap.mdp.core.oo.state.exceptions.UnknownClassException; import burlap.mdp.core.oo.state.exceptions.UnknownObjectException; import burlap.mdp.core.state.MutableState; import burlap.mdp.core.state.State; import burlap.mdp.core.state.StateUtilities; import burlap.mdp.core.state.UnknownKeyException; import burlap.mdp.core.state.annotations.ShallowCopyState; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static burlap.domain.singleagent.lunarlander.LunarLanderDomain.*; /** * @author James MacGlashan. */ @ShallowCopyState public class LLState implements MutableOOState{ public LLAgent agent; public LLBlock.LLPad pad; public List<LLBlock.LLObstacle> obstacles; public LLState() { } public LLState(LLAgent agent, LLBlock.LLPad pad, List<LLBlock.LLObstacle> obstacles) { this.agent = agent; this.pad = pad; this.obstacles = obstacles; } public LLState(LLAgent agent, LLBlock.LLPad pad, LLBlock.LLObstacle...obstacles) { this.agent = agent; this.pad = pad; this.obstacles = Arrays.asList(obstacles); } @Override public MutableOOState addObject(ObjectInstance o) { if(o instanceof LLAgent){ agent = (LLAgent)o; } else if(o instanceof LLBlock.LLPad){ pad = (LLBlock.LLPad)o; } else if(o instanceof LLBlock){ touchObstacles().add((LLBlock.LLObstacle)o); } else{ throw new UnknownClassException(o.className()); } return this; } @Override public MutableOOState removeObject(String oname) { if(agent.name().equals(oname)){ agent = new LLAgent(); //cannot remove, so copy } else if(pad != null && pad.name().equals(oname)){ pad = null; } else{ int ind = OOStateUtilities.objectIndexWithName(obstacles, oname); if(ind != -1){ touchObstacles().remove(ind); } else{ throw new UnknownObjectException(oname); } } return 
this; } @Override public MutableOOState renameObject(String objectName, String newName) { if(agent.name().equals(objectName)){ throw new RuntimeException("LL Agent name must be " + objectName); } else if(pad != null && pad.name().equals(objectName)){ touchPad().name = newName; } else{ int ind = OOStateUtilities.objectIndexWithName(obstacles, objectName); if(ind != -1){ LLBlock.LLObstacle ob = obstacles.get(ind); touchObstacles().remove(ind); obstacles.add(ind, (LLBlock.LLObstacle)ob.copyWithName(newName)); } else{ throw new UnknownObjectException(objectName); } } return this; } @Override public int numObjects() { return pad != null ? 2 + obstacles.size() : 1 + obstacles.size(); } @Override public ObjectInstance object(String oname) { if(agent.name().equals(oname)){ return agent; } else if(pad != null && pad.name().equals(oname)){ return pad; } else{ int ind = OOStateUtilities.objectIndexWithName(obstacles, oname); if(ind != -1) return obstacles.get(ind); } throw new UnknownObjectException(oname); } @Override public List<ObjectInstance> objects() { List<ObjectInstance> obs = new ArrayList<ObjectInstance>(2+obstacles.size()); obs.add(agent); if(pad != null) obs.add(pad); obs.addAll(obstacles); return obs; } @Override public List<ObjectInstance> objectsOfClass(String oclass) { if(oclass.equals(CLASS_AGENT)){ return Arrays.<ObjectInstance>asList(agent); } else if(oclass.equals(CLASS_PAD)){ return pad != null ? 
Arrays.<ObjectInstance>asList(pad) : new ArrayList<ObjectInstance>(); } else if(oclass.equals(CLASS_OBSTACLE)){ return new ArrayList<ObjectInstance>(obstacles); } throw new UnknownClassException(oclass); } @Override public MutableState set(Object variableKey, Object value) { OOVariableKey key = OOStateUtilities.generateKey(variableKey); Double d = StateUtilities.stringOrNumber(value).doubleValue(); if(agent.name().equals(key.obName)){ if(key.obVarKey.equals(VAR_X)){ touchAgent().x = d; } else if(key.obVarKey.equals(VAR_Y)){ touchAgent().y = d; } else if(key.obVarKey.equals(VAR_VX)){ touchAgent().vx = d; } else if(key.obVarKey.equals(VAR_VY)){ touchAgent().vy = d; } else{ throw new UnknownKeyException(key.obVarKey); } } else if(pad != null && pad.name().equals(key.obName)){ if(key.obVarKey.equals(VAR_LEFT)){ touchPad().left = d; } else if(key.obVarKey.equals(VAR_RIGHT)){ touchPad().right = d; } else if(key.obVarKey.equals(VAR_BOTTOM)){ touchPad().bottom = d; } else if(key.obVarKey.equals(VAR_TOP)){ touchPad().top = d; } else{ throw new UnknownKeyException(key.obVarKey); } } else{ int ind = OOStateUtilities.objectIndexWithName(obstacles, key.obName); if(ind != -1){ if(key.obVarKey.equals(VAR_LEFT)){ touchObstacle(ind).left = d; } else if(key.obVarKey.equals(VAR_RIGHT)){ touchObstacle(ind).right = d; } else if(key.obVarKey.equals(VAR_BOTTOM)){ touchObstacle(ind).bottom = d; } else if(key.obVarKey.equals(VAR_TOP)){ touchObstacle(ind).top = d; } else{ throw new UnknownKeyException(key.obVarKey); } } else{ throw new UnknownObjectException(key.obName); } } return this; } @Override public List<Object> variableKeys() { return OOStateUtilities.flatStateKeys(this); } @Override public Object get(Object variableKey) { return OOStateUtilities.get(this, variableKey); } @Override public State copy() { return new LLState(agent, pad, obstacles); } public LLAgent touchAgent(){ agent = agent.copy(); return agent; } public LLBlock.LLPad touchPad(){ pad = pad.copy(); return pad; } 
public List<LLBlock.LLObstacle> touchObstacles(){ obstacles = new ArrayList<LLBlock.LLObstacle>(obstacles); return obstacles; } public LLBlock.LLObstacle touchObstacle(int ind){ LLBlock.LLObstacle obs = obstacles.get(ind).copy(); touchObstacles().remove(ind); touchObstacles().add(ind, obs); return obs; } public List<LLBlock.LLObstacle> deepTouchObstacles(){ List<LLBlock.LLObstacle> nobs = new ArrayList<LLBlock.LLObstacle>(obstacles.size()); for(LLBlock.LLObstacle obs : obstacles){ nobs.add(obs.copy()); } obstacles = nobs; return obstacles; } @Override public String toString() { return OOStateUtilities.ooStateToString(this); } }
/*
 * Created on Nov 18, 2004
 *
 */
package apollo.dataadapter.chado.jdbc;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.log4j.*;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import apollo.config.Config;
import apollo.dataadapter.TransactionOutputAdapter;
import apollo.dataadapter.chado.ChadoDatabase;
import apollo.dataadapter.chado.ChadoTransaction;
import apollo.dataadapter.chado.ChadoTransactionTransformer;
import apollo.dataadapter.chado.ChadoUpdateTransaction;
import apollo.dataadapter.chadoxml.ChadoTransactionXMLTemplate;
import apollo.util.IOUtil;

/**
 * This class is used to write/commit a List of ChadoTransactions
 * into a chado database using JDBC.
 *
 * All transactions are applied inside a single database transaction
 * (autocommit off) and rolled back as a unit on any failure, or always
 * rolled back when {@code printOnly} is set.
 *
 * NOTE(review): all SQL here is built by string concatenation
 * (see makeLookupQuery/constructWhereClause/commitInsert/appendValue);
 * values come from internal ChadoTransaction objects, but migrating to
 * PreparedStatement parameters would remove quoting/injection hazards.
 *
 * @author wgm
 */
public class JDBCTransactionWriter extends TransactionOutputAdapter {

  // -----------------------------------------------------------------------
  // Class/static variables
  // -----------------------------------------------------------------------

  protected final static Logger logger = LogManager.getLogger(JDBCTransactionWriter.class);

  // -----------------------------------------------------------------------
  // Instance variables
  // -----------------------------------------------------------------------

  private String dbUrl;
  private String dbUser;
  private String dbPwd;
  private Connection conn;
  // Maps a transaction's temporary id (String) -> real database id (Long),
  // filled in by lookup() and commitInsert() and consumed by appendValue().
  private Map idMap;
  private String writebackTemplateFilename = null;
  private List chadoTransMacrosFromConfig = null;

  // If true then print insert/update/delete SQL to stdout instead of updating the DB
  // The updates will be run as usual (in order to correctly simulate what would have
  // happened during a real commit), but then a rollback() will be issued to undo them.
  private boolean printOnly = false;

  // JC: printOnly isn't such a good name for this.  Could split it into two parameters,
  // one that specifies a rollback() should always be done ("noCommit") and one that
  // specifies logging all SQL to console/file.

  // -----------------------------------------------------------------------
  // Constructors
  // -----------------------------------------------------------------------

  /** This is only used by test classes, should change test classes to chado db
      need template file as well which comes with db */
  public JDBCTransactionWriter(String dbHost, String dbName, String dbUser, String pwd,
                               int port) {
    this("jdbc:postgresql://" + dbHost + ":" + port + "/" + dbName, dbUser, pwd);
  }

  /**
   *@param chadoDB ChadoDatabase to which changes should be written.
   */
  public JDBCTransactionWriter(ChadoDatabase chadoDB) {
    this(chadoDB.getJdbcUrl(),chadoDB.getLogin(),chadoDB.getPassword());
    chadoTransMacrosFromConfig = chadoDB.getChadoInstance().getChadoTransMacros();
    writebackTemplateFilename = chadoDB.getChadoInstance().getWritebackTemplateFile();
  }

  /**
   * @param url JDBC URL of the chado database to which changes should be written.
   * @param dbUser Name of a login with insert/update/delete privileges
   * @param pwd Password for the login <code>dbUser</code>
   */
  private JDBCTransactionWriter(String url, String dbUser, String pwd) {
    this.dbUser = dbUser;
    this.dbPwd = pwd;
    this.dbUrl = url;

    // JC: this is still a hack, but slightly less so than before...
    String driverClass = null;
    if (isPostgreSQL()) driverClass = "org.postgresql.Driver";

    // JC: It shouldn't be necessary to load the driver class if we've already used
    // the JDBC chado adapter to *read* info. from this same database.  However,
    // that may not always be the case, e.g. converting data from GAME XML to
    // chado JDBC?  In any case, the real problem here is that we're duplicating
    // functionality in the writeback adapter that's already been covered in the
    // read adapter.
    try {
      if (driverClass != null) Class.forName(driverClass);
      conn = getConnection();
    }
    catch(Exception e) {
      // NOTE(review): connection failures here are logged but not rethrown;
      // getConnection() will retry later when a commit is attempted.
      logger.error("Exception in JDBCTransactionWriter.init()", e);
    }
    idMap = new HashMap();
  }

  // -----------------------------------------------------------------------
  // TransactionOutputAdapter
  // -----------------------------------------------------------------------

  /** This takes a list of chado transactions (not apollo transactions) and commits them.
      Macro/preamble transactions (from config and the writeback template, plus the
      srcfeature lookup) are prepended so their ids are in idMap before the real
      transactions run. Everything executes in one DB transaction. */
  protected void commitTransformedTransactions(List transformedTn) throws Exception {
    if (transformedTn == null || transformedTn.size() == 0) {
      logger.info("JDBCTransactionWriter.commitTransformedTransactions(): no transactions to commit.");
      return;
    }
    List macros = new ArrayList();
    // add macros from config - hafta do first - has ontologies...
    if (chadoTransMacrosFromConfig != null)
      macros.addAll(chadoTransMacrosFromConfig);
    // Have to load transactions in the template file.
    macros.addAll(loadTnsFromTemplate());
    // Add another transaction for src feature
    logger.debug("Creating srcfeature ID transaction with mapID=" + mapID + " mapType=" + mapType);
    ChadoTransaction srcTn
      = ((ChadoTransactionTransformer)transformer).createSrcFeatureIDTransaction(mapID, mapType);
    macros.add(srcTn);
    List tns = new ArrayList();
    tns.addAll(macros);
    tns.addAll(transformedTn);
    // Wrap all SQL operations in one transaction in case anything is wrong
    Connection connection = null;
    try {
      connection = getConnection();
    }
    catch(SQLException e) {
      // Popup the exception
      throw e;
    }
    try {
      connection.setAutoCommit(false);
      ChadoTransaction tn = null;
      ChadoTransaction.Operation op = null;
      // Dispatch each transaction to the handler for its operation type.
      for (Iterator it = tns.iterator(); it.hasNext();) {
        tn = (ChadoTransaction) it.next();
        op = tn.getOperation();
        logger.debug("ChadoTransaction: " + tn);
        if (op == ChadoTransaction.LOOKUP) {
          commitLookup(tn, connection);
        }
        else if (op == ChadoTransaction.INSERT) {
          commitInsert(tn, connection);
        }
        else if (op == ChadoTransaction.FORCE) {
          commitForce(tn, connection);
        }
        else if (op == ChadoTransaction.DELETE) {
          commitDelete(tn, connection);
        }
        else if (op == ChadoTransaction.UPDATE) {
          commitUpdate(tn, connection);
        }
      }

      // If running in printOnly mode do a rollback()
      if (printOnly) {
        try {
          connection.rollback();
          logger.debug("JDBCTransactionWriter.commitTransaction(): " +
                       "Successful rollback() due to running in printOnly mode");
        }
        catch(SQLException e1) {
          logger.debug("JDBCTransactionWriter.commitTransaction(): " +
                       "FAILED rollback() in printOnly mode.  Please clean up the mess you just made.");
        }
      }
      // Otherwise (not in printOnly mode) do a commit()
      else {
        connection.commit();
      }
      connection.close();
      logger.debug("JDBCTransactionWriter.commitTransaction(): " +
                   "Transactions saved to the database successfully.");
    }
    catch (Exception e) {
      logger.error("Exception in commitTransformedTransactions", e);
      // Have to roll back if anything went wrong
      try {
        connection.rollback();
        logger.debug("JDBCTransactionWriter.commitTransaction(): " +
                     "Successful rollback() in response to a caught exception");
      }
      catch(SQLException e1) {
        logger.error("JDBCTransactionWriter.commitTransaction(): " +
                     "Cannot roll back changes during exception throwing. " +
                     "Please contact DBA ASAP!!!", e1);
      }
      // only popup exception if not in printonly mode; this allows
      // ChadoAdapter to think that transactions were committed
      // correctly and allow it to clear the transaction log
      // (which is desirable for interactive debugging)
      if (!printOnly) {
        throw e; // popup any exception.
      }
    }
  }

  // -----------------------------------------------------------------------
  // JDBCTransactionWriter - private methods
  // -----------------------------------------------------------------------

  /** Returns the cached connection, (re)opening it if absent or closed.
      Also records it as the adapter's target. */
  private Connection getConnection() throws SQLException {
    if (conn != null && !conn.isClosed())
      return conn;
    // Have to initialize
    conn = DriverManager.getConnection(dbUrl, dbUser, dbPwd);
    target = conn;
    return conn;
  }

  private String getWritebackTemplateFilename() {
    if (writebackTemplateFilename == null) // didnt get set from chado db, use config
      writebackTemplateFilename = Config.getChadoTemplateName();
    return writebackTemplateFilename;
  }

  /** Parses the writeback XML template's "preamble" element into a list of
      ChadoTransactions (one per child element).
      @throws IllegalStateException if the template file cannot be found. */
  private List loadTnsFromTemplate() {
    List tns = new ArrayList();
    String name = "conf" + File.separator + getWritebackTemplateFilename();
    String tmpFileName = IOUtil.findFile(name);
    if (tmpFileName == null) {
      String m="JDBCTransactionWriter.loadTnsFromTemplate(): Cannot find xml template"+
        "for chado transaction. filename: "+name;
      throw new IllegalStateException(m);
    }
    ChadoTransactionXMLTemplate template = new ChadoTransactionXMLTemplate(tmpFileName);
    Element elm = template.getElement("preamble");
    NodeList children = elm.getChildNodes();
    int size = children.getLength();
    for (int i = 0; i < size; i++) {
      Node tmp = children.item(i);
      if (tmp.getNodeType() == Node.ELEMENT_NODE)
        tns.add(loadOpElement((Element)tmp));
    }
    return tns;
  }

  /** Builds one ChadoTransaction from a template element: the element name is
      the table, the "op" attribute the operation (empty -> FORCE), and child
      elements become properties (with "update" attributes becoming update
      properties on a ChadoUpdateTransaction). */
  private ChadoTransaction loadOpElement(Element elm) {
    String op = elm.getAttribute("op");
    ChadoTransaction tn = null;
    if (op.equals("update"))
      tn = new ChadoUpdateTransaction();
    else
      tn = new ChadoTransaction();
    String tableName = elm.getNodeName();
    tn.setTableName(tableName);
    // NOTE(review): getAttribute() returns "" for a missing attribute, so the
    // null arm below appears unreachable; kept as-is.
    if (op == null || op.length() == 0)
      tn.setOperation(ChadoTransaction.FORCE);
    else
      tn.setOperation(ChadoTransaction.Operation.getOperation(op));
    String id = elm.getAttribute("id");
    if (id != null && id.length() > 0)
      tn.setID(id);
    // Get properties
    NodeList children = elm.getChildNodes();
    int size = children.getLength();
    if (tn instanceof ChadoUpdateTransaction) {
      for (int i = 0; i < size; i++) {
        Node tmp = children.item(i);
        if (tmp.getNodeType() == Node.ELEMENT_NODE) {
          String propName = tmp.getNodeName();
          String update = ((Element)tmp).getAttribute("update");
          String value = getTextValue(tmp);
          if (update == null || update.length() == 0)
            tn.addProperty(propName, value);
          else
            ((ChadoUpdateTransaction)tn).addUpdateProperty(propName, value);
        }
      }
    }
    else {
      for (int i = 0; i < size; i++) {
        Node tmp = children.item(i);
        if (tmp.getNodeType() == Node.ELEMENT_NODE) {
          String propName = tmp.getNodeName();
          String value = getTextValue(tmp);
          tn.addProperty(propName, value);
        }
      }
    }
    return tn;
  }

  /** Text content of an element's first child.
      NOTE(review): NPEs if the element has no children — assumed non-empty in
      the template. */
  private String getTextValue(Node elm) {
    return elm.getFirstChild().getNodeValue();
  }

  /** Executes a lookup transaction; throws if the row cannot be found. */
  private void commitLookup(ChadoTransaction tn, Connection conn) throws SQLException {
    long id = lookup(tn, conn);
    // if id < 0, its -1 and it failed to return a real id
    if (id < 0) { // Nothing found
      String m = "Error in JDBCTransactionWriter.commitLookup()\nLookup query "+
        "failed to return anything:\n  "+makeLookupQuery(tn);
      //throw new IllegalStateException("JDBCTransactionWriter.commitLookup(): "+id+" cannot be found.");
      throw new IllegalStateException(m);
    }
  }

  /** If found then tn.getID() is put in idMap to "tableName"_id for future use.
      Returns the found id, or -1 when no row matches.
      @throws IllegalStateException if more than one row matches. */
  private long lookup(ChadoTransaction tn, Connection conn) throws SQLException {
//     String tableName = tn.getTableName();
//     StringBuffer query = new StringBuffer();
//     query.append("SELECT ");
//     query.append(tableName);
//     query.append("_id FROM ");
//     query.append(tableName);
//     Map prop = tn.getProperties();
//     constructWhereClause(prop, query);
    String query = makeLookupQuery(tn);
    logger.debug("Lookup Query: " + query);

    Statement stat = conn.createStatement();
    ResultSet result = stat.executeQuery(query);
    int count = 0;
    // Need to get id
    long id1 = -1;
    String id = tn.getID();
    while (result.next()) {
      count ++;
      id1 = result.getLong(1);
      // presumably id is non-null for transactions that expect their result
      // cached — a null key would silently collide in idMap; verify callers.
      idMap.put(id, new Long(id1));
    }
    cleanUp(result, stat);
    if (count > 1)
      throw new IllegalStateException("JDBCTransactionWriter.lookup(): more than one row matches the query.");
    return id1;
  }

  /** Uses ChadoTransaction.getUniqueKeyProps for lookup query - different for
      exons (those dam shared exons) */
  private String makeLookupQuery(ChadoTransaction tn) {
    String tableName = tn.getTableName();
    //String id = tn.getID();
    StringBuffer query = new StringBuffer();
    query.append("SELECT ");
    query.append(tableName);
    query.append("_id FROM ");
    query.append(tableName);
    //Map prop = tn.getProperties();
    // for lookup only want to use props that are part of unique key. this is
    //currently only truly implemented for synonyms at this point, the rest just
    // return all props
    Map prop = tn.getUniqueKeyProps();
    constructWhereClause(prop, query);
    return query.toString();
  }

  /** Appends " WHERE k1=v1 AND k2=v2 ..." for each property, quoting/resolving
      values via appendValue(). */
  private void constructWhereClause(Map prop, StringBuffer query) {
    query.append(" WHERE ");
    for (Iterator it = prop.keySet().iterator(); it.hasNext();) {
      String key = (String) it.next();
      String value = (String) prop.get(key);
      query.append(key);
      query.append("=");
      appendValue(key, value, query);
      if (it.hasNext())
        query.append(" AND ");
    }
  }

  /** Closes a ResultSet and the Statement that produced it. */
  private void cleanUp(ResultSet rs) throws SQLException {
    cleanUp(rs,rs.getStatement());
  }

  /** Closes the given ResultSet and Statement (either may be null). */
  private void cleanUp(ResultSet result, Statement stat) throws SQLException {
    if (result != null)
      result.close();
    if (stat != null)
      stat.close();
  }

  /** Allocates a new primary-key id (DBMS-specific), records the temp-id ->
      real-id mapping, then executes the INSERT with the explicit id.
      @throws SQLException for unknown DBMS types or SQL failures.
      @throws IllegalStateException if the insert affects no rows. */
  private void commitInsert(ChadoTransaction tn, Connection conn) throws SQLException {
    String tableName = tn.getTableName();
    String tempId = tn.getID(); // temp id
    long idLong = -1;

    // In PostgreSQL we query for the id first.  This is far more efficient
    // (at least in postgres) than querying for max id afterwards
    // which can take a whole minute (even though it's indexed!)
    //
    if (isPostgreSQL()) {
      String idQuery = "SELECT nextval('public."+tableName+"_"+tableName+"_id_seq')";
      logger.debug("commitInsert PostgreSQL idQuery SQL: " + idQuery);
      ResultSet rs = conn.createStatement().executeQuery(idQuery);
      rs.next();
      idLong = rs.getLong(1);
      if (tempId != null)
        idMap.put(tempId, new Long(idLong)); // map temp id to new real id
      cleanUp(rs); // close result set & statement
    }
    // In Sybase we'll do the max(id)+1 query first to determine the new id.
    // We don't have any sequences to rely on, as in PostgreSQL, and the
    // id columns aren't defined as Sybase IDENTITY types, so the database
    // won't assign an id automatically.  Luckily this query is very fast
    // in Sybase.
    //
    else if (isSybase()) {
      String idQuery = "SELECT MAX(" + tableName + "_id)+1 FROM " + tableName;
      logger.debug("commitInsert Sybase idQuery SQL: " + idQuery);
      ResultSet rs = conn.createStatement().executeQuery(idQuery);
      rs.next();
      idLong = rs.getLong(1);
      if (tempId != null)
        idMap.put(tempId, new Long(idLong)); // map temp id to new real id
      cleanUp(rs); // close result set & statement
    }
    else {
      throw new SQLException("Unknown DBMS type for JDBC URL '" + dbUrl + "'");
    }

    // HACK to deal with non-NULLable columns in Sybase chado feature table
    // Should be in the table defs in the transactionXMLTemplate file, but this
    // information is currently hard-coded and not read from the file.
    if (isSybase() && tableName.equals("feature")) {
      tn.addProperty("is_analysis", "0");
      tn.addProperty("is_obsolete", "0");
      tn.addProperty("timeaccessioned", "getdate()");
      tn.addProperty("timelastmodified", "getdate()");
    }
    if (isSybase() && tableName.equals("featureloc")) {
      tn.addProperty("is_fmin_partial", "0");
      tn.addProperty("is_fmax_partial", "0");
      tn.addProperty("rank", "0"); // HACK - this will be a problem in the context of redundant featurelocs
      tn.addProperty("locgroup", "0");
    }
    // END HACK

    StringBuffer query = new StringBuffer();
    query.append("INSERT INTO ");
    query.append(tableName);
    query.append(" (");
    query.append(tableName + "_id, ");
    Map prop = tn.getProperties();
    for (Iterator it = prop.keySet().iterator(); it.hasNext();) {
      query.append(it.next());
      if (it.hasNext())
        query.append(", ");
    }
    query.append(") VALUES (");
    query.append(idLong + ", ");
    for (Iterator it = prop.keySet().iterator(); it.hasNext();) {
      String key = (String) it.next();
      String value = (String) prop.get(key);
      appendValue(key, value, query);
      if (it.hasNext())
        query.append(", ");
    }
    query.append(")");
    logger.debug("insert SQL: " + query.toString());

    Statement stat = conn.createStatement();
    int row = 0;
    try {
      row = stat.executeUpdate(query.toString());
    } catch (SQLException e) {
      // System.out.println("Next exception "+e.getNextException()+" err cod "+ e.getErrorCode()+ " state "+ e.getSQLState()
      //                    +" conn warning:"+conn.getWarnings()
      //                    +"\nstmnt warning\n "+stat.getWarnings()+"\n"+" nex warning "+stat.getWarnings().getNextWarning() );
      // throw new SQLException(e.getMessage() + "\nProblematic query: "+query);
      processException(e,stat,query.toString());
    }
    if (row == 0)
      throw new IllegalStateException("JDBCTransactionWriter.commitInsert(): " +
                                      "insert cannot work.");
//     // Need id
//     if (id != null) {
//       //ResultSet result = stat.getGeneratedKeys(); not supported yet!
//       String sql = "SELECT max(" + tableName + "_id) FROM " + tableName;
//       debugMsgAndTime(sql);
//       ResultSet result = stat.executeQuery(sql);
//       if (result.next()) {
//         long idLong = result.getLong(1); // new db id
//         idMap.put(id, new Long(idLong)); // map temp id to new real id
//         cleanUp(result, stat);
//       }
//       else {
//         cleanUp(result, stat);
//         throw new IllegalStateException("JDBCTransactionWriter.commitInsert(): Cannot get id for inserting.");
//       }
//     }
//     else
    cleanUp(null, stat);
  }

  /** Logs any statement warnings (e.g. trigger RAISE NOTICEs) then rethrows
      the failure with the offending SQL appended. */
  private void processException(SQLException e,Statement s, String sql) throws SQLException {
    // Trigger raise notices come out in the statement warnings
    SQLWarning warning = s.getWarnings();
    while (warning != null) {
      logger.warn(warning.getMessage());
      warning = warning.getNextWarning();
    }
    throw new SQLException(e.getMessage() + "\nProblematic query: "+sql);
  }

  /** FORCE = lookup, and insert only when the lookup finds nothing. */
  private void commitForce(ChadoTransaction tn, Connection conn) throws SQLException {
    // Lookup first
    long id = lookup(tn, conn);
    // Insert if cannot find
    if (id < 0) { // Nothing found. Do insert
      logger.debug(this + " lookup failed to find " + tn + ", doing forced insert");
      commitInsert(tn, conn);
    }
  }

  /** Executes an UPDATE; the SET clause comes from the update properties and
      the WHERE clause from the ordinary properties. A prior lookup() (when the
      transaction has an id) verifies the target row exists and caches its id.
      @throws IllegalStateException if zero or more than one row is affected. */
  private void commitUpdate(ChadoTransaction tn, Connection conn) throws SQLException {
    String tableName = tn.getTableName();
    String id = tn.getID();
    // so whats up with this? is this just to check that in fact the id/uniquename is
    // in the database - this wont work for exons as exon the uniquename is ignored
    // what if id is null? im a little confused about this
    /// well anyways shouldnt do this for exons - do we know if this is an exon
    // at this point?? - no we dont - im just gonna take this out then
    // presumably if we dont have a db id the update below will fail anyways right?
    // if in fact we need this then need to find way to work out exon issue - note
    // it in ChadoTransaction or something - ah there is isExon
    // this has to happen for exon too - this is where id gets added to idMap
    // for future lookups! - hmmmmmmmm.....
    // actually its ok to do exons just that isExon has to be true so it uses
    // theexon uniqueKey defined in ChadoTransaction!
    if (id != null) { // Need id
      logger.debug("id is not null - looking up id for update id: "+id);
      long idLong = lookup(tn, conn);
      if (idLong < 0)
        throw new IllegalStateException("JDBCTransactionWriter.commitUpdate(): cannot find record for updating.");
    }
    StringBuffer query = new StringBuffer();
    query.append("UPDATE ");
    query.append(tableName);
    query.append(" SET ");
    Map updateProp = ((ChadoUpdateTransaction)tn).getUpdateProperies();
    for (Iterator it = updateProp.keySet().iterator(); it.hasNext();) {
      String propName = (String) it.next();
      String value = (String) updateProp.get(propName);
      query.append(propName);
      query.append("=");
      appendValue(propName, value, query);
      if (it.hasNext())
        query.append(", ");
    }
    Map prop = tn.getProperties();
    constructWhereClause(prop, query);
    logger.debug("Update Query: " + query.toString());
    Statement stat = conn.createStatement();
    int row = stat.executeUpdate(query.toString());
    cleanUp(null, stat);
    if (row == 0) { // update failed to update a row - bad where clause
      String m="JDBCTransactionWriter.commitUpdate(): Cannot update. Update attempted "+
        "but no row was effected. Check where clause.";
      throw new IllegalStateException(m);
    }
    else if (row > 1)
      throw new IllegalStateException("JDBCTransactionWriter.commitUpdate(): More than one row is updated.");
  }

  /** Executes a DELETE whose WHERE clause comes from the transaction's
      properties. In printOnly mode a zero-row delete is tolerated.
      @throws IllegalStateException if no row (outside printOnly) or more than
      one row is deleted. */
  private void commitDelete(ChadoTransaction tn, Connection conn) throws SQLException {
    String tableName = tn.getTableName();
    String id = tn.getID(); // NOTE(review): unused here; kept for parity with the other commit methods.
    StringBuffer query = new StringBuffer();
    query.append("DELETE FROM ");
    query.append(tableName);
    Map prop = tn.getProperties();
    constructWhereClause(prop, query);
    logger.debug("Delete Query: " + query.toString());
    Statement stat = conn.createStatement();
    int row = stat.executeUpdate(query.toString());
    cleanUp(null, stat);
    if ((!printOnly) && (row == 0))
      throw new IllegalStateException("JDBCTransactionWriter.commitDelete(): Cannot delete a row.");
    else if (row > 1)
      throw new IllegalStateException("JDBCTransactionWriter.commitDelete(): More than one row is deleted.");
  }

  /** Appends a SQL value: "*_id" keys are resolved through idMap to their real
      database ids; everything else is appended as a (usually quoted) literal. */
  private void appendValue(String key, String value, StringBuffer query) {
    // Try to fetch id
    if (key.endsWith("_id") && idMap.containsKey(value))
      query.append(idMap.get(value));
    else {
      //if _id is not in idMap that probably means trouble! - it would be nice if we
      // could automatically try to lookup id (like for type_ids) but it would be hard
      // here as we have absolutely no context - for type_ids need to know which cv to
      // use
      if (key.endsWith("_id"))
        logger.debug("ERROR: no id mapped for " +key+" with value "+value+" for query "
                     +query+" Will try to do query without it but will probably fail");
      boolean addQuotes = true;

      // JC: HACK - don't quote values we know aren't character values
      if (isSybase()) {
        if (key.startsWith("is_") || key.startsWith("time")) {
          addQuotes = false;
        }
        // featureloc
        if (key.equals("fmin") || key.equals("fmax") || key.equals("strand") ||
            key.equals("rank") || key.equals("locgroup") || key.equals("seqlen")) {
          addQuotes = false;
        }
        // going out on a limb here..
        if ((value == null) || (value.equals("null"))) {
          addQuotes = false;
        }
      }

      if (addQuotes) {
        query.append("'");
        query.append(value);
        query.append("'");
      } else {
        query.append(value);
      }
    }
  }

  /**
   * Examines the JDBC URL to determine whether the target database is PostgresQL.
   */
  private boolean isPostgreSQL() {
    return (dbUrl != null) && (dbUrl.startsWith("jdbc:postgresql"));
  }

  /**
   * Examines the JDBC URL to determine whether the target database is Sybase.
   */
  private boolean isSybase() {
    return (dbUrl != null) && (dbUrl.startsWith("jdbc:sybase"));
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.tasks.fogbugz;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.PasswordUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.*;
import com.intellij.tasks.impl.BaseRepository;
import com.intellij.tasks.impl.BaseRepositoryImpl;
import com.intellij.util.NotNullFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.annotations.Tag;
import icons.TasksIcons;
import org.apache.commons.httpclient.methods.PostMethod;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.jdom.xpath.XPath;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import java.util.Date;
import java.util.List;

/**
 * Task repository backed by the FogBugz XML API ({@code <url>/api.asp}).
 * <p>
 * All requests are POSTs to {@code api.asp} distinguished by a {@code cmd} parameter
 * ({@code logon}, {@code search}, {@code logoff}); authenticated calls carry the session
 * token obtained from {@code cmd=logon}. Responses are XML and are navigated with XPath.
 *
 * @author mkennedy
 */
@Tag("FogBugz")
public class FogBugzRepository extends BaseRepositoryImpl {
  private static final Logger LOG = Logger.getInstance("#com.intellij.tasks.fogbugz.FogBugzRepository");

  // Session token returned by cmd=logon; empty/null means "not logged in yet".
  private String myToken;

  public FogBugzRepository(TaskRepositoryType type) {
    super(type);
    setUrl("https://example.fogbugz.com");
  }

  /** Copy constructor used by {@link #clone()}. */
  private FogBugzRepository(FogBugzRepository other) {
    super(other);
    myToken = other.myToken;
  }

  // NOTE(review): hashCode() is not overridden here; presumably BaseRepository's
  // implementation is adequate for repository identity — confirm before relying on
  // hashing FogBugzRepository instances. The cast below is safe only because
  // super.equals() is expected to reject instances of other classes — TODO confirm.
  @Override
  public boolean equals(Object o) {
    return super.equals(o) && Comparing.equal(myToken, ((FogBugzRepository)o).myToken);
  }

  /** Default constructor required by the serialization framework. */
  @SuppressWarnings({"UnusedDeclaration"})
  public FogBugzRepository() {
  }

  @Override
  public Task[] getIssues(@Nullable String query, int max, final long since) throws Exception {
    // NOTE(review): 'max' and 'since' are ignored — the full result of the FogBugz
    // search is always returned. Preserved as-is to keep behavior unchanged.
    return getCases(StringUtil.notNullize(query));
  }

  /**
   * Runs a FogBugz {@code cmd=search} for the given query and maps each returned
   * {@code <case>} element to a {@link Task}. Logs in first if no token is cached.
   *
   * @throws Exception on a non-200 HTTP status or an {@code <error>} element in the response
   */
  @SuppressWarnings("unchecked")
  private Task[] getCases(String q) throws Exception {
    loginIfNeeded();
    PostMethod method = new PostMethod(getUrl() + "/api.asp");
    method.addParameter("token", myToken);
    method.addParameter("cmd", "search");
    method.addParameter("q", q);
    method.addParameter("cols", "sTitle,fOpen,dtOpened,dtLastUpdated,ixCategory");
    int status = getHttpClient().executeMethod(method);
    if (status != 200) {
      throw new Exception("Error listing cases: " + method.getStatusLine());
    }
    Document document = createSaxBuilder().build(method.getResponseBodyAsStream()).getDocument();
    List<Element> errorNodes = XPath.newInstance("/response/error").selectNodes(document);
    if (!errorNodes.isEmpty()) {
      throw new Exception("Error listing cases: " + errorNodes.get(0).getText());
    }
    final XPath commentPath = XPath.newInstance("events/event");
    final List<Element> nodes = (List<Element>)XPath.newInstance("/response/cases/case").selectNodes(document);
    final List<Task> tasks = ContainerUtil.mapNotNull(nodes, (NotNullFunction<Element, Task>)element -> createCase(element, commentPath));
    return tasks.toArray(new Task[0]);
  }

  /**
   * Builds a SAX parser hardened against XXE: the XML comes from a remote server, so
   * entity expansion and external-DTD loading are disabled. Validation stays off, as in
   * the original {@code new SAXBuilder(false)} calls this replaces.
   */
  @NotNull
  private static SAXBuilder createSaxBuilder() {
    SAXBuilder builder = new SAXBuilder(false);
    builder.setExpandEntities(false);
    builder.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    return builder;
  }

  /** Maps FogBugz's numeric {@code ixCategory} (1 = bug, 2 = feature) to a {@link TaskType}. */
  private static TaskType getType(Element element) {
    String category = element.getChildText("ixCategory");
    if ("1".equals(category)) {
      return TaskType.BUG;
    }
    else if ("2".equals(category)) {
      return TaskType.FEATURE;
    }
    return TaskType.OTHER;
  }

  /**
   * Wraps a {@code <case>} element in a lazy {@link Task} view: comments and dates are
   * extracted from the element on demand, each call re-reading the captured XML.
   *
   * @param commentPath precompiled XPath ({@code events/event}) for the case's comment nodes
   */
  @NotNull
  private Task createCase(final Element element, final XPath commentPath) {
    final String id = element.getAttributeValue("ixBug");
    final String title = element.getChildTextTrim("sTitle");
    final TaskType type = getType(element);
    return new Task() {
      @NotNull
      @Override
      public String getId() {
        return id;
      }

      @NotNull
      @Override
      public String getSummary() {
        return title;
      }

      @Nullable
      @Override
      public String getDescription() {
        return null;
      }

      @NotNull
      @Override
      @SuppressWarnings("unchecked")
      public Comment[] getComments() {
        List<Element> nodes;
        try {
          nodes = commentPath.selectNodes(element);
        }
        catch (Exception e) {
          throw new RuntimeException("Error selecting comment nodes", e);
        }
        // Delegating to the static helper avoids the previous anonymous class, which
        // shadowed the outer 'element' (case element) with the event element.
        List<Comment> comments =
          ContainerUtil.mapNotNull(nodes, (NotNullFunction<Element, Comment>)FogBugzRepository::createComment);
        return comments.toArray(new Comment[0]);
      }

      @NotNull
      @Override
      public Icon getIcon() {
        return TasksIcons.Fogbugz;
      }

      @NotNull
      @Override
      public TaskType getType() {
        return type;
      }

      @Nullable
      @Override
      public Date getUpdated() {
        return parseDate(element.getChildText("dtLastUpdated"));
      }

      @Nullable
      @Override
      public Date getCreated() {
        return parseDate(element.getChildTextTrim("dtOpened"));
      }

      @Override
      public boolean isClosed() {
        // parseBoolean: same result as Boolean.valueOf(...) without boxing; a missing
        // <fOpen> child yields null -> false -> "closed", as before.
        return !Boolean.parseBoolean(element.getChildTextTrim("fOpen"));
      }

      @Override
      public boolean isIssue() {
        return true;
      }

      @Nullable
      @Override
      public String getIssueUrl() {
        return getUrl() + "/default.asp?" + getId();
      }

      @Nullable
      @Override
      public TaskRepository getRepository() {
        return FogBugzRepository.this;
      }
    };
  }

  /** Wraps one {@code <event>} element in a lazy {@link Comment} view over its {@code s}, {@code sPerson} and {@code dt} children. */
  @NotNull
  private static Comment createComment(final Element element) {
    return new Comment() {
      @Override
      public String getText() {
        return element.getChildTextTrim("s");
      }

      @Nullable
      @Override
      public String getAuthor() {
        return element.getChildTextTrim("sPerson");
      }

      @Nullable
      @Override
      public Date getDate() {
        return parseDate(element.getChildTextTrim("dt"));
      }
    };
  }

  /**
   * Looks up a case by id via {@link #getCases(String)}. Returns {@code null} when the
   * search matches nothing; if the id unexpectedly matches several cases, logs a warning
   * and returns the first.
   */
  @Nullable
  @Override
  public Task findTask(@NotNull String id) throws Exception {
    Task[] tasks = getCases(id);
    switch (tasks.length) {
      case 0:
        return null;
      case 1:
        return tasks[0];
      default:
        LOG.warn("Expected unique case for case id: " + id + ", got " + tasks.length + " instead. Using the first one.");
        return tasks[0];
    }
  }

  @NotNull
  @Override
  public BaseRepository clone() {
    return new FogBugzRepository(this);
  }

  /** Performs a {@code cmd=logon} only when no session token is cached yet. */
  private void loginIfNeeded() throws Exception {
    if (StringUtil.isEmpty(myToken)) {
      login(getLoginMethod());
    }
  }

  /**
   * Executes the given logon request and caches the returned session token in
   * {@link #myToken}.
   *
   * @throws Exception on a non-200 status, a missing {@code <token>} element, or an
   *                   {@code <error>} element in the response
   */
  private void login(@NotNull PostMethod method) throws Exception {
    LOG.debug("Requesting new token");
    int status = getHttpClient().executeMethod(method);
    if (status != 200) {
      throw new Exception("Error logging in: " + method.getStatusLine());
    }
    Document document = createSaxBuilder().build(method.getResponseBodyAsStream()).getDocument();
    XPath path = XPath.newInstance("/response/token");
    Element result = (Element)path.selectSingleNode(document);
    if (result == null) {
      Element error = (Element)XPath.newInstance("/response/error").selectSingleNode(document);
      throw new Exception(error == null ? "Error logging in" : error.getText());
    }
    myToken = result.getTextTrim();
  }

  /** Builds (but does not execute) a {@code cmd=logon} request with the stored credentials. */
  @NotNull
  private PostMethod getLoginMethod() {
    PostMethod method = new PostMethod(getUrl() + "/api.asp");
    method.addParameter("cmd", "logon");
    method.addParameter("email", getUsername());
    method.addParameter("password", getPassword());
    return method;
  }

  /** Builds (but does not execute) a {@code cmd=logoff} request for the current token. */
  @NotNull
  private PostMethod getLogoutMethod() throws Exception {
    PostMethod method = new PostMethod(getUrl() + "/api.asp");
    method.addParameter("cmd", "logoff");
    assert myToken != null;
    method.addParameter("token", myToken);
    return method;
  }

  /**
   * Connection test: revokes any previously cached token via {@code cmd=logoff}, then
   * performs a fresh logon. {@link CancellableConnection#cancel()} aborts whichever
   * request is currently in flight.
   */
  @Nullable
  @Override
  public CancellableConnection createCancellableConnection() {
    return new CancellableConnection() {
      PostMethod myMethod;

      @Override
      protected void doTest() throws Exception {
        if (StringUtil.isNotEmpty(myToken)) {
          myMethod = getLogoutMethod();
          LOG.debug("Revoking previously used token");
          getHttpClient().executeMethod(myMethod);
        }
        myMethod = getLoginMethod();
        login(myMethod);
      }

      @Override
      public void cancel() {
        if (myMethod != null) {
          myMethod.abort();
        }
      }
    };
  }

  /**
   * Parses an ISO-8601/xsd:dateTime string (as sent by FogBugz) into a {@link Date}.
   *
   * @throws RuntimeException if no {@link DatatypeFactory} implementation is available
   */
  @NotNull
  private static Date parseDate(@NotNull String string) {
    try {
      return DatatypeFactory.newInstance().newXMLGregorianCalendar(string).toGregorianCalendar().getTime();
    }
    catch (DatatypeConfigurationException e) {
      throw new RuntimeException("Error configuring datatype factory", e);
    }
  }

  @Override
  public String getComment() {
    return "{id} (e.g. 2344245), {summary}";
  }

  @Tag("token")
  @NotNull
  public String getEncodedToken() {
    // The same approach as used for passwords in BaseRepository
    return PasswordUtil.encodePassword(myToken);
  }

  @SuppressWarnings("unused")
  public void setEncodedToken(@Nullable String token) {
    try {
      myToken = PasswordUtil.decodePassword(token);
    }
    catch (NumberFormatException ignored) {
      // Malformed persisted token: leave myToken unset so the next request logs in afresh.
    }
  }
}